save progress

StellaOps Bot
2026-01-06 09:42:02 +02:00
parent 94d68bee8b
commit 37e11918e0
443 changed files with 85863 additions and 897 deletions


@@ -107,7 +107,6 @@ public class ReachGraphE2ETests : IClassFixture<WebApplicationFactory<StellaOps.
var fetchedGraph = await getResponse.Content.ReadFromJsonAsync<ReachGraphMinimal>();
Assert.NotNull(fetchedGraph);
Assert.NotNull(fetchedGraph.Edges);
// Verify edge explanations are preserved
var edgeTypes = fetchedGraph.Edges.Select(e => e.Why.Type).Distinct().ToList();


@@ -59,7 +59,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
// Verify connection works
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
-await connection.OpenAsync();
+await connection.OpenAsync(TestContext.Current.CancellationToken);
connection.State.Should().Be(System.Data.ConnectionState.Open);
}
@@ -79,12 +79,12 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
{
// Arrange
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
-await connection.OpenAsync();
+await connection.OpenAsync(TestContext.Current.CancellationToken);
// Act - Create a test schema
using var createCmd = connection.CreateCommand();
createCmd.CommandText = "CREATE SCHEMA IF NOT EXISTS test_platform";
-await createCmd.ExecuteNonQueryAsync();
+await createCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
// Assert - Verify schema exists
using var verifyCmd = connection.CreateCommand();
@@ -92,7 +92,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
SELECT schema_name
FROM information_schema.schemata
WHERE schema_name = 'test_platform'";
-var result = await verifyCmd.ExecuteScalarAsync();
+var result = await verifyCmd.ExecuteScalarAsync(TestContext.Current.CancellationToken);
result.Should().Be("test_platform");
}
@@ -101,7 +101,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
{
// Arrange
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
await connection.OpenAsync();
await connection.OpenAsync(TestContext.Current.CancellationToken);
// Create test table
using var createCmd = connection.CreateCommand();
@@ -111,33 +111,33 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
name VARCHAR(100) NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW()
)";
-await createCmd.ExecuteNonQueryAsync();
+await createCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
// Act - Insert
using var insertCmd = connection.CreateCommand();
insertCmd.CommandText = "INSERT INTO test_crud (name) VALUES ('test-record') RETURNING id";
-var insertedId = await insertCmd.ExecuteScalarAsync();
+var insertedId = await insertCmd.ExecuteScalarAsync(TestContext.Current.CancellationToken);
insertedId.Should().NotBeNull();
// Act - Select
using var selectCmd = connection.CreateCommand();
selectCmd.CommandText = "SELECT name FROM test_crud WHERE id = @id";
selectCmd.Parameters.AddWithValue("id", insertedId!);
-var name = await selectCmd.ExecuteScalarAsync();
+var name = await selectCmd.ExecuteScalarAsync(TestContext.Current.CancellationToken);
name.Should().Be("test-record");
// Act - Update
using var updateCmd = connection.CreateCommand();
updateCmd.CommandText = "UPDATE test_crud SET name = 'updated-record' WHERE id = @id";
updateCmd.Parameters.AddWithValue("id", insertedId!);
-var rowsAffected = await updateCmd.ExecuteNonQueryAsync();
+var rowsAffected = await updateCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
rowsAffected.Should().Be(1);
// Act - Delete
using var deleteCmd = connection.CreateCommand();
deleteCmd.CommandText = "DELETE FROM test_crud WHERE id = @id";
deleteCmd.Parameters.AddWithValue("id", insertedId!);
-rowsAffected = await deleteCmd.ExecuteNonQueryAsync();
+rowsAffected = await deleteCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
rowsAffected.Should().Be(1);
}
@@ -150,7 +150,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
{
// Arrange
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
-await connection.OpenAsync();
+await connection.OpenAsync(TestContext.Current.CancellationToken);
// Act - Run a migration-like DDL script
var migrationScript = @"
@@ -177,12 +177,12 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
using var migrateCmd = connection.CreateCommand();
migrateCmd.CommandText = migrationScript;
-await migrateCmd.ExecuteNonQueryAsync();
+await migrateCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
// Assert - Verify migration recorded
using var verifyCmd = connection.CreateCommand();
verifyCmd.CommandText = "SELECT COUNT(*) FROM schema_migrations WHERE version = 'V2_create_scan_results'";
-var count = await verifyCmd.ExecuteScalarAsync();
+var count = await verifyCmd.ExecuteScalarAsync(TestContext.Current.CancellationToken);
Convert.ToInt32(count).Should().Be(1);
}
@@ -191,17 +191,17 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
{
// Arrange
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
-await connection.OpenAsync();
+await connection.OpenAsync(TestContext.Current.CancellationToken);
// Act - Create common extensions used by StellaOps
using var extCmd = connection.CreateCommand();
extCmd.CommandText = "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"";
-await extCmd.ExecuteNonQueryAsync();
+await extCmd.ExecuteNonQueryAsync(TestContext.Current.CancellationToken);
// Assert - Verify extension exists
using var verifyCmd = connection.CreateCommand();
verifyCmd.CommandText = "SELECT COUNT(*) FROM pg_extension WHERE extname = 'uuid-ossp'";
-var count = await verifyCmd.ExecuteScalarAsync();
+var count = await verifyCmd.ExecuteScalarAsync(TestContext.Current.CancellationToken);
Convert.ToInt32(count).Should().Be(1);
}
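The change repeated throughout this file is threading xUnit v3's per-test TestContext.Current.CancellationToken into every async Npgsql call, so an aborted or timed-out test run cancels in-flight database work instead of leaving it running. A minimal sketch of the pattern, reusing the fixture's _connectionString field (the SELECT 1 query and test name are illustrative only):
[Fact]
public async Task Query_HonoursAmbientCancellationToken()
{
    var ct = TestContext.Current.CancellationToken; // supplied per test by xunit.v3
    using var connection = new Npgsql.NpgsqlConnection(_connectionString);
    await connection.OpenAsync(ct);
    using var cmd = connection.CreateCommand();
    cmd.CommandText = "SELECT 1";
    var result = await cmd.ExecuteScalarAsync(ct);
    result.Should().Be(1);
}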


@@ -0,0 +1,363 @@
// <copyright file="ConvergenceTrackerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Testing.Temporal;
namespace StellaOps.Testing.Chaos.Tests;
/// <summary>
/// Unit tests for <see cref="DefaultConvergenceTracker"/>.
/// </summary>
public sealed class ConvergenceTrackerTests
{
private readonly SimulatedTimeProvider _timeProvider;
private readonly DefaultConvergenceTracker _tracker;
public ConvergenceTrackerTests()
{
_timeProvider = new SimulatedTimeProvider();
_tracker = new DefaultConvergenceTracker(
_timeProvider,
NullLogger<DefaultConvergenceTracker>.Instance,
pollInterval: TimeSpan.FromMilliseconds(1)); // Use 1ms to avoid real delays
}
[Fact]
public async Task CaptureSnapshotAsync_NoProbes_ReturnsEmptySnapshot()
{
// Act
var snapshot = await _tracker.CaptureSnapshotAsync(TestContext.Current.CancellationToken);
// Assert
Assert.Empty(snapshot.ProbeResults);
Assert.Equal(_timeProvider.GetUtcNow(), snapshot.CapturedAt);
}
[Fact]
public async Task CaptureSnapshotAsync_WithProbes_CapturesAllResults()
{
// Arrange
var probe1 = new DelegateProbe("probe-1", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
var probe2 = new DelegateProbe("probe-2", _ => Task.FromResult(
new ProbeResult(false, ImmutableDictionary<string, object>.Empty, ["error"])));
_tracker.RegisterProbe(probe1);
_tracker.RegisterProbe(probe2);
// Act
var snapshot = await _tracker.CaptureSnapshotAsync(TestContext.Current.CancellationToken);
// Assert
Assert.Equal(2, snapshot.ProbeResults.Count);
Assert.True(snapshot.ProbeResults["probe-1"].IsHealthy);
Assert.False(snapshot.ProbeResults["probe-2"].IsHealthy);
}
[Fact]
public async Task CaptureSnapshotAsync_ProbeThrows_RecordsFailure()
{
// Arrange
var failingProbe = new DelegateProbe("failing", _ =>
throw new InvalidOperationException("Probe failed"));
_tracker.RegisterProbe(failingProbe);
// Act
var snapshot = await _tracker.CaptureSnapshotAsync(TestContext.Current.CancellationToken);
// Assert
Assert.Single(snapshot.ProbeResults);
Assert.False(snapshot.ProbeResults["failing"].IsHealthy);
Assert.Contains("Probe failed", snapshot.ProbeResults["failing"].Anomalies[0]);
}
[Fact]
public async Task RegisterProbe_AddsProbe()
{
// Arrange
var probe = new DelegateProbe("test", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
// Act
_tracker.RegisterProbe(probe);
// Assert - should be included in snapshot
var snapshot = await _tracker.CaptureSnapshotAsync(TestContext.Current.CancellationToken);
Assert.Contains("test", snapshot.ProbeResults.Keys);
}
[Fact]
public async Task UnregisterProbe_RemovesProbe()
{
// Arrange
var probe = new DelegateProbe("test", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
_tracker.RegisterProbe(probe);
// Act
_tracker.UnregisterProbe("test");
// Assert - should not be in snapshot
var snapshot = await _tracker.CaptureSnapshotAsync(TestContext.Current.CancellationToken);
Assert.DoesNotContain("test", snapshot.ProbeResults.Keys);
}
[Fact]
public async Task WaitForConvergenceAsync_AllHealthy_ReturnsConverged()
{
// Arrange
var probe = new DelegateProbe("healthy", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
_tracker.RegisterProbe(probe);
var expectations = new ConvergenceExpectations(RequireAllHealthy: true);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromSeconds(1), TestContext.Current.CancellationToken);
// Assert
Assert.True(result.HasConverged);
Assert.Empty(result.Violations);
Assert.Equal(1, result.ConvergenceAttempts);
Assert.NotNull(result.TimeToConverge);
}
[Fact]
public async Task WaitForConvergenceAsync_UnhealthyComponent_ReturnsNotConverged()
{
// Arrange
var probe = new DelegateProbe("unhealthy", _ => Task.FromResult(
new ProbeResult(false, ImmutableDictionary<string, object>.Empty, [])));
_tracker.RegisterProbe(probe);
var expectations = new ConvergenceExpectations(RequireAllHealthy: true);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(50), TestContext.Current.CancellationToken);
// Assert
Assert.False(result.HasConverged);
Assert.Contains("Unhealthy components: unhealthy", result.Violations);
Assert.Null(result.TimeToConverge);
}
[Fact]
public async Task WaitForConvergenceAsync_EventuallyConverges_ReturnsSuccess()
{
// Arrange
var callCount = 0;
var probe = new DelegateProbe("eventual", _ =>
{
callCount++;
var isHealthy = callCount >= 3; // Becomes healthy after 2 failures
return Task.FromResult(
new ProbeResult(isHealthy, ImmutableDictionary<string, object>.Empty, []));
});
_tracker.RegisterProbe(probe);
var expectations = new ConvergenceExpectations(RequireAllHealthy: true);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(100), TestContext.Current.CancellationToken);
// Assert
Assert.True(result.HasConverged);
Assert.True(result.ConvergenceAttempts >= 3); // At least 3 attempts to converge
}
[Fact]
public async Task WaitForConvergenceAsync_RequiredComponent_NotFound_ReportsViolation()
{
// Arrange
var expectations = new ConvergenceExpectations(
RequireAllHealthy: false,
RequiredHealthyComponents: ["missing-component"]);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(50), TestContext.Current.CancellationToken);
// Assert
Assert.False(result.HasConverged);
Assert.Contains("Required component 'missing-component' not found", result.Violations);
}
[Fact]
public async Task WaitForConvergenceAsync_RequiredComponent_Unhealthy_ReportsViolation()
{
// Arrange
var probe = new DelegateProbe("critical-service", _ => Task.FromResult(
new ProbeResult(false, ImmutableDictionary<string, object>.Empty, [])));
_tracker.RegisterProbe(probe);
var expectations = new ConvergenceExpectations(
RequireAllHealthy: false,
RequiredHealthyComponents: ["critical-service"]);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(50), TestContext.Current.CancellationToken);
// Assert
Assert.False(result.HasConverged);
Assert.Contains("Required component 'critical-service' is unhealthy", result.Violations);
}
[Fact]
public async Task WaitForConvergenceAsync_Cancellation_Throws()
{
// Arrange
var probe = new DelegateProbe("slow", async ct =>
{
await Task.Delay(TimeSpan.FromSeconds(10), ct);
return new ProbeResult(true, ImmutableDictionary<string, object>.Empty, []);
});
_tracker.RegisterProbe(probe);
using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(50));
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(
() => _tracker.WaitForConvergenceAsync(
new ConvergenceExpectations(),
TimeSpan.FromSeconds(10),
cts.Token));
}
[Fact]
public async Task WaitForConvergenceAsync_OrphanedResources_ReportsViolation()
{
// Arrange
var probe = new DelegateProbe("resource-tracker", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, ["orphan file detected"])));
_tracker.RegisterProbe(probe);
var expectations = new ConvergenceExpectations(RequireNoOrphanedResources: true);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(50), TestContext.Current.CancellationToken);
// Assert
Assert.False(result.HasConverged);
Assert.Contains(result.Violations, v => v.Contains("Orphaned resources"));
}
[Fact]
public async Task WaitForConvergenceAsync_MetricValidation_ReportsViolation()
{
// Arrange
var metrics = new Dictionary<string, object> { ["cpu_usage"] = 95.0 };
var probe = new DelegateProbe("metrics", _ => Task.FromResult(
new ProbeResult(true, metrics.ToImmutableDictionary(), [])));
_tracker.RegisterProbe(probe);
var validators = new Dictionary<string, Func<object, bool>>
{
["cpu_usage"] = value => (double)value < 80.0 // Should fail - CPU is 95%
}.ToImmutableDictionary();
var expectations = new ConvergenceExpectations(
RequireAllHealthy: false,
RequireMetricsAccurate: true,
MetricValidators: validators);
// Act
var result = await _tracker.WaitForConvergenceAsync(expectations, TimeSpan.FromMilliseconds(50), TestContext.Current.CancellationToken);
// Assert
Assert.False(result.HasConverged);
Assert.Contains("Metric 'cpu_usage' failed validation", result.Violations);
}
}
/// <summary>
/// Unit tests for probe implementations.
/// </summary>
public sealed class ProbeTests
{
[Fact]
public async Task ComponentHealthProbe_ReturnsInjectorHealth()
{
// Arrange
var registry = new FailureInjectorRegistry();
var injector = registry.GetOrCreateInjector("postgres-main");
await injector.InjectAsync("postgres-main", FailureType.Degraded, TestContext.Current.CancellationToken);
var probe = new ComponentHealthProbe(registry, "postgres-main");
// Act
var result = await probe.ProbeAsync(TestContext.Current.CancellationToken);
// Assert
Assert.False(result.IsHealthy);
Assert.Equal("component:postgres-main", probe.Name);
}
[Fact]
public async Task DelegateProbe_ExecutesDelegate()
{
// Arrange
var executed = false;
var probe = new DelegateProbe("custom", _ =>
{
executed = true;
return Task.FromResult(new ProbeResult(
true,
ImmutableDictionary<string, object>.Empty,
[]));
});
// Act
var result = await probe.ProbeAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(executed);
Assert.True(result.IsHealthy);
Assert.Equal("custom", probe.Name);
}
[Fact]
public async Task AggregateProbe_CombinesResults()
{
// Arrange
var probe1 = new DelegateProbe("p1", _ => Task.FromResult(
new ProbeResult(true, new Dictionary<string, object> { ["m1"] = 1 }.ToImmutableDictionary(), [])));
var probe2 = new DelegateProbe("p2", _ => Task.FromResult(
new ProbeResult(false, new Dictionary<string, object> { ["m2"] = 2 }.ToImmutableDictionary(), ["error"])));
var aggregate = new AggregateProbe("combined", [probe1, probe2]);
// Act
var result = await aggregate.ProbeAsync(TestContext.Current.CancellationToken);
// Assert
Assert.False(result.IsHealthy); // One unhealthy means aggregate is unhealthy
Assert.Equal(2, result.Metrics.Count);
Assert.Contains("p1:m1", result.Metrics.Keys);
Assert.Contains("p2:m2", result.Metrics.Keys);
Assert.Single(result.Anomalies);
Assert.Contains("p2: error", result.Anomalies);
Assert.Equal("combined", aggregate.Name);
}
[Fact]
public async Task AggregateProbe_AllHealthy_IsHealthy()
{
// Arrange
var probe1 = new DelegateProbe("p1", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
var probe2 = new DelegateProbe("p2", _ => Task.FromResult(
new ProbeResult(true, ImmutableDictionary<string, object>.Empty, [])));
var aggregate = new AggregateProbe("all-healthy", [probe1, probe2]);
// Act
var result = await aggregate.ProbeAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.IsHealthy);
Assert.Empty(result.Anomalies);
}
}
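The tests above exercise each ConvergenceExpectations knob in isolation; a real scenario usually combines several in a single wait. A hedged sketch in the style of the fixture above — it assumes probes named "postgres-main" and "redis-cache" are already registered on _tracker, and the component names and CPU threshold are illustrative:
var expectations = new ConvergenceExpectations(
    RequireAllHealthy: true,
    RequiredHealthyComponents: ["postgres-main", "redis-cache"],
    RequireNoOrphanedResources: true,
    RequireMetricsAccurate: true,
    MetricValidators: new Dictionary<string, Func<object, bool>>
    {
        ["cpu_usage"] = value => (double)value < 80.0
    }.ToImmutableDictionary());
var result = await _tracker.WaitForConvergenceAsync(
    expectations, TimeSpan.FromSeconds(5), TestContext.Current.CancellationToken);
// Violations names every unmet expectation, which keeps failures self-describing.
Assert.True(result.HasConverged, string.Join("; ", result.Violations));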


@@ -0,0 +1,327 @@
// <copyright file="FailureChoreographerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Testing.Temporal;
namespace StellaOps.Testing.Chaos.Tests;
/// <summary>
/// Unit tests for <see cref="FailureChoreographer"/>.
/// </summary>
public sealed class FailureChoreographerTests
{
private readonly SimulatedTimeProvider _timeProvider;
private readonly FailureInjectorRegistry _registry;
private readonly FailureChoreographer _choreographer;
public FailureChoreographerTests()
{
_timeProvider = new SimulatedTimeProvider();
_registry = new FailureInjectorRegistry();
_choreographer = new FailureChoreographer(
_registry,
_timeProvider,
NullLogger<FailureChoreographer>.Instance);
}
[Fact]
public async Task ExecuteAsync_EmptyChoreography_ReturnsSuccess()
{
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Empty(result.Steps);
}
[Fact]
public void InjectFailure_AddsStepToChoreography()
{
// Arrange
_choreographer.InjectFailure("postgres-main", FailureType.Unavailable);
// Assert
Assert.Equal(1, _choreographer.StepCount);
}
[Fact]
public async Task ExecuteAsync_InjectsFailure_ComponentBecomesUnhealthy()
{
// Arrange
_choreographer.InjectFailure("postgres-main", FailureType.Unavailable);
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Single(result.Steps);
Assert.Equal(StepType.InjectFailure, result.Steps[0].StepType);
// Verify component is now unhealthy
var injector = _registry.GetOrCreateInjector("postgres-main");
var health = await injector.GetHealthAsync("postgres-main", TestContext.Current.CancellationToken);
Assert.False(health.IsHealthy);
Assert.Equal(FailureType.Unavailable, health.CurrentFailure);
}
[Fact]
public async Task ExecuteAsync_RecoverComponent_ComponentBecomesHealthy()
{
// Arrange
_choreographer
.InjectFailure("redis-cache", FailureType.Timeout)
.RecoverComponent("redis-cache");
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Equal(2, result.Steps.Length);
// Verify component is healthy again
var injector = _registry.GetOrCreateInjector("redis-cache");
var health = await injector.GetHealthAsync("redis-cache", TestContext.Current.CancellationToken);
Assert.True(health.IsHealthy);
}
[Fact]
public async Task ExecuteAsync_WithDelay_AdvancesSimulatedTime()
{
// Arrange
var startTime = _timeProvider.GetUtcNow();
_choreographer
.InjectFailure("service-a", FailureType.Degraded, delay: TimeSpan.FromMinutes(5))
.Wait(TimeSpan.FromMinutes(10));
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Equal(TimeSpan.FromMinutes(15), result.TotalDuration);
}
[Fact]
public async Task ExecuteAsync_ExecuteOperation_RunsOperation()
{
// Arrange
var operationExecuted = false;
_choreographer.ExecuteOperation(
"test-operation",
() =>
{
operationExecuted = true;
return Task.CompletedTask;
});
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.True(operationExecuted);
}
[Fact]
public async Task ExecuteAsync_ExecuteOperationWithCancellation_PropagatesCancellation()
{
// Arrange
CancellationToken receivedToken = default;
_choreographer.ExecuteOperationWithCancellation(
"cancellable-operation",
ct =>
{
receivedToken = ct;
return Task.CompletedTask;
});
using var cts = new CancellationTokenSource();
// Act
var result = await _choreographer.ExecuteAsync(cts.Token);
// Assert
Assert.True(result.Success);
Assert.Equal(cts.Token, receivedToken);
}
[Fact]
public async Task ExecuteAsync_AssertCondition_PassingAssertion_Succeeds()
{
// Arrange
_choreographer.AssertCondition(
"always-true",
() => Task.FromResult(true));
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Single(result.Steps);
Assert.True(result.Steps[0].Success);
}
[Fact]
public async Task ExecuteAsync_AssertCondition_FailingAssertion_FailsAndStops()
{
// Arrange
var secondStepExecuted = false;
_choreographer
.AssertCondition("always-false", () => Task.FromResult(false))
.ExecuteOperation("should-not-run", () =>
{
secondStepExecuted = true;
return Task.CompletedTask;
});
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.False(result.Success);
Assert.Single(result.Steps); // Only first step executed
Assert.False(result.Steps[0].Success);
Assert.True(result.Steps[0].IsBlocking);
Assert.False(secondStepExecuted);
}
[Fact]
public async Task ExecuteAsync_OperationThrows_CapturesException()
{
// Arrange
var expectedException = new InvalidOperationException("Test error");
_choreographer.ExecuteOperation(
"failing-operation",
() => throw expectedException);
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success); // Execute steps don't block by default
Assert.Single(result.Steps);
Assert.False(result.Steps[0].Success);
Assert.Same(expectedException, result.Steps[0].Exception);
}
[Fact]
public async Task ExecuteAsync_WithCancellation_ThrowsOperationCanceled()
{
// Arrange
using var cts = new CancellationTokenSource();
_choreographer.ExecuteOperation(
"long-operation",
async () =>
{
await cts.CancelAsync();
cts.Token.ThrowIfCancellationRequested();
});
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(
() => _choreographer.ExecuteAsync(cts.Token));
}
[Fact]
public void Clear_RemovesAllSteps()
{
// Arrange
_choreographer
.InjectFailure("a", FailureType.Unavailable)
.InjectFailure("b", FailureType.Timeout)
.RecoverComponent("a");
Assert.Equal(3, _choreographer.StepCount);
// Act
_choreographer.Clear();
// Assert
Assert.Equal(0, _choreographer.StepCount);
}
[Fact]
public async Task ExecuteAsync_ComplexScenario_ExecutesInOrder()
{
// Arrange
var executionOrder = new List<string>();
_choreographer
.ExecuteOperation("step-1", () =>
{
executionOrder.Add("step-1");
return Task.CompletedTask;
})
.InjectFailure("postgres", FailureType.Unavailable)
.ExecuteOperation("step-2", () =>
{
executionOrder.Add("step-2");
return Task.CompletedTask;
})
.Wait(TimeSpan.FromSeconds(30))
.RecoverComponent("postgres")
.ExecuteOperation("step-3", () =>
{
executionOrder.Add("step-3");
return Task.CompletedTask;
})
.AssertCondition("final-check", () => Task.FromResult(true));
// Act
var result = await _choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.Equal(7, result.Steps.Length);
Assert.Equal(["step-1", "step-2", "step-3"], executionOrder);
}
[Fact]
public async Task ExecuteAsync_WithConvergenceTracker_CapturesState()
{
// Arrange
var tracker = new DefaultConvergenceTracker(
_timeProvider,
NullLogger<DefaultConvergenceTracker>.Instance);
var choreographer = new FailureChoreographer(
_registry,
_timeProvider,
NullLogger<FailureChoreographer>.Instance,
tracker);
tracker.RegisterProbe(new ComponentHealthProbe(_registry, "db"));
choreographer.InjectFailure("db", FailureType.Degraded);
// Act
var result = await choreographer.ExecuteAsync(TestContext.Current.CancellationToken);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.ConvergenceState);
Assert.False(result.ConvergenceState.HasConverged);
Assert.Single(result.ConvergenceState.UnhealthyComponents);
}
[Fact]
public void FluentChaining_ReturnsChoreographer()
{
// Act & Assert - verify fluent chaining works
var result = _choreographer
.InjectFailure("a", FailureType.Unavailable)
.RecoverComponent("a")
.Wait(TimeSpan.FromSeconds(1))
.ExecuteOperation("op", () => Task.CompletedTask)
.AssertCondition("check", () => Task.FromResult(true));
Assert.Same(_choreographer, result);
Assert.Equal(5, _choreographer.StepCount);
}
}
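The Wait and delay semantics above rely on SimulatedTimeProvider from StellaOps.Testing.Temporal: delays advance the simulated clock rather than sleeping, which is why ExecuteAsync_WithDelay_AdvancesSimulatedTime reports a 15-minute TotalDuration without the test taking that long. A minimal sketch of that behaviour, assuming only the GetUtcNow and Advance members used elsewhere in this commit:
var time = new SimulatedTimeProvider();
var before = time.GetUtcNow();
time.Advance(TimeSpan.FromMinutes(15)); // returns immediately; no wall-clock wait
Assert.Equal(before + TimeSpan.FromMinutes(15), time.GetUtcNow());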


@@ -0,0 +1,304 @@
// <copyright file="FailureInjectorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Testing.Chaos.Tests;
/// <summary>
/// Unit tests for failure injector implementations.
/// </summary>
public sealed class FailureInjectorTests
{
[Fact]
public async Task InMemoryFailureInjector_InjectFailure_SetsComponentUnhealthy()
{
// Arrange
var injector = new InMemoryFailureInjector("database");
// Act
await injector.InjectAsync("db-main", FailureType.Unavailable, TestContext.Current.CancellationToken);
// Assert
var health = await injector.GetHealthAsync("db-main", TestContext.Current.CancellationToken);
Assert.False(health.IsHealthy);
Assert.Equal(FailureType.Unavailable, health.CurrentFailure);
}
[Fact]
public async Task InMemoryFailureInjector_Recover_SetsComponentHealthy()
{
// Arrange
var injector = new InMemoryFailureInjector("cache");
await injector.InjectAsync("cache-1", FailureType.Timeout, TestContext.Current.CancellationToken);
// Act
await injector.RecoverAsync("cache-1", TestContext.Current.CancellationToken);
// Assert
var health = await injector.GetHealthAsync("cache-1", TestContext.Current.CancellationToken);
Assert.True(health.IsHealthy);
Assert.Equal(FailureType.None, health.CurrentFailure);
}
[Fact]
public async Task InMemoryFailureInjector_SimulateOperation_ThrowsWhenUnavailable()
{
// Arrange
var injector = new InMemoryFailureInjector("service");
await injector.InjectAsync("service-1", FailureType.Unavailable, TestContext.Current.CancellationToken);
// Act & Assert
await Assert.ThrowsAsync<InvalidOperationException>(
() => injector.SimulateOperationAsync("service-1", TestContext.Current.CancellationToken));
}
[Fact]
public async Task InMemoryFailureInjector_SimulateOperation_ThrowsTimeoutWhenTimeout()
{
// Arrange
var injector = new InMemoryFailureInjector("api");
await injector.InjectAsync("api-1", FailureType.Timeout, TestContext.Current.CancellationToken);
using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(100));
// Act & Assert
// Should be cancelled before the 30-second delay completes
await Assert.ThrowsAnyAsync<OperationCanceledException>(
() => injector.SimulateOperationAsync("api-1", cts.Token));
}
[Fact]
public async Task InMemoryFailureInjector_SimulateOperation_SucceedsWhenNoFailure()
{
// Arrange
var injector = new InMemoryFailureInjector("service");
// Act & Assert - should not throw
await injector.SimulateOperationAsync("service-1", TestContext.Current.CancellationToken);
}
[Fact]
public async Task InMemoryFailureInjector_SimulateOperation_DegradedAddsDelay()
{
// Arrange
var injector = new InMemoryFailureInjector("service");
await injector.InjectAsync("service-1", FailureType.Degraded, TestContext.Current.CancellationToken);
var start = DateTimeOffset.UtcNow;
// Act
await injector.SimulateOperationAsync("service-1", TestContext.Current.CancellationToken);
// Assert - should have a noticeable delay
var elapsed = DateTimeOffset.UtcNow - start;
Assert.True(elapsed >= TimeSpan.FromMilliseconds(400)); // ~500ms delay
}
[Fact]
public void InMemoryFailureInjector_ComponentType_ReturnsConstructorValue()
{
// Arrange
var injector = new InMemoryFailureInjector("postgres");
// Assert
Assert.Equal("postgres", injector.ComponentType);
}
[Fact]
public async Task InMemoryFailureInjector_GetHealth_ReturnsComponentId()
{
// Arrange
var injector = new InMemoryFailureInjector("redis");
// Act
var health = await injector.GetHealthAsync("redis-main", TestContext.Current.CancellationToken);
// Assert
Assert.Equal("redis-main", health.ComponentId);
}
[Fact]
public async Task InMemoryFailureInjector_GetHealth_CapturesLastError()
{
// Arrange
var injector = new InMemoryFailureInjector("service");
await injector.InjectAsync("svc-1", FailureType.Unavailable, TestContext.Current.CancellationToken);
// Trigger the error
try
{
await injector.SimulateOperationAsync("svc-1", TestContext.Current.CancellationToken);
}
catch (InvalidOperationException)
{
// Expected
}
// Act
var health = await injector.GetHealthAsync("svc-1", TestContext.Current.CancellationToken);
// Assert
Assert.NotNull(health.LastError);
Assert.Contains("unavailable", health.LastError, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task InMemoryFailureInjector_GetActiveFailureIds_ReturnsActiveComponents()
{
// Arrange
var injector = new InMemoryFailureInjector("service");
await injector.InjectAsync("svc-1", FailureType.Unavailable, TestContext.Current.CancellationToken);
await injector.InjectAsync("svc-2", FailureType.Timeout, TestContext.Current.CancellationToken);
await injector.InjectAsync("svc-3", FailureType.Degraded, TestContext.Current.CancellationToken);
await injector.RecoverAsync("svc-2", TestContext.Current.CancellationToken); // Recover one
// Act
var activeIds = injector.GetActiveFailureIds();
// Assert
Assert.Equal(2, activeIds.Count);
Assert.Contains("svc-1", activeIds);
Assert.Contains("svc-3", activeIds);
Assert.DoesNotContain("svc-2", activeIds);
}
}
/// <summary>
/// Unit tests for <see cref="FailureInjectorRegistry"/>.
/// </summary>
public sealed class FailureInjectorRegistryTests
{
[Fact]
public void Register_AddsInjector()
{
// Arrange
var registry = new FailureInjectorRegistry();
var injector = new InMemoryFailureInjector("postgres");
// Act
registry.Register(injector);
// Assert
var retrieved = registry.GetInjector("postgres");
Assert.Same(injector, retrieved);
}
[Fact]
public void GetInjector_UnknownType_ReturnsNull()
{
// Arrange
var registry = new FailureInjectorRegistry();
// Act
var result = registry.GetInjector("unknown");
// Assert
Assert.Null(result);
}
[Fact]
public void GetOrCreateInjector_CreatesInMemoryInjector()
{
// Arrange
var registry = new FailureInjectorRegistry();
// Act
var injector = registry.GetOrCreateInjector("postgres-main");
// Assert
Assert.NotNull(injector);
Assert.IsType<InMemoryFailureInjector>(injector);
Assert.Equal("postgres", injector.ComponentType);
}
[Fact]
public void GetOrCreateInjector_ExtractsTypeFromId_WithDash()
{
// Arrange
var registry = new FailureInjectorRegistry();
// Act
var injector = registry.GetOrCreateInjector("redis-cache-primary");
// Assert
Assert.Equal("redis", injector.ComponentType);
}
[Fact]
public void GetOrCreateInjector_ExtractsTypeFromId_WithUnderscore()
{
// Arrange
var registry = new FailureInjectorRegistry();
// Act
var injector = registry.GetOrCreateInjector("mongo_replica_1");
// Assert
Assert.Equal("mongo", injector.ComponentType);
}
[Fact]
public void GetOrCreateInjector_ReturnsSameInjector_ForSameType()
{
// Arrange
var registry = new FailureInjectorRegistry();
// Act
var injector1 = registry.GetOrCreateInjector("postgres-main");
var injector2 = registry.GetOrCreateInjector("postgres-replica");
// Assert
Assert.Same(injector1, injector2);
}
[Fact]
public void GetOrCreateInjector_ReturnsRegisteredInjector_IfExists()
{
// Arrange
var registry = new FailureInjectorRegistry();
var customInjector = new InMemoryFailureInjector("custom");
registry.Register(customInjector);
// Act
var injector = registry.GetOrCreateInjector("custom-service");
// Assert
Assert.Same(customInjector, injector);
}
[Fact]
public async Task RecoverAllAsync_RecoversAllComponents()
{
// Arrange
var registry = new FailureInjectorRegistry();
var injector1 = registry.GetOrCreateInjector("postgres-main");
var injector2 = registry.GetOrCreateInjector("redis-cache");
await injector1.InjectAsync("postgres-main", FailureType.Unavailable, TestContext.Current.CancellationToken);
await injector2.InjectAsync("redis-cache", FailureType.Timeout, TestContext.Current.CancellationToken);
// Act
await registry.RecoverAllAsync(TestContext.Current.CancellationToken);
// Assert
var health1 = await injector1.GetHealthAsync("postgres-main", TestContext.Current.CancellationToken);
var health2 = await injector2.GetHealthAsync("redis-cache", TestContext.Current.CancellationToken);
Assert.True(health1.IsHealthy);
Assert.True(health2.IsHealthy);
}
[Fact]
public void Register_IsCaseInsensitive()
{
// Arrange
var registry = new FailureInjectorRegistry();
var injector = new InMemoryFailureInjector("PostgreSQL");
registry.Register(injector);
// Act
var retrieved = registry.GetInjector("postgresql");
// Assert
Assert.Same(injector, retrieved);
}
}
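As the registry tests show, GetOrCreateInjector derives the component type from the identifier prefix before the first '-' or '_' and shares one in-memory injector per type. A short sketch of that convention (the component identifiers are illustrative):
var registry = new FailureInjectorRegistry();
// Both identifiers resolve to the shared "postgres" injector.
var main = registry.GetOrCreateInjector("postgres-main");
var replica = registry.GetOrCreateInjector("postgres-replica");
Assert.Same(main, replica);
await main.InjectAsync("postgres-main", FailureType.Degraded, TestContext.Current.CancellationToken);
var health = await main.GetHealthAsync("postgres-main", TestContext.Current.CancellationToken);
Assert.False(health.IsHealthy);
// Blanket recovery is the natural teardown step after a scenario.
await registry.RecoverAllAsync(TestContext.Current.CancellationToken);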


@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio" PrivateAssets="all" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Chaos\StellaOps.Testing.Chaos.csproj" />
<ProjectReference Include="..\StellaOps.Testing.Temporal\StellaOps.Testing.Temporal.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,390 @@
// <copyright file="FailureChoreographer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_TEST_failure_choreography
// Task: FCHR-002
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Testing.Temporal;
namespace StellaOps.Testing.Chaos;
/// <summary>
/// Orchestrates sequenced failure scenarios across dependencies.
/// </summary>
public sealed class FailureChoreographer
{
private readonly List<ChoreographyStep> _steps = [];
private readonly FailureInjectorRegistry _injectorRegistry;
private readonly SimulatedTimeProvider _timeProvider;
private readonly IConvergenceTracker? _convergenceTracker;
private readonly ILogger<FailureChoreographer> _logger;
/// <summary>
/// Initializes a new instance of the <see cref="FailureChoreographer"/> class.
/// </summary>
/// <param name="injectorRegistry">Registry of failure injectors.</param>
/// <param name="timeProvider">Time provider for simulated time.</param>
/// <param name="logger">Logger instance.</param>
/// <param name="convergenceTracker">Optional convergence tracker.</param>
public FailureChoreographer(
FailureInjectorRegistry injectorRegistry,
SimulatedTimeProvider timeProvider,
ILogger<FailureChoreographer> logger,
IConvergenceTracker? convergenceTracker = null)
{
_injectorRegistry = injectorRegistry;
_timeProvider = timeProvider;
_logger = logger;
_convergenceTracker = convergenceTracker;
}
/// <summary>
/// Add a step to inject a failure.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="failureType">Type of failure to inject.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer InjectFailure(
string componentId,
FailureType failureType,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.InjectFailure,
componentId,
failureType,
delay ?? TimeSpan.Zero));
return this;
}
/// <summary>
/// Add a step to recover a component.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer RecoverComponent(
string componentId,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.Recover,
componentId,
FailureType.None,
delay ?? TimeSpan.Zero));
return this;
}
/// <summary>
/// Add a step to execute an operation during the scenario.
/// </summary>
/// <param name="operationName">Name of the operation.</param>
/// <param name="operation">Operation to execute.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer ExecuteOperation(
string operationName,
Func<Task> operation,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.Execute,
operationName,
FailureType.None,
delay ?? TimeSpan.Zero)
{
Operation = _ => operation()
});
return this;
}
/// <summary>
/// Add a step to execute an operation with cancellation support.
/// </summary>
/// <param name="operationName">Name of the operation.</param>
/// <param name="operation">Operation to execute.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer ExecuteOperationWithCancellation(
string operationName,
Func<CancellationToken, Task> operation,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.Execute,
operationName,
FailureType.None,
delay ?? TimeSpan.Zero)
{
Operation = operation
});
return this;
}
/// <summary>
/// Add a step to assert a condition.
/// </summary>
/// <param name="conditionName">Name of the condition.</param>
/// <param name="condition">Condition to assert.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer AssertCondition(
string conditionName,
Func<Task<bool>> condition,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.Assert,
conditionName,
FailureType.None,
delay ?? TimeSpan.Zero)
{
Condition = _ => condition(),
AssertionDescription = conditionName
});
return this;
}
/// <summary>
/// Add a step to assert a condition with cancellation support.
/// </summary>
/// <param name="conditionName">Name of the condition.</param>
/// <param name="condition">Condition to assert.</param>
/// <param name="delay">Delay before executing this step.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer AssertConditionWithCancellation(
string conditionName,
Func<CancellationToken, Task<bool>> condition,
TimeSpan? delay = null)
{
_steps.Add(new ChoreographyStep(
StepType.Assert,
conditionName,
FailureType.None,
delay ?? TimeSpan.Zero)
{
Condition = condition,
AssertionDescription = conditionName
});
return this;
}
/// <summary>
/// Add a step to wait for a duration.
/// </summary>
/// <param name="duration">Duration to wait.</param>
/// <returns>This choreographer for chaining.</returns>
public FailureChoreographer Wait(TimeSpan duration)
{
_steps.Add(new ChoreographyStep(
StepType.Wait,
"wait",
FailureType.None,
duration));
return this;
}
/// <summary>
/// Execute the choreographed failure scenario.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>The choreography result.</returns>
public async Task<ChoreographyResult> ExecuteAsync(CancellationToken ct = default)
{
var stepResults = new List<ChoreographyStepResult>();
var startTime = _timeProvider.GetUtcNow();
var stepIndex = 0;
_logger.LogInformation(
"Starting failure choreography with {StepCount} steps",
_steps.Count);
foreach (var step in _steps)
{
ct.ThrowIfCancellationRequested();
stepIndex++;
// Apply delay (advance simulated time)
if (step.Delay > TimeSpan.Zero)
{
_timeProvider.Advance(step.Delay);
_logger.LogDebug(
"Step {StepIndex}: Delayed {Delay}",
stepIndex, step.Delay);
}
var stepStart = _timeProvider.GetUtcNow();
var result = await ExecuteStepAsync(step, stepIndex, ct);
result = result with
{
Timestamp = stepStart,
Duration = _timeProvider.GetUtcNow() - stepStart
};
stepResults.Add(result);
_logger.LogInformation(
"Step {StepIndex} {StepType} '{ComponentId}': {Status}",
stepIndex, step.StepType, step.ComponentId,
result.Success ? "Success" : "Failed");
if (!result.Success && result.IsBlocking)
{
_logger.LogWarning(
"Step {StepIndex} failed and is blocking. Stopping choreography.",
stepIndex);
break;
}
}
var convergenceState = await CaptureConvergenceStateAsync(ct);
var totalDuration = _timeProvider.GetUtcNow() - startTime;
var success = stepResults.All(r => r.Success || !r.IsBlocking);
_logger.LogInformation(
"Choreography completed: {Status} in {Duration}",
success ? "Success" : "Failed", totalDuration);
return new ChoreographyResult(
Success: success,
Steps: [.. stepResults],
TotalDuration: totalDuration,
ConvergenceState: convergenceState);
}
private async Task<ChoreographyStepResult> ExecuteStepAsync(
ChoreographyStep step,
int stepIndex,
CancellationToken ct)
{
try
{
switch (step.StepType)
{
case StepType.InjectFailure:
await InjectFailureAsync(step.ComponentId, step.FailureType, ct);
return new ChoreographyStepResult(step.ComponentId, true, step.StepType);
case StepType.Recover:
await RecoverComponentAsync(step.ComponentId, ct);
return new ChoreographyStepResult(step.ComponentId, true, step.StepType);
case StepType.Execute:
await step.Operation!(ct);
return new ChoreographyStepResult(step.ComponentId, true, step.StepType);
case StepType.Assert:
var passed = await step.Condition!(ct);
if (!passed)
{
_logger.LogWarning(
"Assertion '{Assertion}' failed at step {StepIndex}",
step.AssertionDescription, stepIndex);
}
return new ChoreographyStepResult(
step.ComponentId, passed, step.StepType, IsBlocking: true);
case StepType.Wait:
// Time already advanced in delay handling
return new ChoreographyStepResult(step.ComponentId, true, step.StepType);
default:
throw new InvalidOperationException($"Unknown step type: {step.StepType}");
}
}
catch (OperationCanceledException)
{
throw; // Re-throw cancellation
}
catch (Exception ex)
{
_logger.LogError(ex,
"Step {StepIndex} {StepType} '{ComponentId}' threw exception",
stepIndex, step.StepType, step.ComponentId);
return new ChoreographyStepResult(
step.ComponentId,
false,
step.StepType,
Exception: ex,
IsBlocking: step.StepType == StepType.Assert);
}
}
private async Task InjectFailureAsync(
string componentId,
FailureType failureType,
CancellationToken ct)
{
var injector = _injectorRegistry.GetOrCreateInjector(componentId);
await injector.InjectAsync(componentId, failureType, ct);
_logger.LogInformation(
"Injected {FailureType} failure into {ComponentId}",
failureType, componentId);
}
private async Task RecoverComponentAsync(string componentId, CancellationToken ct)
{
var injector = _injectorRegistry.GetOrCreateInjector(componentId);
await injector.RecoverAsync(componentId, ct);
_logger.LogInformation("Recovered component {ComponentId}", componentId);
}
private async Task<ConvergenceState?> CaptureConvergenceStateAsync(CancellationToken ct)
{
if (_convergenceTracker is null)
{
return null;
}
try
{
var snapshot = await _convergenceTracker.CaptureSnapshotAsync(ct);
var healthyComponents = snapshot.ProbeResults
.Where(p => p.Value.IsHealthy)
.Select(p => p.Key)
.ToImmutableArray();
var unhealthyComponents = snapshot.ProbeResults
.Where(p => !p.Value.IsHealthy)
.Select(p => p.Key)
.ToImmutableArray();
var anomalies = snapshot.ProbeResults
.SelectMany(p => p.Value.Anomalies)
.ToImmutableArray();
return new ConvergenceState(
HasConverged: unhealthyComponents.Length == 0 && anomalies.Length == 0,
HealthyComponents: healthyComponents,
UnhealthyComponents: unhealthyComponents,
Anomalies: anomalies);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to capture convergence state");
return null;
}
}
/// <summary>
/// Clear all steps from the choreographer.
/// </summary>
public void Clear()
{
_steps.Clear();
}
/// <summary>
/// Gets the number of steps in the choreography.
/// </summary>
public int StepCount => _steps.Count;
}
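A compact usage sketch of the fluent API above, wired against the in-memory registry and simulated clock; the component name, operation, and assertion are placeholders for real system-under-test hooks, and NullLogger comes from Microsoft.Extensions.Logging.Abstractions as in the tests earlier in this commit:
var registry = new FailureInjectorRegistry();
var time = new SimulatedTimeProvider();
var choreographer = new FailureChoreographer(
    registry, time, NullLogger<FailureChoreographer>.Instance);
var result = await choreographer
    .InjectFailure("postgres-main", FailureType.Unavailable)
    .ExecuteOperation("enqueue-scan", () => Task.CompletedTask)     // stand-in for real work
    .Wait(TimeSpan.FromSeconds(30))                                 // advances simulated time only
    .RecoverComponent("postgres-main")
    .AssertCondition("scan-completed", () => Task.FromResult(true)) // stand-in for a real check
    .ExecuteAsync(CancellationToken.None);
// A failed blocking step (a false assertion) stops the run and flips Success to false.
foreach (var step in result.Steps)
{
    Console.WriteLine($"{step.StepType}: {(step.Success ? "ok" : "failed")}");
}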


@@ -0,0 +1,388 @@
// <copyright file="IConvergenceTracker.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_TEST_failure_choreography
// Task: FCHR-003, FCHR-007, FCHR-008
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
namespace StellaOps.Testing.Chaos;
/// <summary>
/// Tracks system convergence after failure scenarios.
/// </summary>
public interface IConvergenceTracker
{
/// <summary>
/// Capture a snapshot of the current system state.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>System state snapshot.</returns>
Task<SystemStateSnapshot> CaptureSnapshotAsync(CancellationToken ct = default);
/// <summary>
/// Wait for system to converge to expected state.
/// </summary>
/// <param name="expectations">Convergence expectations.</param>
/// <param name="timeout">Maximum time to wait.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Convergence result.</returns>
Task<ConvergenceResult> WaitForConvergenceAsync(
ConvergenceExpectations expectations,
TimeSpan timeout,
CancellationToken ct = default);
/// <summary>
/// Register a probe for monitoring system state.
/// </summary>
/// <param name="probe">The probe to register.</param>
void RegisterProbe(IStateProbe probe);
/// <summary>
/// Unregister a probe.
/// </summary>
/// <param name="probeName">Name of the probe to unregister.</param>
void UnregisterProbe(string probeName);
}
/// <summary>
/// Probes system state for convergence tracking.
/// </summary>
public interface IStateProbe
{
/// <summary>
/// Gets the name of this probe.
/// </summary>
string Name { get; }
/// <summary>
/// Probe the current state.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>Probe result.</returns>
Task<ProbeResult> ProbeAsync(CancellationToken ct = default);
}
/// <summary>
/// Default implementation of convergence tracker.
/// </summary>
public sealed class DefaultConvergenceTracker : IConvergenceTracker
{
private readonly Dictionary<string, IStateProbe> _probes = new(StringComparer.OrdinalIgnoreCase);
private readonly TimeProvider _timeProvider;
private readonly ILogger<DefaultConvergenceTracker> _logger;
private readonly TimeSpan _pollInterval;
/// <summary>
/// Initializes a new instance of the <see cref="DefaultConvergenceTracker"/> class.
/// </summary>
/// <param name="timeProvider">Time provider.</param>
/// <param name="logger">Logger instance.</param>
/// <param name="pollInterval">Interval between convergence checks.</param>
public DefaultConvergenceTracker(
TimeProvider timeProvider,
ILogger<DefaultConvergenceTracker> logger,
TimeSpan? pollInterval = null)
{
_timeProvider = timeProvider;
_logger = logger;
_pollInterval = pollInterval ?? TimeSpan.FromMilliseconds(100);
}
/// <inheritdoc/>
public async Task<SystemStateSnapshot> CaptureSnapshotAsync(CancellationToken ct = default)
{
var results = new Dictionary<string, ProbeResult>();
foreach (var (name, probe) in _probes)
{
try
{
var result = await probe.ProbeAsync(ct);
results[name] = result;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Probe '{ProbeName}' failed", name);
results[name] = new ProbeResult(
IsHealthy: false,
Metrics: ImmutableDictionary<string, object>.Empty,
Anomalies: [$"Probe failed: {ex.Message}"]);
}
}
return new SystemStateSnapshot(
CapturedAt: _timeProvider.GetUtcNow(),
ProbeResults: results.ToImmutableDictionary());
}
/// <inheritdoc/>
public async Task<ConvergenceResult> WaitForConvergenceAsync(
ConvergenceExpectations expectations,
TimeSpan timeout,
CancellationToken ct = default)
{
var startTime = _timeProvider.GetUtcNow();
var deadline = startTime + timeout;
var attempts = 0;
var violations = new List<string>();
var maxAttempts = Math.Max(1, (int)(timeout.TotalMilliseconds / Math.Max(1, _pollInterval.TotalMilliseconds)) + 1);
_logger.LogDebug(
"Waiting for convergence with timeout {Timeout}",
timeout);
while (attempts < maxAttempts)
{
ct.ThrowIfCancellationRequested();
attempts++;
var snapshot = await CaptureSnapshotAsync(ct);
violations = CheckExpectations(snapshot, expectations);
if (violations.Count == 0)
{
var elapsed = _timeProvider.GetUtcNow() - startTime;
_logger.LogInformation(
"System converged after {Attempts} attempts in {Elapsed}",
attempts, elapsed);
return new ConvergenceResult(
HasConverged: true,
Violations: [],
ConvergenceAttempts: attempts,
TimeToConverge: elapsed);
}
_logger.LogDebug(
"Convergence attempt {Attempt}: {ViolationCount} violations",
attempts, violations.Count);
// Use Task.Yield for very short intervals to avoid blocking
if (_pollInterval <= TimeSpan.FromMilliseconds(1))
{
await Task.Yield();
}
else
{
await Task.Delay(_pollInterval, ct);
}
}
_logger.LogWarning(
"Convergence timeout after {Attempts} attempts. Violations: {Violations}",
attempts, string.Join(", ", violations));
return new ConvergenceResult(
HasConverged: false,
Violations: [.. violations],
ConvergenceAttempts: attempts,
TimeToConverge: null);
}
/// <inheritdoc/>
public void RegisterProbe(IStateProbe probe)
{
_probes[probe.Name] = probe;
_logger.LogDebug("Registered probe '{ProbeName}'", probe.Name);
}
/// <inheritdoc/>
public void UnregisterProbe(string probeName)
{
if (_probes.Remove(probeName))
{
_logger.LogDebug("Unregistered probe '{ProbeName}'", probeName);
}
}
private List<string> CheckExpectations(
SystemStateSnapshot snapshot,
ConvergenceExpectations expectations)
{
var violations = new List<string>();
// Check all healthy requirement
if (expectations.RequireAllHealthy)
{
var unhealthy = snapshot.ProbeResults
.Where(p => !p.Value.IsHealthy)
.Select(p => p.Key)
.ToList();
if (unhealthy.Count > 0)
{
violations.Add($"Unhealthy components: {string.Join(", ", unhealthy)}");
}
}
// Check specific required healthy components
if (!expectations.RequiredHealthyComponents.IsDefaultOrEmpty)
{
foreach (var required in expectations.RequiredHealthyComponents)
{
if (!snapshot.ProbeResults.TryGetValue(required, out var result))
{
violations.Add($"Required component '{required}' not found");
}
else if (!result.IsHealthy)
{
violations.Add($"Required component '{required}' is unhealthy");
}
}
}
// Check for anomalies
var allAnomalies = snapshot.ProbeResults
.SelectMany(p => p.Value.Anomalies)
.ToList();
if (allAnomalies.Count > 0 && expectations.RequireNoOrphanedResources)
{
var orphanAnomalies = allAnomalies
.Where(a => a.Contains("orphan", StringComparison.OrdinalIgnoreCase))
.ToList();
if (orphanAnomalies.Count > 0)
{
violations.Add($"Orphaned resources detected: {string.Join(", ", orphanAnomalies)}");
}
}
// Check metric validators
if (expectations.MetricValidators is not null)
{
foreach (var (metricName, validator) in expectations.MetricValidators)
{
var metricValue = snapshot.ProbeResults
.SelectMany(p => p.Value.Metrics)
.FirstOrDefault(m => m.Key == metricName);
if (metricValue.Value is not null && !validator(metricValue.Value))
{
violations.Add($"Metric '{metricName}' failed validation");
}
}
}
return violations;
}
}
/// <summary>
/// Health check probe for components managed by failure injectors.
/// </summary>
public sealed class ComponentHealthProbe : IStateProbe
{
private readonly FailureInjectorRegistry _registry;
private readonly string _componentId;
/// <summary>
/// Initializes a new instance of the <see cref="ComponentHealthProbe"/> class.
/// </summary>
/// <param name="registry">Failure injector registry.</param>
/// <param name="componentId">Component to monitor.</param>
public ComponentHealthProbe(FailureInjectorRegistry registry, string componentId)
{
_registry = registry;
_componentId = componentId;
}
/// <inheritdoc/>
public string Name => $"component:{_componentId}";
/// <inheritdoc/>
public async Task<ProbeResult> ProbeAsync(CancellationToken ct = default)
{
var injector = _registry.GetOrCreateInjector(_componentId);
var health = await injector.GetHealthAsync(_componentId, ct);
return new ProbeResult(
IsHealthy: health.IsHealthy,
Metrics: health.Metrics,
Anomalies: health.LastError is not null
? [health.LastError]
: []);
}
}
/// <summary>
/// Custom probe that executes a delegate.
/// </summary>
public sealed class DelegateProbe : IStateProbe
{
private readonly Func<CancellationToken, Task<ProbeResult>> _probeFunc;
/// <summary>
/// Initializes a new instance of the <see cref="DelegateProbe"/> class.
/// </summary>
/// <param name="name">Probe name.</param>
/// <param name="probeFunc">Probe function.</param>
public DelegateProbe(string name, Func<CancellationToken, Task<ProbeResult>> probeFunc)
{
Name = name;
_probeFunc = probeFunc;
}
/// <inheritdoc/>
public string Name { get; }
/// <inheritdoc/>
public Task<ProbeResult> ProbeAsync(CancellationToken ct = default)
{
return _probeFunc(ct);
}
}
/// <summary>
/// Aggregates multiple probes into a single logical probe.
/// </summary>
public sealed class AggregateProbe : IStateProbe
{
private readonly IReadOnlyList<IStateProbe> _probes;
/// <summary>
/// Initializes a new instance of the <see cref="AggregateProbe"/> class.
/// </summary>
/// <param name="name">Probe name.</param>
/// <param name="probes">Probes to aggregate.</param>
public AggregateProbe(string name, IReadOnlyList<IStateProbe> probes)
{
Name = name;
_probes = probes;
}
/// <inheritdoc/>
public string Name { get; }
/// <inheritdoc/>
public async Task<ProbeResult> ProbeAsync(CancellationToken ct = default)
{
var isHealthy = true;
var metrics = new Dictionary<string, object>();
var anomalies = new List<string>();
foreach (var probe in _probes)
{
var result = await probe.ProbeAsync(ct);
isHealthy = isHealthy && result.IsHealthy;
foreach (var (key, value) in result.Metrics)
{
metrics[$"{probe.Name}:{key}"] = value;
}
foreach (var anomaly in result.Anomalies)
{
anomalies.Add($"{probe.Name}: {anomaly}");
}
}
return new ProbeResult(
IsHealthy: isHealthy,
Metrics: metrics.ToImmutableDictionary(),
Anomalies: [.. anomalies]);
}
}
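IStateProbe is the extension point for domain-specific convergence checks beyond the built-in probes above. A hypothetical probe wrapping an HTTP health endpoint — the HttpClient wiring and the endpoint are assumptions for illustration, not part of this commit:
/// <summary>
/// Hypothetical probe that treats a non-2xx health endpoint response as unhealthy.
/// </summary>
public sealed class HttpHealthProbe : IStateProbe
{
    private readonly HttpClient _client;
    private readonly Uri _healthEndpoint;
    public HttpHealthProbe(HttpClient client, Uri healthEndpoint)
    {
        _client = client;
        _healthEndpoint = healthEndpoint;
    }
    /// <inheritdoc/>
    public string Name => $"http:{_healthEndpoint}";
    /// <inheritdoc/>
    public async Task<ProbeResult> ProbeAsync(CancellationToken ct = default)
    {
        try
        {
            using var response = await _client.GetAsync(_healthEndpoint, ct);
            return new ProbeResult(
                IsHealthy: response.IsSuccessStatusCode,
                Metrics: ImmutableDictionary<string, object>.Empty
                    .Add("status_code", (int)response.StatusCode),
                Anomalies: response.IsSuccessStatusCode
                    ? []
                    : [$"Health endpoint returned {(int)response.StatusCode}"]);
        }
        catch (Exception ex) when (ex is not OperationCanceledException)
        {
            return new ProbeResult(
                IsHealthy: false,
                Metrics: ImmutableDictionary<string, object>.Empty,
                Anomalies: [$"Health check failed: {ex.Message}"]);
        }
    }
}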


@@ -0,0 +1,278 @@
// <copyright file="IFailureInjector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_TEST_failure_choreography
// Task: FCHR-004, FCHR-005, FCHR-006
using System.Collections.Concurrent;
using System.Collections.Immutable;
namespace StellaOps.Testing.Chaos;
/// <summary>
/// Injects failures into a specific component type.
/// </summary>
public interface IFailureInjector
{
/// <summary>
/// Gets the component type this injector handles.
/// </summary>
string ComponentType { get; }
/// <summary>
/// Inject a failure into the specified component.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="failureType">Type of failure to inject.</param>
/// <param name="ct">Cancellation token.</param>
Task InjectAsync(string componentId, FailureType failureType, CancellationToken ct = default);
/// <summary>
/// Recover a component from failure.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="ct">Cancellation token.</param>
Task RecoverAsync(string componentId, CancellationToken ct = default);
/// <summary>
/// Get the health status of a component.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Component health status.</returns>
Task<ComponentHealth> GetHealthAsync(string componentId, CancellationToken ct = default);
}
/// <summary>
/// Base class for failure injectors with common functionality.
/// </summary>
public abstract class FailureInjectorBase : IFailureInjector
{
/// <summary>
/// Active failures by component ID.
/// </summary>
protected readonly ConcurrentDictionary<string, FailureType> ActiveFailures = new();
/// <summary>
/// Last error by component ID.
/// </summary>
protected readonly ConcurrentDictionary<string, string?> LastErrors = new();
/// <inheritdoc/>
public abstract string ComponentType { get; }
/// <inheritdoc/>
public virtual Task InjectAsync(string componentId, FailureType failureType, CancellationToken ct = default)
{
ActiveFailures[componentId] = failureType;
return Task.CompletedTask;
}
/// <inheritdoc/>
public virtual Task RecoverAsync(string componentId, CancellationToken ct = default)
{
ActiveFailures.TryRemove(componentId, out _);
LastErrors.TryRemove(componentId, out _);
return Task.CompletedTask;
}
/// <inheritdoc/>
public virtual Task<ComponentHealth> GetHealthAsync(string componentId, CancellationToken ct = default)
{
var hasFailure = ActiveFailures.TryGetValue(componentId, out var failureType);
LastErrors.TryGetValue(componentId, out var lastError);
return Task.FromResult(new ComponentHealth(
ComponentId: componentId,
IsHealthy: !hasFailure || failureType == FailureType.None,
CurrentFailure: hasFailure ? failureType : FailureType.None,
LastError: lastError,
Metrics: GetComponentMetrics(componentId)));
}
/// <summary>
/// Get component-specific metrics.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <returns>Metrics dictionary.</returns>
protected virtual ImmutableDictionary<string, object> GetComponentMetrics(string componentId)
{
return ImmutableDictionary<string, object>.Empty;
}
/// <summary>
/// Check if a failure is currently active for a component.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <returns>True if failure is active.</returns>
protected bool IsFailureActive(string componentId)
{
return ActiveFailures.TryGetValue(componentId, out var ft) && ft != FailureType.None;
}
/// <summary>
/// Get the current failure type for a component.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <returns>Current failure type.</returns>
protected FailureType GetCurrentFailure(string componentId)
{
return ActiveFailures.TryGetValue(componentId, out var ft) ? ft : FailureType.None;
}
/// <summary>
/// Gets the IDs of all components with active failures.
/// </summary>
/// <returns>Collection of component IDs with active failures.</returns>
public IReadOnlyCollection<string> GetActiveFailureIds()
{
return ActiveFailures.Keys.ToList().AsReadOnly();
}
}
/// <summary>
/// In-memory failure injector for testing without real infrastructure.
/// </summary>
public sealed class InMemoryFailureInjector : FailureInjectorBase
{
private readonly string _componentType;
/// <summary>
/// Initializes a new instance of the <see cref="InMemoryFailureInjector"/> class.
/// </summary>
/// <param name="componentType">The component type this injector handles.</param>
public InMemoryFailureInjector(string componentType)
{
_componentType = componentType;
}
/// <inheritdoc/>
public override string ComponentType => _componentType;
/// <summary>
/// Simulates an operation that may fail based on current injection state.
/// </summary>
/// <param name="componentId">Component identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <exception cref="InvalidOperationException">Thrown when component is unavailable.</exception>
/// <exception cref="TimeoutException">Thrown when component times out.</exception>
public async Task SimulateOperationAsync(string componentId, CancellationToken ct = default)
{
var failureType = GetCurrentFailure(componentId);
switch (failureType)
{
case FailureType.None:
// Normal operation
return;
case FailureType.Unavailable:
LastErrors[componentId] = "Component unavailable";
throw new InvalidOperationException($"{ComponentType} {componentId} is unavailable");
case FailureType.Timeout:
LastErrors[componentId] = "Operation timed out";
await Task.Delay(TimeSpan.FromSeconds(30), ct); // Will likely be cancelled
throw new TimeoutException($"{ComponentType} {componentId} timed out");
case FailureType.Intermittent:
if (Random.Shared.NextDouble() < 0.5)
{
LastErrors[componentId] = "Intermittent failure";
throw new InvalidOperationException($"{ComponentType} {componentId} failed intermittently");
}
break;
case FailureType.PartialFailure:
// Depends on operation type - caller decides
break;
case FailureType.Degraded:
// Slow but works
await Task.Delay(TimeSpan.FromMilliseconds(500), ct);
break;
case FailureType.CorruptResponse:
// Return but caller should check data validity
break;
case FailureType.Flapping:
// Alternates based on time
var tick = DateTimeOffset.UtcNow.Ticks / TimeSpan.TicksPerSecond;
if (tick % 2 == 0)
{
LastErrors[componentId] = "Component flapping (down phase)";
throw new InvalidOperationException($"{ComponentType} {componentId} is down (flapping)");
}
break;
}
}
}
/// <summary>
/// Registry of failure injectors by component type.
/// </summary>
public sealed class FailureInjectorRegistry
{
private readonly Dictionary<string, IFailureInjector> _injectors = new(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Register a failure injector.
/// </summary>
/// <param name="injector">The injector to register.</param>
public void Register(IFailureInjector injector)
{
_injectors[injector.ComponentType] = injector;
}
/// <summary>
/// Get the injector for a component type.
/// </summary>
/// <param name="componentType">The component type.</param>
/// <returns>The failure injector.</returns>
public IFailureInjector? GetInjector(string componentType)
{
return _injectors.TryGetValue(componentType, out var injector) ? injector : null;
}
/// <summary>
/// Get or create an in-memory injector for a component.
/// </summary>
/// <param name="componentId">Component identifier (used to derive type).</param>
/// <returns>A failure injector.</returns>
public IFailureInjector GetOrCreateInjector(string componentId)
{
// Extract component type from ID (e.g., "postgres-main" -> "postgres")
var componentType = componentId.Split('-', '_')[0];
if (!_injectors.TryGetValue(componentType, out var injector))
{
injector = new InMemoryFailureInjector(componentType);
_injectors[componentType] = injector;
}
return injector;
}
/// <summary>
/// Recover all components.
/// </summary>
/// <param name="ct">Cancellation token.</param>
public async Task RecoverAllAsync(CancellationToken ct = default)
{
foreach (var injector in _injectors.Values)
{
// Get all active failures and recover them
if (injector is FailureInjectorBase baseInjector)
{
var activeIds = baseInjector.GetActiveFailureIds();
foreach (var id in activeIds)
{
await injector.RecoverAsync(id, ct);
}
}
}
}
}
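
A short sketch (illustrative only, assuming `using Xunit;`) of the intended flow: derive an injector from a component id, inject a failure, observe the simulated error, and recover everything afterwards. The id "postgres-main" is an arbitrary example, not a real component name.

var registry = new FailureInjectorRegistry();
var injector = registry.GetOrCreateInjector("postgres-main"); // component type "postgres"

await injector.InjectAsync("postgres-main", FailureType.Unavailable);

// The in-memory injector surfaces the failure when the component is exercised.
var inMemory = (InMemoryFailureInjector)injector;
await Assert.ThrowsAsync<InvalidOperationException>(
    () => inMemory.SimulateOperationAsync("postgres-main"));

var health = await injector.GetHealthAsync("postgres-main");
// health.IsHealthy == false, health.CurrentFailure == FailureType.Unavailable

await registry.RecoverAllAsync(); // clears all active failures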

View File

@@ -0,0 +1,225 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_TEST_failure_choreography
// Task: FCHR-001
using System.Collections.Immutable;
namespace StellaOps.Testing.Chaos;
/// <summary>
/// Type of failure to inject into a component.
/// </summary>
public enum FailureType
{
/// <summary>
/// No failure (component working normally).
/// </summary>
None,
/// <summary>
/// Component completely unavailable.
/// </summary>
Unavailable,
/// <summary>
/// Component responds slowly, eventually times out.
/// </summary>
Timeout,
/// <summary>
/// Component fails randomly at configurable rate.
/// </summary>
Intermittent,
/// <summary>
/// Some operations fail, others succeed.
/// </summary>
PartialFailure,
/// <summary>
/// Component works but at reduced capacity/speed.
/// </summary>
Degraded,
/// <summary>
/// Component returns invalid or corrupted data.
/// </summary>
CorruptResponse,
/// <summary>
/// Component alternates between up and down rapidly.
/// </summary>
Flapping
}
/// <summary>
/// Type of choreography step.
/// </summary>
public enum StepType
{
/// <summary>
/// Inject a failure into a component.
/// </summary>
InjectFailure,
/// <summary>
/// Recover a component from failure.
/// </summary>
Recover,
/// <summary>
/// Execute an operation during the scenario.
/// </summary>
Execute,
/// <summary>
/// Assert a condition is met.
/// </summary>
Assert,
/// <summary>
/// Wait for a duration (simulated time).
/// </summary>
Wait
}
/// <summary>
/// A step in a failure choreography sequence.
/// </summary>
/// <param name="StepType">Type of step to execute.</param>
/// <param name="ComponentId">Identifier of the component involved.</param>
/// <param name="FailureType">Type of failure to inject (for InjectFailure steps).</param>
/// <param name="Delay">Delay before executing this step.</param>
public sealed record ChoreographyStep(
StepType StepType,
string ComponentId,
FailureType FailureType,
TimeSpan Delay)
{
/// <summary>
/// Gets or sets the operation to execute (for Execute steps).
/// </summary>
public Func<CancellationToken, Task>? Operation { get; init; }
/// <summary>
/// Gets or sets the condition to assert (for Assert steps).
/// </summary>
public Func<CancellationToken, Task<bool>>? Condition { get; init; }
/// <summary>
/// Gets or sets the assertion description.
/// </summary>
public string? AssertionDescription { get; init; }
}
/// <summary>
/// Result of executing a choreography step.
/// </summary>
/// <param name="ComponentId">Identifier of the component involved.</param>
/// <param name="Success">Whether the step succeeded.</param>
/// <param name="StepType">Type of step executed.</param>
/// <param name="Timestamp">When the step was executed.</param>
/// <param name="Exception">Exception if the step failed.</param>
/// <param name="IsBlocking">Whether failure of this step blocks subsequent steps.</param>
/// <param name="Duration">How long the step took.</param>
public sealed record ChoreographyStepResult(
string ComponentId,
bool Success,
StepType StepType,
DateTimeOffset Timestamp = default,
Exception? Exception = null,
bool IsBlocking = false,
TimeSpan Duration = default);
/// <summary>
/// Result of executing a complete choreography.
/// </summary>
/// <param name="Success">Whether the choreography succeeded.</param>
/// <param name="Steps">Results for each step.</param>
/// <param name="TotalDuration">Total duration of the choreography.</param>
/// <param name="ConvergenceState">Final convergence state, if captured.</param>
public sealed record ChoreographyResult(
bool Success,
ImmutableArray<ChoreographyStepResult> Steps,
TimeSpan TotalDuration,
ConvergenceState? ConvergenceState);
/// <summary>
/// State of system convergence after failure choreography.
/// </summary>
/// <param name="HasConverged">Whether the system has converged.</param>
/// <param name="HealthyComponents">List of healthy component IDs.</param>
/// <param name="UnhealthyComponents">List of unhealthy component IDs.</param>
/// <param name="Anomalies">List of detected anomalies.</param>
public sealed record ConvergenceState(
bool HasConverged,
ImmutableArray<string> HealthyComponents,
ImmutableArray<string> UnhealthyComponents,
ImmutableArray<string> Anomalies);
/// <summary>
/// Health status of a component.
/// </summary>
/// <param name="ComponentId">Component identifier.</param>
/// <param name="IsHealthy">Whether the component is healthy.</param>
/// <param name="CurrentFailure">Current failure type if any.</param>
/// <param name="LastError">Last error encountered.</param>
/// <param name="Metrics">Component-specific metrics.</param>
public sealed record ComponentHealth(
string ComponentId,
bool IsHealthy,
FailureType CurrentFailure,
string? LastError,
ImmutableDictionary<string, object> Metrics);
/// <summary>
/// Result of probing system state.
/// </summary>
/// <param name="IsHealthy">Whether the probed aspect is healthy.</param>
/// <param name="Metrics">Captured metrics.</param>
/// <param name="Anomalies">Detected anomalies.</param>
public sealed record ProbeResult(
bool IsHealthy,
ImmutableDictionary<string, object> Metrics,
ImmutableArray<string> Anomalies);
/// <summary>
/// Snapshot of system state at a point in time.
/// </summary>
/// <param name="CapturedAt">When the snapshot was taken.</param>
/// <param name="ProbeResults">Results from each probe.</param>
public sealed record SystemStateSnapshot(
DateTimeOffset CapturedAt,
ImmutableDictionary<string, ProbeResult> ProbeResults);
/// <summary>
/// Expectations for system convergence.
/// </summary>
/// <param name="RequireAllHealthy">All components must be healthy.</param>
/// <param name="RequireNoOrphanedResources">No orphaned resources allowed.</param>
/// <param name="RequireMetricsAccurate">Metrics must reflect actual state.</param>
/// <param name="RequireNoDataLoss">No data loss allowed.</param>
/// <param name="RequiredHealthyComponents">Specific components that must be healthy.</param>
/// <param name="MetricValidators">Custom metric validators.</param>
public sealed record ConvergenceExpectations(
bool RequireAllHealthy = true,
bool RequireNoOrphanedResources = true,
bool RequireMetricsAccurate = true,
bool RequireNoDataLoss = true,
ImmutableArray<string> RequiredHealthyComponents = default,
ImmutableDictionary<string, Func<object, bool>>? MetricValidators = null);
/// <summary>
/// Result of convergence verification.
/// </summary>
/// <param name="HasConverged">Whether the system has converged.</param>
/// <param name="Violations">List of expectation violations.</param>
/// <param name="ConvergenceAttempts">Number of attempts to verify convergence.</param>
/// <param name="TimeToConverge">Time taken to converge, if successful.</param>
public sealed record ConvergenceResult(
bool HasConverged,
ImmutableArray<string> Violations,
int ConvergenceAttempts = 1,
TimeSpan? TimeToConverge = null);
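
To make the step shapes concrete, here is a small hand-built sequence. The component ids are examples, and the executor that interprets these steps lives outside this file; treat the snippet as a sketch of how the records compose, not as a prescribed scenario.

var steps = new[]
{
    new ChoreographyStep(StepType.InjectFailure, "postgres-main", FailureType.Unavailable, TimeSpan.Zero),
    new ChoreographyStep(StepType.Wait, "postgres-main", FailureType.None, TimeSpan.FromSeconds(5)),
    new ChoreographyStep(StepType.Recover, "postgres-main", FailureType.None, TimeSpan.Zero),
    new ChoreographyStep(StepType.Assert, "valkey-cache", FailureType.None, TimeSpan.Zero)
    {
        Condition = _ => Task.FromResult(true), // e.g. a "cache rehydrated" check
        AssertionDescription = "cache recovers after the database outage",
    },
};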

View File

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Failure choreography and cascading resilience testing framework</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.Chaos.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Temporal\StellaOps.Testing.Temporal.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,355 @@
// <copyright file="ConfigDiffTestBase.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-019
using System.Collections.Immutable;
using System.Globalization;
using FluentAssertions;
using Microsoft.Extensions.Logging;
namespace StellaOps.Testing.ConfigDiff;
/// <summary>
/// Base class for tests that verify config changes produce expected behavioral deltas.
/// </summary>
public abstract class ConfigDiffTestBase
{
private readonly ILogger _logger;
private readonly ConfigDiffTestConfig _config;
/// <summary>
/// Initializes a new instance of the <see cref="ConfigDiffTestBase"/> class.
/// </summary>
/// <param name="config">Test configuration.</param>
/// <param name="logger">Logger instance.</param>
protected ConfigDiffTestBase(ConfigDiffTestConfig? config = null, ILogger? logger = null)
{
_config = config ?? new ConfigDiffTestConfig();
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance;
}
/// <summary>
/// Test that changing only config (no code) produces expected behavioral delta.
/// </summary>
/// <typeparam name="TConfig">Type of configuration.</typeparam>
/// <typeparam name="TBehavior">Type of behavior snapshot.</typeparam>
/// <param name="baselineConfig">Baseline configuration.</param>
/// <param name="changedConfig">Changed configuration.</param>
/// <param name="getBehavior">Function to capture behavior from configuration.</param>
/// <param name="computeDelta">Function to compute delta between behaviors.</param>
/// <param name="expectedDelta">Expected behavioral delta.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Test result.</returns>
protected async Task<ConfigDiffTestResult> TestConfigBehavioralDeltaAsync<TConfig, TBehavior>(
TConfig baselineConfig,
TConfig changedConfig,
Func<TConfig, Task<TBehavior>> getBehavior,
Func<TBehavior, TBehavior, ConfigDelta> computeDelta,
ConfigDelta expectedDelta,
CancellationToken ct = default)
where TConfig : notnull
where TBehavior : notnull
{
_logger.LogInformation("Testing config behavioral delta");
// Get behavior with baseline config
var baselineBehavior = await getBehavior(baselineConfig);
_logger.LogDebug("Captured baseline behavior");
// Get behavior with changed config
var changedBehavior = await getBehavior(changedConfig);
_logger.LogDebug("Captured changed behavior");
// Compute actual delta
var actualDelta = computeDelta(baselineBehavior, changedBehavior);
_logger.LogDebug("Computed delta: {ChangedCount} behaviors changed", actualDelta.ChangedBehaviors.Length);
// Compare expected vs actual
return AssertDeltaMatches(actualDelta, expectedDelta);
}
/// <summary>
/// Test that config change does not affect unrelated behaviors.
/// </summary>
/// <typeparam name="TConfig">Type of configuration.</typeparam>
/// <param name="baselineConfig">Baseline configuration.</param>
/// <param name="changedConfig">Changed configuration.</param>
/// <param name="changedSetting">Name of the setting that was changed.</param>
/// <param name="unrelatedBehaviors">Functions to capture behaviors that should not change.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Test result.</returns>
protected async Task<ConfigDiffTestResult> TestConfigIsolationAsync<TConfig>(
TConfig baselineConfig,
TConfig changedConfig,
string changedSetting,
IEnumerable<Func<TConfig, Task<object>>> unrelatedBehaviors,
CancellationToken ct = default)
where TConfig : notnull
{
_logger.LogInformation("Testing config isolation for setting: {Setting}", changedSetting);
var unexpectedChanges = new List<string>();
foreach (var getBehavior in unrelatedBehaviors)
{
var baselineBehavior = await getBehavior(baselineConfig);
var changedBehavior = await getBehavior(changedConfig);
try
{
// Unrelated behaviors should be identical
baselineBehavior.Should().BeEquivalentTo(changedBehavior,
$"Changing '{changedSetting}' should not affect unrelated behavior");
}
catch (Exception ex)
{
unexpectedChanges.Add($"Unexpected change in behavior: {ex.Message}");
}
}
return new ConfigDiffTestResult(
IsSuccess: unexpectedChanges.Count == 0,
ExpectedDelta: ConfigDelta.Empty,
ActualDelta: unexpectedChanges.Count > 0
? new ConfigDelta(
[.. unexpectedChanges],
[.. unexpectedChanges.Select(c => new BehaviorDelta(c, null, null, null))])
: ConfigDelta.Empty,
UnexpectedChanges: [.. unexpectedChanges],
MissingChanges: []);
}
/// <summary>
/// Assert that actual delta matches expected delta.
/// </summary>
/// <param name="actual">Actual delta.</param>
/// <param name="expected">Expected delta.</param>
/// <returns>Test result.</returns>
protected ConfigDiffTestResult AssertDeltaMatches(ConfigDelta actual, ConfigDelta expected)
{
var unexpectedChanges = new List<string>();
var missingChanges = new List<string>();
// Check for unexpected changes
foreach (var actualChange in actual.ChangedBehaviors)
{
if (_config.IgnoreBehaviors.Contains(actualChange))
{
continue;
}
if (!expected.ChangedBehaviors.Contains(actualChange))
{
unexpectedChanges.Add(actualChange);
_logger.LogWarning("Unexpected behavior change: {Behavior}", actualChange);
}
}
// Check for missing expected changes
foreach (var expectedChange in expected.ChangedBehaviors)
{
if (!actual.ChangedBehaviors.Contains(expectedChange))
{
missingChanges.Add(expectedChange);
_logger.LogWarning("Missing expected behavior change: {Behavior}", expectedChange);
}
}
// Verify actual change values match expected
foreach (var expectedDelta in expected.BehaviorDeltas)
{
var actualDelta = actual.BehaviorDeltas
.FirstOrDefault(d => d.BehaviorName == expectedDelta.BehaviorName);
if (actualDelta != null && expectedDelta.NewValue != null)
{
if (!ValuesMatch(actualDelta.NewValue, expectedDelta.NewValue))
{
unexpectedChanges.Add(
$"{expectedDelta.BehaviorName}: expected '{expectedDelta.NewValue}', got '{actualDelta.NewValue}'");
}
}
}
var isSuccess = unexpectedChanges.Count == 0 && missingChanges.Count == 0;
if (isSuccess)
{
_logger.LogInformation("Config diff test passed");
}
else
{
_logger.LogError(
"Config diff test failed: {Unexpected} unexpected, {Missing} missing",
unexpectedChanges.Count, missingChanges.Count);
}
return new ConfigDiffTestResult(
IsSuccess: isSuccess,
ExpectedDelta: expected,
ActualDelta: actual,
UnexpectedChanges: [.. unexpectedChanges],
MissingChanges: [.. missingChanges]);
}
/// <summary>
/// Compare behavior snapshot and generate delta.
/// </summary>
/// <param name="baseline">Baseline snapshot.</param>
/// <param name="changed">Changed snapshot.</param>
/// <returns>Config delta.</returns>
protected static ConfigDelta ComputeBehaviorSnapshotDelta(
BehaviorSnapshot baseline,
BehaviorSnapshot changed)
{
var changedBehaviors = new List<string>();
var deltas = new List<BehaviorDelta>();
// Find changed behaviors
foreach (var changedBehavior in changed.Behaviors)
{
var baselineBehavior = baseline.Behaviors
.FirstOrDefault(b => b.Name == changedBehavior.Name);
if (baselineBehavior == null)
{
// New behavior
changedBehaviors.Add(changedBehavior.Name);
deltas.Add(new BehaviorDelta(
changedBehavior.Name,
null,
changedBehavior.Value,
"New behavior"));
}
else if (baselineBehavior.Value != changedBehavior.Value)
{
// Changed behavior
changedBehaviors.Add(changedBehavior.Name);
deltas.Add(new BehaviorDelta(
changedBehavior.Name,
baselineBehavior.Value,
changedBehavior.Value,
null));
}
}
// Find removed behaviors
foreach (var baselineBehavior in baseline.Behaviors)
{
var changedBehavior = changed.Behaviors
.FirstOrDefault(b => b.Name == baselineBehavior.Name);
if (changedBehavior == null)
{
changedBehaviors.Add(baselineBehavior.Name);
deltas.Add(new BehaviorDelta(
baselineBehavior.Name,
baselineBehavior.Value,
null,
"Removed behavior"));
}
}
return new ConfigDelta([.. changedBehaviors], [.. deltas]);
}
/// <summary>
/// Create a behavior snapshot builder.
/// </summary>
/// <param name="configurationId">Configuration identifier.</param>
/// <returns>Behavior snapshot builder.</returns>
protected static BehaviorSnapshotBuilder CreateSnapshotBuilder(string configurationId)
{
return new BehaviorSnapshotBuilder(configurationId);
}
private bool ValuesMatch(string? actual, string? expected)
{
if (actual == expected)
{
return true;
}
if (actual == null || expected == null)
{
return false;
}
// Try numeric comparison with tolerance
if (_config.ValueComparisonTolerance > 0 &&
decimal.TryParse(actual, NumberStyles.Float, CultureInfo.InvariantCulture, out var actualNum) &&
decimal.TryParse(expected, NumberStyles.Float, CultureInfo.InvariantCulture, out var expectedNum))
{
return Math.Abs(actualNum - expectedNum) <= _config.ValueComparisonTolerance;
}
return false;
}
}
/// <summary>
/// Builder for behavior snapshots.
/// </summary>
public sealed class BehaviorSnapshotBuilder
{
private readonly string _configurationId;
private readonly List<CapturedBehavior> _behaviors = [];
private DateTimeOffset _capturedAt = DateTimeOffset.UtcNow;
/// <summary>
/// Initializes a new instance of the <see cref="BehaviorSnapshotBuilder"/> class.
/// </summary>
/// <param name="configurationId">Configuration identifier.</param>
public BehaviorSnapshotBuilder(string configurationId)
{
_configurationId = configurationId;
}
/// <summary>
/// Add a captured behavior.
/// </summary>
/// <param name="name">Behavior name.</param>
/// <param name="value">Behavior value.</param>
/// <returns>This builder for chaining.</returns>
public BehaviorSnapshotBuilder AddBehavior(string name, string value)
{
_behaviors.Add(new CapturedBehavior(name, value, _capturedAt));
return this;
}
/// <summary>
/// Add a captured behavior with object value.
/// </summary>
/// <param name="name">Behavior name.</param>
/// <param name="value">Behavior value (will be converted to string).</param>
/// <returns>This builder for chaining.</returns>
public BehaviorSnapshotBuilder AddBehavior(string name, object? value)
{
return AddBehavior(name, value?.ToString() ?? "null");
}
/// <summary>
/// Set the capture timestamp.
/// </summary>
/// <param name="capturedAt">Capture timestamp.</param>
/// <returns>This builder for chaining.</returns>
public BehaviorSnapshotBuilder WithCapturedAt(DateTimeOffset capturedAt)
{
_capturedAt = capturedAt;
return this;
}
/// <summary>
/// Build the behavior snapshot.
/// </summary>
/// <returns>Behavior snapshot.</returns>
public BehaviorSnapshot Build()
{
return new BehaviorSnapshot(
ConfigurationId: _configurationId,
Behaviors: [.. _behaviors],
CapturedAt: _capturedAt);
}
}
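
A sketch of a derived test, under stated assumptions: `using Xunit;` and FluentAssertions are available, and RateLimitConfig is a hypothetical config record invented for illustration. It verifies that one config change moves exactly one behavior and nothing else.

public sealed class RateLimitConfigDiffTests : ConfigDiffTestBase
{
    [Fact]
    public async Task RaisingLimit_ChangesOnlyMaxRequests()
    {
        var result = await TestConfigBehavioralDeltaAsync<RateLimitConfig, BehaviorSnapshot>(
            baselineConfig: new RateLimitConfig(100),
            changedConfig: new RateLimitConfig(200),
            getBehavior: cfg => Task.FromResult(
                CreateSnapshotBuilder($"rl-{cfg.MaxRequests}")
                    .AddBehavior("max_requests", cfg.MaxRequests)
                    .Build()),
            computeDelta: ComputeBehaviorSnapshotDelta,
            expectedDelta: new ConfigDelta(
                ["max_requests"],
                [new BehaviorDelta("max_requests", "100", "200", null)]));

        result.IsSuccess.Should().BeTrue();
    }

    private sealed record RateLimitConfig(int MaxRequests);
}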

View File

@@ -0,0 +1,144 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-018, CCUT-019
using System.Collections.Immutable;
namespace StellaOps.Testing.ConfigDiff;
/// <summary>
/// Delta between two configurations' behavioral outputs.
/// </summary>
/// <param name="ChangedBehaviors">Names of behaviors that changed.</param>
/// <param name="BehaviorDeltas">Detailed behavior changes.</param>
public sealed record ConfigDelta(
ImmutableArray<string> ChangedBehaviors,
ImmutableArray<BehaviorDelta> BehaviorDeltas)
{
/// <summary>
/// Gets a value indicating whether there are any changes.
/// </summary>
public bool HasChanges => ChangedBehaviors.Length > 0;
/// <summary>
/// Gets an empty delta representing no changes.
/// </summary>
public static ConfigDelta Empty { get; } = new([], []);
}
/// <summary>
/// A change in a specific behavior.
/// </summary>
/// <param name="BehaviorName">Name of the behavior that changed.</param>
/// <param name="OldValue">Previous value (null if not applicable).</param>
/// <param name="NewValue">New value (null if not applicable).</param>
/// <param name="Explanation">Human-readable explanation of the change.</param>
public sealed record BehaviorDelta(
string BehaviorName,
string? OldValue,
string? NewValue,
string? Explanation);
/// <summary>
/// Result of config-diff test.
/// </summary>
/// <param name="IsSuccess">Whether the test passed.</param>
/// <param name="ExpectedDelta">Expected configuration delta.</param>
/// <param name="ActualDelta">Actual configuration delta observed.</param>
/// <param name="UnexpectedChanges">Changes that were not expected.</param>
/// <param name="MissingChanges">Expected changes that did not occur.</param>
public sealed record ConfigDiffTestResult(
bool IsSuccess,
ConfigDelta ExpectedDelta,
ConfigDelta ActualDelta,
ImmutableArray<string> UnexpectedChanges,
ImmutableArray<string> MissingChanges);
/// <summary>
/// Configuration for config-diff testing.
/// </summary>
/// <param name="StrictMode">Whether to fail on any unexpected changes.</param>
/// <param name="IgnoreBehaviors">Behaviors to ignore in comparison.</param>
/// <param name="ValueComparisonTolerance">Tolerance for numeric value comparisons.</param>
public sealed record ConfigDiffTestConfig(
bool StrictMode = true,
ImmutableArray<string> IgnoreBehaviors = default,
decimal ValueComparisonTolerance = 0m)
{
/// <summary>
/// Gets behaviors to ignore with default empty array.
/// </summary>
public ImmutableArray<string> IgnoreBehaviors { get; init; } =
IgnoreBehaviors.IsDefault ? [] : IgnoreBehaviors;
}
/// <summary>
/// A captured behavior state.
/// </summary>
/// <param name="Name">Behavior name.</param>
/// <param name="Value">Behavior value.</param>
/// <param name="CapturedAt">When the behavior was captured.</param>
public sealed record CapturedBehavior(
string Name,
string Value,
DateTimeOffset CapturedAt);
/// <summary>
/// Complete behavior snapshot for a configuration.
/// </summary>
/// <param name="ConfigurationId">Identifier for the configuration.</param>
/// <param name="Behaviors">Captured behaviors.</param>
/// <param name="CapturedAt">When the snapshot was taken.</param>
public sealed record BehaviorSnapshot(
string ConfigurationId,
ImmutableArray<CapturedBehavior> Behaviors,
DateTimeOffset CapturedAt)
{
/// <summary>
/// Get behavior value by name.
/// </summary>
/// <param name="name">Behavior name.</param>
/// <returns>Value if found, null otherwise.</returns>
public string? GetBehaviorValue(string name)
{
return Behaviors.FirstOrDefault(b => b.Name == name)?.Value;
}
}
/// <summary>
/// Description of an expected change for documentation/auditing.
/// </summary>
/// <param name="ConfigSetting">Name of the config setting changed.</param>
/// <param name="OldConfigValue">Old config value.</param>
/// <param name="NewConfigValue">New config value.</param>
/// <param name="ExpectedBehavioralChanges">Expected behavioral impact.</param>
/// <param name="Justification">Why this change is expected.</param>
public sealed record ExpectedConfigChange(
string ConfigSetting,
string OldConfigValue,
string NewConfigValue,
ImmutableArray<string> ExpectedBehavioralChanges,
string Justification);
/// <summary>
/// Report of config-diff test suite.
/// </summary>
/// <param name="TotalTests">Total number of tests.</param>
/// <param name="PassedTests">Number of passed tests.</param>
/// <param name="FailedTests">Number of failed tests.</param>
/// <param name="Results">Individual test results.</param>
/// <param name="TotalDurationMs">Total duration in milliseconds.</param>
public sealed record ConfigDiffReport(
int TotalTests,
int PassedTests,
int FailedTests,
ImmutableArray<ConfigDiffTestResult> Results,
long TotalDurationMs)
{
/// <summary>
/// Gets a value indicating whether all tests passed.
/// </summary>
public bool IsSuccess => FailedTests == 0;
}
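
For illustration, an ExpectedConfigChange entry documenting a hypothetical setting; the setting name and behavior names are examples only, not real StellaOps configuration keys.

var change = new ExpectedConfigChange(
    ConfigSetting: "scanner.maxParallelism",   // hypothetical setting name
    OldConfigValue: "4",
    NewConfigValue: "8",
    ExpectedBehavioralChanges: ["scan_throughput", "peak_memory"],
    Justification: "Doubling parallelism should only affect throughput and peak memory.");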

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Configuration-diff testing framework for behavioral delta verification</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.ConfigDiff.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,208 @@
// <copyright file="BranchCoverageEnforcer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-014
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
namespace StellaOps.Testing.Coverage;
/// <summary>
/// Enforces minimum branch coverage and detects dead paths.
/// </summary>
public sealed class BranchCoverageEnforcer
{
private readonly CoverageReport _report;
private readonly BranchCoverageConfig _config;
private readonly ILogger _logger;
/// <summary>
/// Initializes a new instance of the <see cref="BranchCoverageEnforcer"/> class.
/// </summary>
/// <param name="report">Coverage report to analyze.</param>
/// <param name="config">Enforcement configuration.</param>
/// <param name="logger">Logger instance.</param>
public BranchCoverageEnforcer(
CoverageReport report,
BranchCoverageConfig? config = null,
ILogger? logger = null)
{
_report = report;
_config = config ?? new BranchCoverageConfig();
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance;
}
/// <summary>
/// Verify branch coverage meets minimum threshold.
/// </summary>
/// <returns>Validation result.</returns>
public CoverageValidationResult Validate()
{
var violations = new List<CoverageViolation>();
foreach (var file in _report.Files)
{
// Skip excluded files
if (IsExcluded(file.Path))
{
_logger.LogDebug("Skipping excluded file: {Path}", file.Path);
continue;
}
// Check file-level coverage
if (file.BranchCoverage < _config.MinBranchCoverage)
{
var uncoveredLines = GetUncoveredBranches(file);
violations.Add(new CoverageViolation(
FilePath: file.Path,
Type: ViolationType.InsufficientCoverage,
ActualCoverage: file.BranchCoverage,
RequiredCoverage: _config.MinBranchCoverage,
UncoveredBranches: uncoveredLines));
_logger.LogWarning(
"Insufficient coverage in {Path}: {Actual:P1} < {Required:P1}",
file.Path, file.BranchCoverage, _config.MinBranchCoverage);
}
// Detect completely uncovered branches (dead paths)
if (_config.FailOnDeadPaths)
{
var deadPaths = file.Branches
.Where(b => b.HitCount == 0 && !IsExempt(file.Path, b.Line))
.ToList();
if (deadPaths.Count > 0)
{
violations.Add(new CoverageViolation(
FilePath: file.Path,
Type: ViolationType.DeadPath,
ActualCoverage: file.BranchCoverage,
RequiredCoverage: _config.MinBranchCoverage,
UncoveredBranches: [.. deadPaths.Select(b => b.Line)]));
_logger.LogWarning(
"Dead paths found in {Path}: {Count} uncovered branches",
file.Path, deadPaths.Count);
}
}
}
return new CoverageValidationResult(
IsValid: violations.Count == 0,
Violations: [.. violations],
OverallBranchCoverage: _report.OverallBranchCoverage);
}
/// <summary>
/// Generate report of dead paths for review.
/// </summary>
/// <returns>Dead path report.</returns>
public DeadPathReport GenerateDeadPathReport()
{
var deadPaths = new List<DeadPathEntry>();
foreach (var file in _report.Files)
{
if (IsExcluded(file.Path))
{
continue;
}
foreach (var branch in file.Branches.Where(b => b.HitCount == 0))
{
var isExempt = IsExempt(file.Path, branch.Line);
var exemptionReason = isExempt ? GetExemptionReason(file.Path, branch.Line) : null;
deadPaths.Add(new DeadPathEntry(
FilePath: file.Path,
Line: branch.Line,
BranchType: branch.Type,
IsExempt: isExempt,
ExemptionReason: exemptionReason));
}
}
return new DeadPathReport(
TotalDeadPaths: deadPaths.Count,
ExemptDeadPaths: deadPaths.Count(p => p.IsExempt),
ActiveDeadPaths: deadPaths.Count(p => !p.IsExempt),
Entries: [.. deadPaths]);
}
/// <summary>
/// Get a summary of coverage by directory.
/// </summary>
/// <returns>Dictionary of directory to coverage percentage.</returns>
public IReadOnlyDictionary<string, decimal> GetCoverageByDirectory()
{
var byDirectory = new Dictionary<string, List<decimal>>();
foreach (var file in _report.Files)
{
if (IsExcluded(file.Path))
{
continue;
}
var directory = Path.GetDirectoryName(file.Path) ?? ".";
if (!byDirectory.TryGetValue(directory, out var coverages))
{
coverages = [];
byDirectory[directory] = coverages;
}
coverages.Add(file.BranchCoverage);
}
return byDirectory.ToDictionary(
kvp => kvp.Key,
kvp => kvp.Value.Count > 0 ? kvp.Value.Average() : 0m);
}
/// <summary>
/// Get files below minimum coverage threshold.
/// </summary>
/// <returns>List of files below threshold.</returns>
public IReadOnlyList<FileCoverage> GetFilesBelowThreshold()
{
return _report.Files
.Where(f => !IsExcluded(f.Path) && f.BranchCoverage < _config.MinBranchCoverage)
.OrderBy(f => f.BranchCoverage)
.ToList();
}
private ImmutableArray<int> GetUncoveredBranches(FileCoverage file)
{
return [.. file.Branches
.Where(b => b.HitCount == 0)
.Select(b => b.Line)
.Distinct()
.OrderBy(l => l)];
}
private bool IsExcluded(string filePath)
{
return _config.ExcludePatterns.Any(p => p.IsMatch(filePath));
}
private bool IsExempt(string filePath, int line)
{
return _config.Exemptions.Any(e =>
e.FilePattern.IsMatch(filePath) &&
(e.Lines.IsDefaultOrEmpty || e.Lines.Contains(line)));
}
private string? GetExemptionReason(string filePath, int line)
{
var exemption = _config.Exemptions.FirstOrDefault(e =>
e.FilePattern.IsMatch(filePath) &&
(e.Lines.IsDefaultOrEmpty || e.Lines.Contains(line)));
return exemption?.Reason;
}
}
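
A usage sketch under stated assumptions: `report` is a CoverageReport produced elsewhere (for example by the CoberturaParser added in this commit), and the Migrations exemption pattern is only an example of how exemptions might be declared.

using System.Text.RegularExpressions;

var config = new BranchCoverageConfig(
    MinBranchCoverage: 0.80m,
    FailOnDeadPaths: true,
    Exemptions:
    [
        new CoverageExemption(
            FilePattern: new Regex(@"[\\/]Migrations[\\/]", RegexOptions.Compiled),
            Lines: [],
            Reason: "Generated migrations are exercised by integration tests only.")
    ]);

var enforcer = new BranchCoverageEnforcer(report, config);
var validation = enforcer.Validate();
foreach (var violation in validation.Violations)
{
    Console.WriteLine(
        $"{violation.FilePath}: {violation.Type} ({violation.ActualCoverage:P1} < {violation.RequiredCoverage:P1})");
}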

View File

@@ -0,0 +1,164 @@
// <copyright file="CoberturaParser.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-014
using System.Collections.Immutable;
using System.Globalization;
using System.Xml.Linq;
namespace StellaOps.Testing.Coverage;
/// <summary>
/// Parses Cobertura XML coverage reports.
/// </summary>
public static class CoberturaParser
{
/// <summary>
/// Parse a Cobertura XML file.
/// </summary>
/// <param name="filePath">Path to Cobertura XML file.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Parsed coverage report.</returns>
public static async Task<CoverageReport> ParseFileAsync(string filePath, CancellationToken ct = default)
{
var xml = await File.ReadAllTextAsync(filePath, ct);
return Parse(xml);
}
/// <summary>
/// Parse a Cobertura XML string.
/// </summary>
/// <param name="xml">Cobertura XML content.</param>
/// <returns>Parsed coverage report.</returns>
public static CoverageReport Parse(string xml)
{
var doc = XDocument.Parse(xml);
var coverage = doc.Root ?? throw new InvalidOperationException("Invalid Cobertura XML: no root element");
var files = new List<FileCoverage>();
// Parse overall coverage
var lineCoverage = ParseDecimal(coverage.Attribute("line-rate")?.Value ?? "0");
var branchCoverage = ParseDecimal(coverage.Attribute("branch-rate")?.Value ?? "0");
// Parse timestamp
var timestamp = coverage.Attribute("timestamp")?.Value;
var generatedAt = timestamp != null
? DateTimeOffset.FromUnixTimeSeconds(long.Parse(timestamp, CultureInfo.InvariantCulture))
: DateTimeOffset.UtcNow;
// Parse packages -> classes -> files
foreach (var package in coverage.Descendants("package"))
{
foreach (var cls in package.Descendants("class"))
{
var fileCoverage = ParseClass(cls);
if (fileCoverage != null)
{
files.Add(fileCoverage);
}
}
}
return new CoverageReport(
Files: [.. files],
OverallLineCoverage: lineCoverage,
OverallBranchCoverage: branchCoverage,
GeneratedAt: generatedAt);
}
private static FileCoverage? ParseClass(XElement cls)
{
var filename = cls.Attribute("filename")?.Value;
if (string.IsNullOrEmpty(filename))
{
return null;
}
var lineCoverage = ParseDecimal(cls.Attribute("line-rate")?.Value ?? "0");
var branchCoverage = ParseDecimal(cls.Attribute("branch-rate")?.Value ?? "0");
var lines = new List<LineCoverageData>();
var branches = new List<BranchCoverageData>();
var linesElement = cls.Element("lines");
if (linesElement != null)
{
foreach (var line in linesElement.Elements("line"))
{
var lineNumber = int.Parse(line.Attribute("number")?.Value ?? "0", CultureInfo.InvariantCulture);
var hits = int.Parse(line.Attribute("hits")?.Value ?? "0", CultureInfo.InvariantCulture);
var isBranch = string.Equals(line.Attribute("branch")?.Value, "true", StringComparison.OrdinalIgnoreCase);
lines.Add(new LineCoverageData(
LineNumber: lineNumber,
HitCount: hits,
IsCoverable: true));
// Parse branch conditions if present
if (isBranch)
{
var conditionCoverage = line.Attribute("condition-coverage")?.Value;
var conditions = line.Element("conditions");
if (conditions != null)
{
var branchIndex = 0;
foreach (var condition in conditions.Elements("condition"))
{
var coverage = int.Parse(
(condition.Attribute("coverage")?.Value ?? "0").TrimEnd('%', ' '),
CultureInfo.InvariantCulture);
branches.Add(new BranchCoverageData(
Line: lineNumber,
BranchId: $"{lineNumber}-{branchIndex}",
Type: condition.Attribute("type")?.Value ?? "branch",
HitCount: coverage > 0 ? 1 : 0));
branchIndex++;
}
}
else if (conditionCoverage != null)
{
// Parse condition-coverage of the form "50% (1/2)" into covered/total counts
var parts = conditionCoverage.Split(['(', '/', ')'], StringSplitOptions.RemoveEmptyEntries);
if (parts.Length >= 3)
{
var covered = int.Parse(parts[1].Trim(), CultureInfo.InvariantCulture);
var total = int.Parse(parts[2].Trim(), CultureInfo.InvariantCulture);
for (int i = 0; i < total; i++)
{
branches.Add(new BranchCoverageData(
Line: lineNumber,
BranchId: $"{lineNumber}-{i}",
Type: "branch",
HitCount: i < covered ? 1 : 0));
}
}
}
}
}
}
return new FileCoverage(
Path: filename,
LineCoverage: lineCoverage,
BranchCoverage: branchCoverage,
Lines: [.. lines],
Branches: [.. branches]);
}
private static decimal ParseDecimal(string value)
{
if (decimal.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
{
return result;
}
return 0m;
}
}
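
A quick sketch tying the parser to the enforcer. The XML fragment is hand-written to mirror the attributes the parser reads (line-rate, branch-rate, branch, condition-coverage); the expected outcomes follow from the code above and should be treated as illustrative.

const string xml = """
    <coverage line-rate="0.9" branch-rate="0.5" timestamp="1767614400">
      <packages><package name="p"><classes>
        <class name="C" filename="src/C.cs" line-rate="0.9" branch-rate="0.5">
          <lines>
            <line number="10" hits="3" branch="true" condition-coverage="50% (1/2)" />
          </lines>
        </class>
      </classes></package></packages>
    </coverage>
    """;

var report = CoberturaParser.Parse(xml);
// One covered and one uncovered branch on line 10; overall branch coverage is 0.5.
var result = new BranchCoverageEnforcer(report, new BranchCoverageConfig(MinBranchCoverage: 0.6m)).Validate();
// Expect result.IsValid == false: src/C.cs is below the threshold and has a dead path.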

View File

@@ -0,0 +1,181 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-013, CCUT-014
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.Testing.Coverage;
/// <summary>
/// Coverage report for analysis.
/// </summary>
/// <param name="Files">Files with coverage data.</param>
/// <param name="OverallLineCoverage">Overall line coverage percentage.</param>
/// <param name="OverallBranchCoverage">Overall branch coverage percentage.</param>
/// <param name="GeneratedAt">When the report was generated.</param>
public sealed record CoverageReport(
ImmutableArray<FileCoverage> Files,
decimal OverallLineCoverage,
decimal OverallBranchCoverage,
DateTimeOffset GeneratedAt);
/// <summary>
/// Coverage data for a single file.
/// </summary>
/// <param name="Path">File path.</param>
/// <param name="LineCoverage">Line coverage percentage (0-1).</param>
/// <param name="BranchCoverage">Branch coverage percentage (0-1).</param>
/// <param name="Lines">Individual line coverage data.</param>
/// <param name="Branches">Individual branch coverage data.</param>
public sealed record FileCoverage(
string Path,
decimal LineCoverage,
decimal BranchCoverage,
ImmutableArray<LineCoverageData> Lines,
ImmutableArray<BranchCoverageData> Branches);
/// <summary>
/// Coverage data for a single line.
/// </summary>
/// <param name="LineNumber">Line number.</param>
/// <param name="HitCount">Number of times line was executed.</param>
/// <param name="IsCoverable">Whether line is coverable.</param>
public sealed record LineCoverageData(
int LineNumber,
int HitCount,
bool IsCoverable);
/// <summary>
/// Coverage data for a single branch.
/// </summary>
/// <param name="Line">Line number where branch occurs.</param>
/// <param name="BranchId">Branch identifier.</param>
/// <param name="Type">Type of branch (if/else, switch, etc.).</param>
/// <param name="HitCount">Number of times branch was taken.</param>
public sealed record BranchCoverageData(
int Line,
string BranchId,
string Type,
int HitCount);
/// <summary>
/// Configuration for branch coverage enforcement.
/// </summary>
/// <param name="MinBranchCoverage">Minimum required branch coverage (0-1).</param>
/// <param name="FailOnDeadPaths">Whether to fail on dead paths.</param>
/// <param name="Exemptions">Coverage exemptions.</param>
/// <param name="ExcludePatterns">File patterns to exclude from coverage analysis.</param>
public sealed record BranchCoverageConfig(
decimal MinBranchCoverage = 0.80m,
bool FailOnDeadPaths = true,
ImmutableArray<CoverageExemption> Exemptions = default,
ImmutableArray<Regex> ExcludePatterns = default)
{
/// <summary>
/// Gets exemptions with default empty array.
/// </summary>
public ImmutableArray<CoverageExemption> Exemptions { get; init; } =
Exemptions.IsDefault ? [] : Exemptions;
/// <summary>
/// Gets exclude patterns with default empty array.
/// </summary>
public ImmutableArray<Regex> ExcludePatterns { get; init; } =
ExcludePatterns.IsDefault ? GetDefaultExcludePatterns() : ExcludePatterns;
private static ImmutableArray<Regex> GetDefaultExcludePatterns()
{
return
[
new Regex(@"\.Tests\.cs$", RegexOptions.Compiled),
new Regex(@"\.Generated\.cs$", RegexOptions.Compiled),
new Regex(@"[\\/]obj[\\/]", RegexOptions.Compiled),
new Regex(@"[\\/]bin[\\/]", RegexOptions.Compiled),
new Regex(@"GlobalUsings\.cs$", RegexOptions.Compiled)
];
}
}
/// <summary>
/// A coverage exemption.
/// </summary>
/// <param name="FilePattern">Regex pattern matching file paths.</param>
/// <param name="Lines">Specific lines exempt (empty for all lines).</param>
/// <param name="Reason">Reason for exemption.</param>
public sealed record CoverageExemption(
Regex FilePattern,
ImmutableArray<int> Lines,
string Reason);
/// <summary>
/// Result of coverage validation.
/// </summary>
/// <param name="IsValid">Whether validation passed.</param>
/// <param name="Violations">List of violations found.</param>
/// <param name="OverallBranchCoverage">Overall branch coverage.</param>
public sealed record CoverageValidationResult(
bool IsValid,
ImmutableArray<CoverageViolation> Violations,
decimal OverallBranchCoverage);
/// <summary>
/// A coverage violation.
/// </summary>
/// <param name="FilePath">File with violation.</param>
/// <param name="Type">Type of violation.</param>
/// <param name="ActualCoverage">Actual coverage percentage.</param>
/// <param name="RequiredCoverage">Required coverage percentage.</param>
/// <param name="UncoveredBranches">Lines with uncovered branches.</param>
public sealed record CoverageViolation(
string FilePath,
ViolationType Type,
decimal ActualCoverage,
decimal RequiredCoverage,
ImmutableArray<int> UncoveredBranches);
/// <summary>
/// Type of coverage violation.
/// </summary>
public enum ViolationType
{
/// <summary>
/// Coverage below minimum threshold.
/// </summary>
InsufficientCoverage,
/// <summary>
/// Dead path detected (branch never taken).
/// </summary>
DeadPath
}
/// <summary>
/// A dead path entry.
/// </summary>
/// <param name="FilePath">File containing dead path.</param>
/// <param name="Line">Line number.</param>
/// <param name="BranchType">Type of branch.</param>
/// <param name="IsExempt">Whether this path is exempt.</param>
/// <param name="ExemptionReason">Reason for exemption if applicable.</param>
public sealed record DeadPathEntry(
string FilePath,
int Line,
string BranchType,
bool IsExempt,
string? ExemptionReason);
/// <summary>
/// Report of dead paths found in codebase.
/// </summary>
/// <param name="TotalDeadPaths">Total number of dead paths.</param>
/// <param name="ExemptDeadPaths">Number of exempt dead paths.</param>
/// <param name="ActiveDeadPaths">Number of active (non-exempt) dead paths.</param>
/// <param name="Entries">Individual dead path entries.</param>
public sealed record DeadPathReport(
int TotalDeadPaths,
int ExemptDeadPaths,
int ActiveDeadPaths,
ImmutableArray<DeadPathEntry> Entries);

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Branch coverage enforcement and dead-path detection framework</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.Coverage.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Evidence\StellaOps.Testing.Evidence.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,427 @@
// <copyright file="TestEvidenceServiceTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_002_TEST_trace_replay_evidence
// Task: TREP-013, TREP-014
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using Xunit;
namespace StellaOps.Testing.Evidence.Tests;
[Trait("Category", "Unit")]
public sealed class TestEvidenceServiceTests
{
private readonly FakeTimeProvider _timeProvider;
private readonly TestEvidenceService _service;
public TestEvidenceServiceTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
_service = new TestEvidenceService(
NullLogger<TestEvidenceService>.Instance,
_timeProvider);
}
[Fact]
public async Task BeginSessionAsync_CreatesSession_WithMetadata()
{
// Arrange
var metadata = CreateTestMetadata();
// Act
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
// Assert
session.Should().NotBeNull();
session.Metadata.Should().Be(metadata);
session.IsFinalized.Should().BeFalse();
session.GetResults().Should().BeEmpty();
}
[Fact]
public async Task RecordTestResultAsync_AddsResultToSession()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var result = CreateTestResult("test-1", TestOutcome.Passed);
// Act
await _service.RecordTestResultAsync(session, result, TestContext.Current.CancellationToken);
// Assert
var results = session.GetResults();
results.Should().HaveCount(1);
results[0].Should().Be(result);
}
[Fact]
public async Task RecordTestResultAsync_SupportsMultipleResults()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var results = new[]
{
CreateTestResult("test-1", TestOutcome.Passed),
CreateTestResult("test-2", TestOutcome.Failed),
CreateTestResult("test-3", TestOutcome.Skipped)
};
// Act
foreach (var result in results)
{
await _service.RecordTestResultAsync(session, result, TestContext.Current.CancellationToken);
}
// Assert
var recordedResults = session.GetResults();
recordedResults.Should().HaveCount(3);
recordedResults.Should().Contain(r => r.Outcome == TestOutcome.Passed);
recordedResults.Should().Contain(r => r.Outcome == TestOutcome.Failed);
recordedResults.Should().Contain(r => r.Outcome == TestOutcome.Skipped);
}
[Fact]
public async Task FinalizeSessionAsync_CreatesBundle_WithCorrectSummary()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-1", TestOutcome.Passed), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-2", TestOutcome.Passed), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-3", TestOutcome.Failed), TestContext.Current.CancellationToken);
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.Summary.TotalTests.Should().Be(3);
bundle.Summary.Passed.Should().Be(2);
bundle.Summary.Failed.Should().Be(1);
bundle.Summary.Skipped.Should().Be(0);
}
[Fact]
public async Task FinalizeSessionAsync_MarksSessionAsFinalized()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
// Act
await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
session.IsFinalized.Should().BeTrue();
}
[Fact]
public async Task FinalizeSessionAsync_ThrowsIfAlreadyFinalized()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Act
var act = async () => await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage("*already finalized*");
}
[Fact]
public async Task FinalizeSessionAsync_GeneratesDeterministicBundleId()
{
// Arrange
var metadata = CreateTestMetadata();
var session1 = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var session2 = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var result = CreateTestResult("test-1", TestOutcome.Passed);
await _service.RecordTestResultAsync(session1, result, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session2, result, TestContext.Current.CancellationToken);
// Act
var bundle1 = await _service.FinalizeSessionAsync(session1, TestContext.Current.CancellationToken);
var bundle2 = await _service.FinalizeSessionAsync(session2, TestContext.Current.CancellationToken);
// Assert
bundle1.BundleId.Should().Be(bundle2.BundleId);
bundle1.BundleId.Should().StartWith("teb-");
}
[Fact]
public async Task FinalizeSessionAsync_ComputesMerkleRoot()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-1", TestOutcome.Passed), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-2", TestOutcome.Failed), TestContext.Current.CancellationToken);
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.MerkleRoot.Should().NotBeNullOrEmpty();
bundle.MerkleRoot.Should().HaveLength(64); // SHA-256 hex
}
[Fact]
public async Task FinalizeSessionAsync_MerkleRootIsDeterministic()
{
// Arrange
var metadata = CreateTestMetadata();
var session1 = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var session2 = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var results = new[]
{
CreateTestResult("test-1", TestOutcome.Passed),
CreateTestResult("test-2", TestOutcome.Failed)
};
foreach (var result in results)
{
await _service.RecordTestResultAsync(session1, result, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session2, result, TestContext.Current.CancellationToken);
}
// Act
var bundle1 = await _service.FinalizeSessionAsync(session1, TestContext.Current.CancellationToken);
var bundle2 = await _service.FinalizeSessionAsync(session2, TestContext.Current.CancellationToken);
// Assert
bundle1.MerkleRoot.Should().Be(bundle2.MerkleRoot);
}
[Fact]
public async Task FinalizeSessionAsync_RecordsFinalizedTimestamp()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
var expectedTime = _timeProvider.GetUtcNow();
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.FinalizedAt.Should().Be(expectedTime);
}
[Fact]
public async Task FinalizeSessionAsync_CreatesEvidenceLockerRef()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.EvidenceLockerRef.Should().StartWith("evidence://");
bundle.EvidenceLockerRef.Should().Contain(bundle.BundleId);
}
[Fact]
public async Task GetBundleAsync_ReturnsStoredBundle()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session, CreateTestResult("test-1", TestOutcome.Passed), TestContext.Current.CancellationToken);
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Act
var retrieved = await _service.GetBundleAsync(bundle.BundleId, TestContext.Current.CancellationToken);
// Assert
retrieved.Should().NotBeNull();
retrieved!.BundleId.Should().Be(bundle.BundleId);
retrieved.MerkleRoot.Should().Be(bundle.MerkleRoot);
}
[Fact]
public async Task GetBundleAsync_ReturnsNull_WhenBundleNotFound()
{
// Act
var result = await _service.GetBundleAsync("non-existent-bundle", TestContext.Current.CancellationToken);
// Assert
result.Should().BeNull();
}
[Fact]
public async Task FinalizeSessionAsync_ComputesTotalDuration()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session,
CreateTestResult("test-1", TestOutcome.Passed, TimeSpan.FromMilliseconds(100)), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session,
CreateTestResult("test-2", TestOutcome.Passed, TimeSpan.FromMilliseconds(200)), TestContext.Current.CancellationToken);
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.Summary.TotalDuration.Should().Be(TimeSpan.FromMilliseconds(300));
}
[Fact]
public async Task FinalizeSessionAsync_GroupsResultsByCategory()
{
// Arrange
var metadata = CreateTestMetadata();
var session = await _service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session,
CreateTestResultWithCategories("test-1", TestOutcome.Passed, ["Unit"]), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session,
CreateTestResultWithCategories("test-2", TestOutcome.Passed, ["Unit", "Fast"]), TestContext.Current.CancellationToken);
await _service.RecordTestResultAsync(session,
CreateTestResultWithCategories("test-3", TestOutcome.Passed, ["Integration"]), TestContext.Current.CancellationToken);
// Act
var bundle = await _service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
// Assert
bundle.Summary.ResultsByCategory.Should().ContainKey("Unit");
bundle.Summary.ResultsByCategory["Unit"].Should().Be(2);
bundle.Summary.ResultsByCategory["Fast"].Should().Be(1);
bundle.Summary.ResultsByCategory["Integration"].Should().Be(1);
}
private TestSessionMetadata CreateTestMetadata() =>
new(
SessionId: "session-1",
TestSuiteId: "suite-1",
GitCommit: "abc123",
GitBranch: "main",
RunnerEnvironment: "local",
StartedAt: _timeProvider.GetUtcNow(),
Labels: ImmutableDictionary<string, string>.Empty);
private static TestResultRecord CreateTestResult(
string testId,
TestOutcome outcome,
TimeSpan? duration = null) =>
new(
TestId: testId,
TestName: $"Test_{testId}",
TestClass: "TestClass",
Outcome: outcome,
Duration: duration ?? TimeSpan.FromMilliseconds(50),
FailureMessage: outcome == TestOutcome.Failed ? "Test failed" : null,
StackTrace: null,
Categories: [],
BlastRadiusAnnotations: [],
Attachments: ImmutableDictionary<string, string>.Empty);
private static TestResultRecord CreateTestResultWithCategories(
string testId,
TestOutcome outcome,
string[] categories) =>
new(
TestId: testId,
TestName: $"Test_{testId}",
TestClass: "TestClass",
Outcome: outcome,
Duration: TimeSpan.FromMilliseconds(50),
FailureMessage: null,
StackTrace: null,
Categories: [.. categories],
BlastRadiusAnnotations: [],
Attachments: ImmutableDictionary<string, string>.Empty);
}
[Trait("Category", "Unit")]
public sealed class TestEvidenceSessionTests
{
[Fact]
public async Task AddResult_ThrowsWhenFinalized()
{
// Arrange
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
var service = new TestEvidenceService(
NullLogger<TestEvidenceService>.Instance,
timeProvider);
var metadata = new TestSessionMetadata(
SessionId: "session-1",
TestSuiteId: "suite-1",
GitCommit: "abc123",
GitBranch: "main",
RunnerEnvironment: "local",
StartedAt: DateTimeOffset.UtcNow,
Labels: ImmutableDictionary<string, string>.Empty);
var session = await service.BeginSessionAsync(metadata, TestContext.Current.CancellationToken);
await service.FinalizeSessionAsync(session, TestContext.Current.CancellationToken);
var result = new TestResultRecord(
TestId: "test-1",
TestName: "Test_1",
TestClass: "TestClass",
Outcome: TestOutcome.Passed,
Duration: TimeSpan.FromMilliseconds(50),
FailureMessage: null,
StackTrace: null,
Categories: [],
BlastRadiusAnnotations: [],
Attachments: ImmutableDictionary<string, string>.Empty);
// Act
var act = () => session.AddResult(result);
// Assert
act.Should().Throw<InvalidOperationException>()
.WithMessage("*finalized*");
}
[Fact]
public void GetResults_ReturnsImmutableCopy()
{
// Arrange
var metadata = new TestSessionMetadata(
SessionId: "session-1",
TestSuiteId: "suite-1",
GitCommit: "abc123",
GitBranch: "main",
RunnerEnvironment: "local",
StartedAt: DateTimeOffset.UtcNow,
Labels: ImmutableDictionary<string, string>.Empty);
var session = new TestEvidenceSession(metadata);
var result = new TestResultRecord(
TestId: "test-1",
TestName: "Test_1",
TestClass: "TestClass",
Outcome: TestOutcome.Passed,
Duration: TimeSpan.FromMilliseconds(50),
FailureMessage: null,
StackTrace: null,
Categories: [],
BlastRadiusAnnotations: [],
Attachments: ImmutableDictionary<string, string>.Empty);
session.AddResult(result);
// Act
var results1 = session.GetResults();
session.AddResult(result);
var results2 = session.GetResults();
// Assert
results1.Should().HaveCount(1);
results2.Should().HaveCount(2);
}
}

View File

@@ -0,0 +1,214 @@
// <copyright file="ITestEvidenceService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_002_TEST_trace_replay_evidence
// Task: TREP-013, TREP-014
using System.Collections.Immutable;
namespace StellaOps.Testing.Evidence;
/// <summary>
/// Links test executions to EvidenceLocker for audit-grade storage.
/// </summary>
public interface ITestEvidenceService
{
/// <summary>
/// Begin a test evidence session.
/// </summary>
/// <param name="metadata">Session metadata.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The created session.</returns>
Task<TestEvidenceSession> BeginSessionAsync(
TestSessionMetadata metadata,
CancellationToken ct = default);
/// <summary>
/// Record a test result within a session.
/// </summary>
/// <param name="session">The active session.</param>
/// <param name="result">The test result to record.</param>
/// <param name="ct">Cancellation token.</param>
Task RecordTestResultAsync(
TestEvidenceSession session,
TestResultRecord result,
CancellationToken ct = default);
/// <summary>
/// Finalize session and store in EvidenceLocker.
/// </summary>
/// <param name="session">The session to finalize.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The evidence bundle.</returns>
Task<TestEvidenceBundle> FinalizeSessionAsync(
TestEvidenceSession session,
CancellationToken ct = default);
/// <summary>
/// Retrieve test evidence bundle for audit.
/// </summary>
/// <param name="bundleId">The bundle identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The evidence bundle, or null if not found.</returns>
Task<TestEvidenceBundle?> GetBundleAsync(
string bundleId,
CancellationToken ct = default);
}
/// <summary>
/// Metadata about a test session.
/// </summary>
/// <param name="SessionId">Unique session identifier.</param>
/// <param name="TestSuiteId">Identifier for the test suite.</param>
/// <param name="GitCommit">Git commit hash.</param>
/// <param name="GitBranch">Git branch name.</param>
/// <param name="RunnerEnvironment">Description of the runner environment.</param>
/// <param name="StartedAt">When the session started.</param>
/// <param name="Labels">Additional labels.</param>
public sealed record TestSessionMetadata(
string SessionId,
string TestSuiteId,
string GitCommit,
string GitBranch,
string RunnerEnvironment,
DateTimeOffset StartedAt,
ImmutableDictionary<string, string> Labels);
/// <summary>
/// A recorded test result.
/// </summary>
/// <param name="TestId">Unique test identifier.</param>
/// <param name="TestName">Test method name.</param>
/// <param name="TestClass">Test class name.</param>
/// <param name="Outcome">Test outcome.</param>
/// <param name="Duration">Test duration.</param>
/// <param name="FailureMessage">Failure message, if failed.</param>
/// <param name="StackTrace">Stack trace, if failed.</param>
/// <param name="Categories">Test categories.</param>
/// <param name="BlastRadiusAnnotations">Blast radius annotations.</param>
/// <param name="Attachments">Attached file references.</param>
public sealed record TestResultRecord(
string TestId,
string TestName,
string TestClass,
TestOutcome Outcome,
TimeSpan Duration,
string? FailureMessage,
string? StackTrace,
ImmutableArray<string> Categories,
ImmutableArray<string> BlastRadiusAnnotations,
ImmutableDictionary<string, string> Attachments);
/// <summary>
/// Test outcome.
/// </summary>
public enum TestOutcome
{
Passed,
Failed,
Skipped,
Inconclusive
}
/// <summary>
/// A finalized test evidence bundle.
/// </summary>
/// <param name="BundleId">Unique bundle identifier.</param>
/// <param name="MerkleRoot">Merkle root for integrity verification.</param>
/// <param name="Metadata">Session metadata.</param>
/// <param name="Summary">Test summary.</param>
/// <param name="Results">All test results.</param>
/// <param name="FinalizedAt">When the bundle was finalized.</param>
/// <param name="EvidenceLockerRef">Reference to EvidenceLocker storage.</param>
public sealed record TestEvidenceBundle(
string BundleId,
string MerkleRoot,
TestSessionMetadata Metadata,
TestSummary Summary,
ImmutableArray<TestResultRecord> Results,
DateTimeOffset FinalizedAt,
string EvidenceLockerRef);
/// <summary>
/// Summary of test results.
/// </summary>
/// <param name="TotalTests">Total number of tests.</param>
/// <param name="Passed">Number of passed tests.</param>
/// <param name="Failed">Number of failed tests.</param>
/// <param name="Skipped">Number of skipped tests.</param>
/// <param name="TotalDuration">Total test duration.</param>
/// <param name="ResultsByCategory">Results grouped by category.</param>
/// <param name="ResultsByBlastRadius">Results grouped by blast radius.</param>
public sealed record TestSummary(
int TotalTests,
int Passed,
int Failed,
int Skipped,
TimeSpan TotalDuration,
ImmutableDictionary<string, int> ResultsByCategory,
ImmutableDictionary<string, int> ResultsByBlastRadius);
/// <summary>
/// An active test evidence session.
/// </summary>
public sealed class TestEvidenceSession
{
private readonly List<TestResultRecord> _results = [];
private readonly object _lock = new();
/// <summary>
/// Gets the session metadata.
/// </summary>
public TestSessionMetadata Metadata { get; }
/// <summary>
/// Gets whether the session is finalized.
/// </summary>
public bool IsFinalized { get; private set; }
/// <summary>
/// Initializes a new instance of the <see cref="TestEvidenceSession"/> class.
/// </summary>
/// <param name="metadata">Session metadata.</param>
public TestEvidenceSession(TestSessionMetadata metadata)
{
Metadata = metadata;
}
/// <summary>
/// Add a test result to the session.
/// </summary>
/// <param name="result">The result to add.</param>
public void AddResult(TestResultRecord result)
{
lock (_lock)
{
// Check inside the lock so a concurrent finalization cannot race with an add.
if (IsFinalized)
{
throw new InvalidOperationException("Cannot add results to a finalized session.");
}
_results.Add(result);
}
}
/// <summary>
/// Get all results recorded in this session.
/// </summary>
/// <returns>Immutable array of results.</returns>
public ImmutableArray<TestResultRecord> GetResults()
{
lock (_lock)
{
return [.. _results];
}
}
/// <summary>
/// Mark the session as finalized.
/// </summary>
internal void MarkAsFinalized()
{
// Taken under the same lock as AddResult so the flag is observed by concurrent writers.
lock (_lock)
{
IsFinalized = true;
}
}
}
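For orientation, a minimal lifecycle sketch against this interface (begin, record, finalize, retrieve). It wires the TestEvidenceService implementation added alongside this interface; the NullLogger, identifiers, and the single recorded result are illustrative assumptions, not part of the contract.

// Illustrative sketch only: a top-level program exercising the session lifecycle.
using System;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Testing.Evidence;

ITestEvidenceService evidence = new TestEvidenceService(
    NullLogger<TestEvidenceService>.Instance,
    TimeProvider.System);

var session = await evidence.BeginSessionAsync(new TestSessionMetadata(
    SessionId: Guid.NewGuid().ToString("n"),
    TestSuiteId: "suite-docs",
    GitCommit: "abc123",
    GitBranch: "main",
    RunnerEnvironment: "local",
    StartedAt: TimeProvider.System.GetUtcNow(),
    Labels: ImmutableDictionary<string, string>.Empty));

// One record per executed test.
await evidence.RecordTestResultAsync(session, new TestResultRecord(
    TestId: "test-1",
    TestName: "Test_1",
    TestClass: "DocsExample",
    Outcome: TestOutcome.Passed,
    Duration: TimeSpan.FromMilliseconds(12),
    FailureMessage: null,
    StackTrace: null,
    Categories: ["Unit"],
    BlastRadiusAnnotations: [],
    Attachments: ImmutableDictionary<string, string>.Empty));

// Finalizing seals the session, computes the Merkle root, and stores the bundle.
var bundle = await evidence.FinalizeSessionAsync(session);
var fetched = await evidence.GetBundleAsync(bundle.BundleId); // same bundle, or null for unknown ids
Console.WriteLine($"{fetched?.BundleId}: {fetched?.Summary.Passed} passed, root {fetched?.MerkleRoot}");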

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Test evidence storage and linking to EvidenceLocker for audit-grade test artifacts</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,191 @@
// <copyright file="TestEvidenceService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Testing.Evidence;
/// <summary>
/// Default implementation of test evidence service.
/// </summary>
public sealed class TestEvidenceService : ITestEvidenceService
{
private readonly ILogger<TestEvidenceService> _logger;
private readonly TimeProvider _timeProvider;
private readonly ConcurrentDictionary<string, TestEvidenceBundle> _bundles = new();
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Initializes a new instance of the <see cref="TestEvidenceService"/> class.
/// </summary>
/// <param name="logger">Logger instance.</param>
/// <param name="timeProvider">Time provider for timestamps.</param>
public TestEvidenceService(
ILogger<TestEvidenceService> logger,
TimeProvider timeProvider)
{
_logger = logger;
_timeProvider = timeProvider;
}
/// <inheritdoc/>
public Task<TestEvidenceSession> BeginSessionAsync(
TestSessionMetadata metadata,
CancellationToken ct = default)
{
var session = new TestEvidenceSession(metadata);
_logger.LogInformation(
"Started test evidence session {SessionId} for suite {TestSuiteId}",
metadata.SessionId, metadata.TestSuiteId);
return Task.FromResult(session);
}
/// <inheritdoc/>
public Task RecordTestResultAsync(
TestEvidenceSession session,
TestResultRecord result,
CancellationToken ct = default)
{
session.AddResult(result);
_logger.LogDebug(
"Recorded test result {TestId}: {Outcome}",
result.TestId, result.Outcome);
return Task.CompletedTask;
}
/// <inheritdoc/>
public Task<TestEvidenceBundle> FinalizeSessionAsync(
TestEvidenceSession session,
CancellationToken ct = default)
{
if (session.IsFinalized)
{
throw new InvalidOperationException("Session is already finalized.");
}
session.MarkAsFinalized();
var results = session.GetResults();
var summary = ComputeSummary(results);
var merkleRoot = ComputeMerkleRoot(results);
var bundleId = GenerateBundleId(session.Metadata, merkleRoot);
var bundle = new TestEvidenceBundle(
BundleId: bundleId,
MerkleRoot: merkleRoot,
Metadata: session.Metadata,
Summary: summary,
Results: results,
FinalizedAt: _timeProvider.GetUtcNow(),
EvidenceLockerRef: $"evidence://{bundleId}");
_bundles[bundleId] = bundle;
_logger.LogInformation(
"Finalized test evidence bundle {BundleId} with {TotalTests} tests ({Passed} passed, {Failed} failed)",
bundleId, summary.TotalTests, summary.Passed, summary.Failed);
return Task.FromResult(bundle);
}
/// <inheritdoc/>
public Task<TestEvidenceBundle?> GetBundleAsync(
string bundleId,
CancellationToken ct = default)
{
_bundles.TryGetValue(bundleId, out var bundle);
return Task.FromResult(bundle);
}
private static TestSummary ComputeSummary(ImmutableArray<TestResultRecord> results)
{
var byCategory = results
.SelectMany(r => r.Categories.Select(c => (Category: c, Result: r)))
.GroupBy(x => x.Category)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var byBlastRadius = results
.SelectMany(r => r.BlastRadiusAnnotations.Select(b => (BlastRadius: b, Result: r)))
.GroupBy(x => x.BlastRadius)
.ToImmutableDictionary(g => g.Key, g => g.Count());
return new TestSummary(
TotalTests: results.Length,
Passed: results.Count(r => r.Outcome == TestOutcome.Passed),
Failed: results.Count(r => r.Outcome == TestOutcome.Failed),
Skipped: results.Count(r => r.Outcome == TestOutcome.Skipped),
TotalDuration: TimeSpan.FromTicks(results.Sum(r => r.Duration.Ticks)),
ResultsByCategory: byCategory,
ResultsByBlastRadius: byBlastRadius);
}
private static string ComputeMerkleRoot(ImmutableArray<TestResultRecord> results)
{
if (results.IsEmpty)
{
return ComputeSha256("empty");
}
// Compute leaf hashes
var leaves = results
.OrderBy(r => r.TestId)
.Select(r => ComputeResultHash(r))
.ToList();
// Build Merkle tree
while (leaves.Count > 1)
{
var newLevel = new List<string>();
for (int i = 0; i < leaves.Count; i += 2)
{
if (i + 1 < leaves.Count)
{
newLevel.Add(ComputeSha256(leaves[i] + leaves[i + 1]));
}
else
{
newLevel.Add(leaves[i]); // Odd leaf promoted
}
}
leaves = newLevel;
}
return leaves[0];
}
private static string ComputeResultHash(TestResultRecord result)
{
var json = JsonSerializer.Serialize(result, JsonOptions);
return ComputeSha256(json);
}
private static string GenerateBundleId(TestSessionMetadata metadata, string merkleRoot)
{
var input = $"{metadata.SessionId}:{metadata.TestSuiteId}:{merkleRoot}";
var hash = ComputeSha256(input);
return $"teb-{hash[..16]}";
}
private static string ComputeSha256(string input)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes).ToLowerInvariant();
}
}
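A hedged verification sketch for bundle consumers: it recomputes the root exactly as the private helpers above do (camelCase JSON per result, leaves ordered by TestId, pairwise SHA-256 over concatenated lowercase hex, an odd trailing leaf promoted unchanged), so an auditor can check integrity without this service. The class name is illustrative.

// Out-of-band verifier sketch; must stay byte-for-byte aligned with ComputeMerkleRoot above.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Testing.Evidence;

public static class BundleVerifier
{
    private static readonly JsonSerializerOptions Json = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public static bool HasValidMerkleRoot(TestEvidenceBundle bundle)
    {
        // Leaf hashes: one per result, ordered by TestId for determinism.
        var leaves = bundle.Results
            .OrderBy(r => r.TestId)
            .Select(r => Sha256(JsonSerializer.Serialize(r, Json)))
            .ToList();

        if (leaves.Count == 0)
        {
            return bundle.MerkleRoot == Sha256("empty");
        }

        // Fold pairs until a single root remains.
        while (leaves.Count > 1)
        {
            var next = new List<string>();
            for (var i = 0; i < leaves.Count; i += 2)
            {
                next.Add(i + 1 < leaves.Count ? Sha256(leaves[i] + leaves[i + 1]) : leaves[i]);
            }

            leaves = next;
        }

        return leaves[0] == bundle.MerkleRoot;
    }

    private static string Sha256(string input) =>
        Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(input))).ToLowerInvariant();
}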

View File

@@ -0,0 +1,230 @@
// <copyright file="ExplainabilityAssertions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-007, PEXP-008
using FluentAssertions;
namespace StellaOps.Testing.Explainability;
/// <summary>
/// Assertion helpers for verifying decision explainability.
/// </summary>
public static class ExplainabilityAssertions
{
/// <summary>
/// Assert that a decision has a complete explanation meeting requirements.
/// </summary>
/// <typeparam name="T">Type of the result.</typeparam>
/// <param name="result">The explained result to verify.</param>
/// <param name="requirements">Requirements the explanation must meet.</param>
public static void AssertHasExplanation<T>(
ExplainedResult<T> result,
ExplanationRequirements? requirements = null)
{
requirements ??= new ExplanationRequirements();
var explanation = result.Explanation;
explanation.Should().NotBeNull("Decision must include explanation");
explanation.DecisionId.Should().NotBeNullOrEmpty("Explanation must have ID");
explanation.DecisionType.Should().NotBeNullOrEmpty("Explanation must have decision type");
explanation.DecidedAt.Should().NotBe(default, "Explanation must have timestamp");
// Outcome requirements
explanation.Outcome.Should().NotBeNull("Explanation must have outcome");
explanation.Outcome.Value.Should().NotBeNullOrEmpty("Outcome must have value");
if (requirements.RequireHumanSummary)
{
explanation.Outcome.HumanReadableSummary.Should().NotBeNullOrEmpty(
"Outcome must include human-readable summary");
}
// Factor requirements
if (requirements.MinFactors > 0)
{
explanation.Factors.Length.Should().BeGreaterThanOrEqualTo(requirements.MinFactors,
$"Explanation must have at least {requirements.MinFactors} factors");
}
if (requirements.RequireFactorWeights)
{
foreach (var factor in explanation.Factors)
{
factor.Weight.Should().BeInRange(0, 1,
$"Factor '{factor.FactorId}' must have valid weight (0-1)");
}
}
if (requirements.RequireFactorSources)
{
foreach (var factor in explanation.Factors)
{
factor.SourceRef.Should().NotBeNullOrEmpty(
$"Factor '{factor.FactorId}' must have source reference");
}
}
// Metadata requirements
explanation.Metadata.Should().NotBeNull("Explanation must have metadata");
explanation.Metadata.EngineVersion.Should().NotBeNullOrEmpty(
"Metadata must include engine version");
if (requirements.RequireInputHashes)
{
explanation.Metadata.InputHashes.Should().NotBeEmpty(
"Metadata must include input hashes for reproducibility");
}
}
/// <summary>
/// Assert that explanation is reproducible across multiple evaluations.
/// </summary>
/// <typeparam name="TInput">Type of input.</typeparam>
/// <typeparam name="TOutput">Type of output.</typeparam>
/// <param name="service">The explainable service.</param>
/// <param name="input">Input to evaluate.</param>
/// <param name="iterations">Number of iterations to test.</param>
/// <param name="ct">Cancellation token.</param>
public static async Task AssertExplanationReproducibleAsync<TInput, TOutput>(
IExplainableDecision<TInput, TOutput> service,
TInput input,
int iterations = 3,
CancellationToken ct = default)
{
var results = new List<ExplainedResult<TOutput>>();
for (int i = 0; i < iterations; i++)
{
var result = await service.EvaluateWithExplanationAsync(input, ct);
results.Add(result);
}
// All explanations should have same factors (order may differ)
var firstFactorIds = results[0].Explanation.Factors
.Select(f => f.FactorId)
.OrderBy(id => id)
.ToList();
for (int i = 1; i < results.Count; i++)
{
var factorIds = results[i].Explanation.Factors
.Select(f => f.FactorId)
.OrderBy(id => id)
.ToList();
factorIds.Should().BeEquivalentTo(firstFactorIds,
$"Iteration {i} should have same factors as iteration 0");
}
// All explanations should reach same outcome
var firstOutcome = results[0].Explanation.Outcome.Value;
for (int i = 1; i < results.Count; i++)
{
results[i].Explanation.Outcome.Value.Should().Be(firstOutcome,
$"Iteration {i} should produce same outcome as iteration 0");
}
}
/// <summary>
/// Assert that an explanation contains a specific factor type.
/// </summary>
/// <param name="explanation">The explanation to check.</param>
/// <param name="factorType">The factor type to look for.</param>
/// <param name="minCount">Minimum number of factors of this type.</param>
public static void AssertContainsFactorType(
DecisionExplanation explanation,
string factorType,
int minCount = 1)
{
var matchingFactors = explanation.Factors
.Where(f => f.FactorType == factorType)
.ToList();
matchingFactors.Count.Should().BeGreaterThanOrEqualTo(minCount,
$"Explanation should contain at least {minCount} factor(s) of type '{factorType}'");
}
/// <summary>
/// Assert that an explanation triggered a specific rule.
/// </summary>
/// <param name="explanation">The explanation to check.</param>
/// <param name="ruleNamePattern">Pattern to match rule name.</param>
public static void AssertRuleTriggered(
DecisionExplanation explanation,
string ruleNamePattern)
{
var triggeredRule = explanation.AppliedRules
.FirstOrDefault(r => r.WasTriggered && r.RuleName.Contains(ruleNamePattern, StringComparison.OrdinalIgnoreCase));
triggeredRule.Should().NotBeNull(
$"Expected a triggered rule matching '{ruleNamePattern}'");
}
/// <summary>
/// Assert that the explanation has a valid human-readable summary.
/// </summary>
/// <param name="explanation">The explanation to check.</param>
public static void AssertHasValidSummary(DecisionExplanation explanation)
{
var summary = explanation.Outcome.HumanReadableSummary;
summary.Should().NotBeNullOrEmpty("Explanation must have summary");
summary.Should().NotContain("null", "Summary should not contain 'null'");
summary.Should().NotContain("{", "Summary should not contain JSON fragments");
summary.Should().NotContain("}", "Summary should not contain JSON fragments");
// Should start with capital letter
char.IsUpper(summary![0]).Should().BeTrue("Summary should start with capital letter");
}
/// <summary>
/// Assert that all contributing factors have valid weights that sum to approximately 1.
/// </summary>
/// <param name="explanation">The explanation to check.</param>
/// <param name="tolerance">Tolerance for weight sum (default 0.1).</param>
public static void AssertFactorWeightsValid(
DecisionExplanation explanation,
decimal tolerance = 0.1m)
{
var contributingFactors = explanation.Factors
.Where(f => f.Contribution > 0)
.ToList();
if (!contributingFactors.Any())
{
return; // No contributing factors, nothing to check
}
foreach (var factor in contributingFactors)
{
factor.Weight.Should().BeInRange(0, 1,
$"Factor '{factor.FactorId}' weight should be between 0 and 1");
}
var totalWeight = contributingFactors.Sum(f => f.Weight);
totalWeight.Should().BeApproximately(1.0m, tolerance,
"Contributing factor weights should approximately sum to 1");
}
/// <summary>
/// Assert that explanation metadata is complete for audit purposes.
/// </summary>
/// <param name="explanation">The explanation to check.</param>
public static void AssertAuditReady(DecisionExplanation explanation)
{
explanation.DecisionId.Should().NotBeNullOrEmpty("Audit requires decision ID");
explanation.DecidedAt.Should().NotBe(default, "Audit requires timestamp");
explanation.Metadata.EngineVersion.Should().NotBeNullOrEmpty("Audit requires engine version");
explanation.Metadata.PolicyVersion.Should().NotBeNullOrEmpty("Audit requires policy version");
explanation.Metadata.InputHashes.Should().NotBeEmpty("Audit requires input hashes");
// All factors should have source references for traceability
foreach (var factor in explanation.Factors)
{
factor.SourceRef.Should().NotBeNullOrEmpty(
$"Audit requires source reference for factor '{factor.FactorId}'");
}
}
}
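A small composition sketch built only from the assertions above: a reusable contract check that any IExplainableDecision implementation can be run through in a test. The helper name and the choice of checks are illustrative.

// Illustrative contract helper; no APIs beyond the assertions defined above.
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Testing.Explainability;

public static class ExplainabilityContractChecks
{
    /// <summary>
    /// Runs the standard explainability checks against any implementation and one sample input.
    /// </summary>
    public static async Task AssertExplainableContractAsync<TInput, TOutput>(
        IExplainableDecision<TInput, TOutput> service,
        TInput sampleInput,
        CancellationToken ct = default)
    {
        var result = await service.EvaluateWithExplanationAsync(sampleInput, ct);

        // Structural completeness: outcome, factors, sources.
        ExplainabilityAssertions.AssertHasExplanation(result, new ExplanationRequirements(
            RequireHumanSummary: true,
            MinFactors: 1,
            RequireFactorSources: true));

        ExplainabilityAssertions.AssertAuditReady(result.Explanation);
        ExplainabilityAssertions.AssertHasValidSummary(result.Explanation);

        // Same factors and outcome across repeated evaluations of the same input.
        await ExplainabilityAssertions.AssertExplanationReproducibleAsync(
            service, sampleInput, iterations: 3, ct: ct);
    }
}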

View File

@@ -0,0 +1,42 @@
// <copyright file="IExplainableDecision.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-003
namespace StellaOps.Testing.Explainability;
/// <summary>
/// Interface for services that produce explainable decisions.
/// </summary>
/// <typeparam name="TInput">Type of input to the decision.</typeparam>
/// <typeparam name="TOutput">Type of output from the decision.</typeparam>
public interface IExplainableDecision<TInput, TOutput>
{
/// <summary>
/// Evaluate input and produce output with explanation.
/// </summary>
/// <param name="input">The input to evaluate.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Result with explanation.</returns>
Task<ExplainedResult<TOutput>> EvaluateWithExplanationAsync(
TInput input,
CancellationToken ct = default);
}
/// <summary>
/// Marker interface for decisions that support explanation.
/// </summary>
public interface IExplainable
{
/// <summary>
/// Gets whether explanations are enabled.
/// </summary>
bool ExplanationsEnabled { get; }
/// <summary>
/// Enable or disable explanations.
/// </summary>
/// <param name="enabled">Whether to enable explanations.</param>
void SetExplanationsEnabled(bool enabled);
}
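A toy implementation sketch of IExplainableDecision showing the minimum a conforming service returns. The 7.0 threshold, the single factor, and the version strings are invented for illustration only; this is not a real policy engine.

// Toy sketch only: one fixed threshold, one invented factor.
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Testing.Explainability;

public sealed class ThresholdDecision : IExplainableDecision<decimal, string>
{
    public Task<ExplainedResult<string>> EvaluateWithExplanationAsync(
        decimal input,
        CancellationToken ct = default)
    {
        var verdict = input >= 7.0m ? "FAIL" : "PASS";

        var explanation = new DecisionExplanation(
            DecisionId: Guid.NewGuid().ToString("n"),
            DecisionType: "ThresholdVerdict",
            DecidedAt: DateTimeOffset.UtcNow,
            Outcome: new DecisionOutcome(
                Value: verdict,
                PreviousValue: null,
                Confidence: ConfidenceLevel.High,
                HumanReadableSummary: $"Score {input} compared against threshold 7.0 yields {verdict}."),
            Factors:
            [
                new ExplanationFactor(
                    FactorId: "score-threshold",
                    FactorType: "CvssScore",
                    Description: "Input score compared against a fixed threshold",
                    Weight: 1.0m,
                    Contribution: 1.0m,
                    Attributes: ImmutableDictionary<string, string>.Empty,
                    SourceRef: "config://thresholds/default")
            ],
            AppliedRules: [],
            Metadata: new ExplanationMetadata(
                EngineVersion: "0.0.1-sketch",
                PolicyVersion: "docs-example",
                InputHashes: ImmutableDictionary<string, string>.Empty.Add("input", input.ToString()),
                EvaluationDuration: TimeSpan.Zero));

        return Task.FromResult(new ExplainedResult<string>(verdict, explanation));
    }
}

Because the outcome and factor set depend only on the input, this sketch would pass the completeness, audit, and reproducibility checks composed in the contract helper above.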

View File

@@ -0,0 +1,136 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-001, PEXP-002
using System.Collections.Immutable;
namespace StellaOps.Testing.Explainability;
/// <summary>
/// Machine-readable explanation of an automated decision.
/// </summary>
/// <param name="DecisionId">Unique identifier for this decision.</param>
/// <param name="DecisionType">Type of decision (e.g., "VexConsensus", "RiskScore", "PolicyVerdict").</param>
/// <param name="DecidedAt">Timestamp when decision was made.</param>
/// <param name="Outcome">The decision outcome.</param>
/// <param name="Factors">Factors that contributed to the decision.</param>
/// <param name="AppliedRules">Rules that were applied during evaluation.</param>
/// <param name="Metadata">Additional metadata about the evaluation.</param>
public sealed record DecisionExplanation(
string DecisionId,
string DecisionType,
DateTimeOffset DecidedAt,
DecisionOutcome Outcome,
ImmutableArray<ExplanationFactor> Factors,
ImmutableArray<ExplanationRule> AppliedRules,
ExplanationMetadata Metadata);
/// <summary>
/// The outcome of a decision.
/// </summary>
/// <param name="Value">The outcome value (e.g., "not_affected", "8.5", "PASS").</param>
/// <param name="PreviousValue">Previous value for tracking changes.</param>
/// <param name="Confidence">Confidence level in the decision.</param>
/// <param name="HumanReadableSummary">Human-readable explanation of the outcome.</param>
public sealed record DecisionOutcome(
string Value,
string? PreviousValue,
ConfidenceLevel Confidence,
string? HumanReadableSummary);
/// <summary>
/// Confidence level in a decision.
/// </summary>
public enum ConfidenceLevel
{
/// <summary>Unknown confidence.</summary>
Unknown,
/// <summary>Low confidence.</summary>
Low,
/// <summary>Medium confidence.</summary>
Medium,
/// <summary>High confidence.</summary>
High,
/// <summary>Very high confidence.</summary>
VeryHigh
}
/// <summary>
/// A factor that contributed to the decision.
/// </summary>
/// <param name="FactorId">Unique identifier for this factor.</param>
/// <param name="FactorType">Type of factor (e.g., "VexStatement", "ReachabilityEvidence", "CvssScore").</param>
/// <param name="Description">Human-readable description of the factor.</param>
/// <param name="Weight">Weight of this factor (0.0 to 1.0).</param>
/// <param name="Contribution">Actual contribution to the outcome.</param>
/// <param name="Attributes">Additional attributes specific to the factor type.</param>
/// <param name="SourceRef">Reference to source document or evidence.</param>
public sealed record ExplanationFactor(
string FactorId,
string FactorType,
string Description,
decimal Weight,
decimal Contribution,
ImmutableDictionary<string, string> Attributes,
string? SourceRef);
/// <summary>
/// A rule that was applied during decision evaluation.
/// </summary>
/// <param name="RuleId">Unique identifier for the rule.</param>
/// <param name="RuleName">Human-readable name of the rule.</param>
/// <param name="RuleVersion">Version of the rule.</param>
/// <param name="WasTriggered">Whether the rule was triggered.</param>
/// <param name="TriggerReason">Reason why the rule was or was not triggered.</param>
/// <param name="Impact">Impact on the final outcome.</param>
public sealed record ExplanationRule(
string RuleId,
string RuleName,
string RuleVersion,
bool WasTriggered,
string? TriggerReason,
decimal Impact);
/// <summary>
/// Metadata about the evaluation process.
/// </summary>
/// <param name="EngineVersion">Version of the evaluation engine.</param>
/// <param name="PolicyVersion">Version of the policy used.</param>
/// <param name="InputHashes">Hashes of input data for reproducibility.</param>
/// <param name="EvaluationDuration">Time taken to evaluate.</param>
public sealed record ExplanationMetadata(
string EngineVersion,
string PolicyVersion,
ImmutableDictionary<string, string> InputHashes,
TimeSpan EvaluationDuration);
/// <summary>
/// Result wrapper that includes both the result and its explanation.
/// </summary>
/// <typeparam name="T">Type of the result.</typeparam>
/// <param name="Result">The actual result.</param>
/// <param name="Explanation">Explanation of how the result was determined.</param>
public sealed record ExplainedResult<T>(
T Result,
DecisionExplanation Explanation);
/// <summary>
/// Requirements for explanation completeness.
/// </summary>
/// <param name="RequireHumanSummary">Whether a human-readable summary is required.</param>
/// <param name="MinFactors">Minimum number of factors required.</param>
/// <param name="RequireFactorWeights">Whether all factors must have valid weights.</param>
/// <param name="RequireFactorSources">Whether all factors must have source references.</param>
/// <param name="RequireInputHashes">Whether input hashes are required for reproducibility.</param>
public sealed record ExplanationRequirements(
bool RequireHumanSummary = true,
int MinFactors = 1,
bool RequireFactorWeights = true,
bool RequireFactorSources = false,
bool RequireInputHashes = true);

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Decision explainability testing framework for policy and VEX consensus assertions</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.Explainability.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,146 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-009, PEXP-010
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.Testing.Policy;
/// <summary>
/// Represents a versioned policy configuration.
/// </summary>
/// <param name="VersionId">Unique version identifier (e.g., commit hash or version tag).</param>
/// <param name="PolicyType">Type of policy (e.g., "K4Lattice", "VexPrecedence", "RiskScoring").</param>
/// <param name="Parameters">Policy parameters.</param>
/// <param name="CreatedAt">When this version was created.</param>
public sealed record PolicyVersion(
string VersionId,
string PolicyType,
ImmutableDictionary<string, string> Parameters,
DateTimeOffset CreatedAt);
/// <summary>
/// A test input for policy evaluation.
/// </summary>
/// <param name="InputId">Unique identifier for this test input.</param>
/// <param name="Description">Human-readable description.</param>
/// <param name="Input">The actual input data.</param>
/// <param name="ExpectedOutcome">Optional expected outcome for assertion.</param>
public sealed record PolicyTestInput(
string InputId,
string Description,
object Input,
string? ExpectedOutcome = null);
/// <summary>
/// Result of evaluating a policy.
/// </summary>
/// <param name="Outcome">The outcome value.</param>
/// <param name="Score">Numeric score if applicable.</param>
/// <param name="ContributingFactors">Factors that contributed to the outcome.</param>
/// <param name="EvaluatedAt">When the evaluation occurred.</param>
public sealed record PolicyEvaluationResult(
string Outcome,
decimal Score,
ImmutableArray<string> ContributingFactors,
DateTimeOffset EvaluatedAt);
/// <summary>
/// Result of computing behavioral diff between policies.
/// </summary>
/// <param name="BaselinePolicy">The baseline policy version.</param>
/// <param name="NewPolicy">The new policy version.</param>
/// <param name="TotalInputsTested">Total number of inputs tested.</param>
/// <param name="InputsWithChangedBehavior">Number of inputs with changed behavior.</param>
/// <param name="Diffs">Individual input differences.</param>
/// <param name="Summary">Human-readable summary.</param>
public sealed record PolicyDiffResult(
PolicyVersion BaselinePolicy,
PolicyVersion NewPolicy,
int TotalInputsTested,
int InputsWithChangedBehavior,
ImmutableArray<PolicyInputDiff> Diffs,
string Summary);
/// <summary>
/// Difference in behavior for a single input.
/// </summary>
/// <param name="InputId">The input that changed.</param>
/// <param name="InputDescription">Description of the input.</param>
/// <param name="BaselineOutcome">Outcome with baseline policy.</param>
/// <param name="NewOutcome">Outcome with new policy.</param>
/// <param name="Delta">Details of the change.</param>
public sealed record PolicyInputDiff(
string InputId,
string InputDescription,
PolicyEvaluationResult BaselineOutcome,
PolicyEvaluationResult NewOutcome,
PolicyDelta Delta);
/// <summary>
/// Details of a behavioral change between policies.
/// </summary>
/// <param name="OutcomeChanged">Whether the outcome value changed.</param>
/// <param name="BaselineOutcome">Previous outcome.</param>
/// <param name="NewOutcome">New outcome.</param>
/// <param name="ScoreDelta">Change in score.</param>
/// <param name="AddedFactors">Factors added in new policy.</param>
/// <param name="RemovedFactors">Factors removed from baseline.</param>
/// <param name="ChangedFactors">Factors with changed values.</param>
public sealed record PolicyDelta(
bool OutcomeChanged,
string BaselineOutcome,
string NewOutcome,
decimal ScoreDelta,
ImmutableArray<string> AddedFactors,
ImmutableArray<string> RemovedFactors,
ImmutableArray<FactorChange> ChangedFactors);
/// <summary>
/// A change in a contributing factor.
/// </summary>
/// <param name="FactorId">Factor identifier.</param>
/// <param name="ChangeType">Type of change (e.g., "WeightChanged", "ThresholdChanged").</param>
/// <param name="OldValue">Previous value.</param>
/// <param name="NewValue">New value.</param>
public sealed record FactorChange(
string FactorId,
string ChangeType,
string OldValue,
string NewValue);
/// <summary>
/// Expected policy diff for regression testing.
/// </summary>
/// <param name="BaselineVersion">Baseline policy version.</param>
/// <param name="NewVersion">New policy version.</param>
/// <param name="ExpectedDiffs">Expected behavioral changes.</param>
public sealed record ExpectedPolicyDiff(
string BaselineVersion,
string NewVersion,
ImmutableArray<ExpectedInputChange> ExpectedDiffs);
/// <summary>
/// Expected change for a specific input.
/// </summary>
/// <param name="InputId">The input identifier.</param>
/// <param name="ExpectedOutcome">Expected new outcome.</param>
/// <param name="Justification">Why this change is expected.</param>
public sealed record ExpectedInputChange(
string InputId,
string ExpectedOutcome,
string Justification);
/// <summary>
/// Allowed policy change for regression testing.
/// </summary>
/// <param name="InputPattern">Regex pattern matching allowed input IDs.</param>
/// <param name="AllowedOutcomes">Allowed outcome values (empty means any).</param>
/// <param name="Justification">Why this change is allowed.</param>
public sealed record AllowedPolicyChange(
Regex InputPattern,
ImmutableArray<string> AllowedOutcomes,
string Justification);
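A short fixture sketch for the two regression-facing records above; the version ids, input id, outcomes, and justifications are invented placeholders showing shape only.

// Invented fixtures: shape only, no real policy semantics.
using System.Text.RegularExpressions;

namespace StellaOps.Testing.Policy;

public static class ExampleRegressionFixtures
{
    public static readonly ExpectedPolicyDiff V1ToV2 = new(
        BaselineVersion: "v1",
        NewVersion: "v2",
        ExpectedDiffs:
        [
            new ExpectedInputChange(
                InputId: "input-epss-high",
                ExpectedOutcome: "BLOCK",
                Justification: "v2 raises the EPSS weight, so this input now crosses the block threshold.")
        ]);

    public static readonly AllowedPolicyChange EpssReclassification = new(
        InputPattern: new Regex("^input-epss-.*$"),
        AllowedOutcomes: ["BLOCK", "WARN"],
        Justification: "EPSS-driven reclassification approved in the v2 policy review.");
}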

View File

@@ -0,0 +1,213 @@
// <copyright file="PolicyDiffEngine.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-010
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
namespace StellaOps.Testing.Policy;
/// <summary>
/// Computes behavioral diff between policy versions.
/// </summary>
public sealed class PolicyDiffEngine
{
private readonly IPolicyEvaluator _evaluator;
private readonly ILogger<PolicyDiffEngine> _logger;
/// <summary>
/// Initializes a new instance of the <see cref="PolicyDiffEngine"/> class.
/// </summary>
/// <param name="evaluator">Policy evaluator.</param>
/// <param name="logger">Logger instance.</param>
public PolicyDiffEngine(IPolicyEvaluator evaluator, ILogger<PolicyDiffEngine> logger)
{
_evaluator = evaluator;
_logger = logger;
}
/// <summary>
/// Compute behavioral diff for a set of test inputs.
/// </summary>
/// <param name="baselinePolicy">Baseline policy version.</param>
/// <param name="newPolicy">New policy version.</param>
/// <param name="testInputs">Test inputs to evaluate.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Policy diff result.</returns>
public async Task<PolicyDiffResult> ComputeDiffAsync(
PolicyVersion baselinePolicy,
PolicyVersion newPolicy,
IEnumerable<PolicyTestInput> testInputs,
CancellationToken ct = default)
{
var inputList = testInputs.ToList();
var diffs = new List<PolicyInputDiff>();
_logger.LogInformation(
"Computing policy diff: {BaselineVersion} -> {NewVersion}, {InputCount} inputs",
baselinePolicy.VersionId, newPolicy.VersionId, inputList.Count);
foreach (var input in inputList)
{
ct.ThrowIfCancellationRequested();
// Evaluate with baseline policy
var baselineResult = await _evaluator.EvaluateAsync(
input.Input, baselinePolicy, ct);
// Evaluate with new policy
var newResult = await _evaluator.EvaluateAsync(
input.Input, newPolicy, ct);
if (!ResultsEqual(baselineResult, newResult))
{
var delta = ComputeDelta(baselineResult, newResult);
diffs.Add(new PolicyInputDiff(
InputId: input.InputId,
InputDescription: input.Description,
BaselineOutcome: baselineResult,
NewOutcome: newResult,
Delta: delta));
_logger.LogDebug(
"Input '{InputId}' changed: {Baseline} -> {New}",
input.InputId, baselineResult.Outcome, newResult.Outcome);
}
}
var summary = GenerateSummary(baselinePolicy, newPolicy, diffs);
_logger.LogInformation(
"Policy diff complete: {ChangedCount}/{TotalCount} inputs changed",
diffs.Count, inputList.Count);
return new PolicyDiffResult(
BaselinePolicy: baselinePolicy,
NewPolicy: newPolicy,
TotalInputsTested: inputList.Count,
InputsWithChangedBehavior: diffs.Count,
Diffs: [.. diffs],
Summary: summary);
}
private static bool ResultsEqual(PolicyEvaluationResult a, PolicyEvaluationResult b)
{
return a.Outcome == b.Outcome && a.Score == b.Score;
}
private static PolicyDelta ComputeDelta(
PolicyEvaluationResult baseline,
PolicyEvaluationResult newResult)
{
var addedFactors = newResult.ContributingFactors
.Except(baseline.ContributingFactors)
.ToImmutableArray();
var removedFactors = baseline.ContributingFactors
.Except(newResult.ContributingFactors)
.ToImmutableArray();
return new PolicyDelta(
OutcomeChanged: baseline.Outcome != newResult.Outcome,
BaselineOutcome: baseline.Outcome,
NewOutcome: newResult.Outcome,
ScoreDelta: newResult.Score - baseline.Score,
AddedFactors: addedFactors,
RemovedFactors: removedFactors,
ChangedFactors: []); // Factor changes require more detailed comparison
}
private static string GenerateSummary(
PolicyVersion baseline,
PolicyVersion newPolicy,
List<PolicyInputDiff> diffs)
{
if (diffs.Count == 0)
{
return $"No behavioral changes between {baseline.VersionId} and {newPolicy.VersionId}.";
}
var outcomeChanges = diffs.Count(d => d.Delta.OutcomeChanged);
var scoreOnlyChanges = diffs.Count - outcomeChanges;
var parts = new List<string>
{
$"{diffs.Count} input(s) changed behavior"
};
if (outcomeChanges > 0)
{
parts.Add($"{outcomeChanges} outcome change(s)");
}
if (scoreOnlyChanges > 0)
{
parts.Add($"{scoreOnlyChanges} score-only change(s)");
}
return string.Join(", ", parts) + ".";
}
}
/// <summary>
/// Interface for policy evaluation.
/// </summary>
public interface IPolicyEvaluator
{
/// <summary>
/// Evaluate an input with a specific policy version.
/// </summary>
/// <param name="input">The input to evaluate.</param>
/// <param name="policy">The policy version to use.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Evaluation result.</returns>
Task<PolicyEvaluationResult> EvaluateAsync(
object input,
PolicyVersion policy,
CancellationToken ct = default);
}
/// <summary>
/// Mock policy evaluator for testing.
/// </summary>
public sealed class MockPolicyEvaluator : IPolicyEvaluator
{
private readonly Dictionary<(string inputId, string policyVersion), PolicyEvaluationResult> _results = new();
/// <summary>
/// Configure a specific result for an input/policy combination.
/// </summary>
/// <param name="inputId">Input identifier.</param>
/// <param name="policyVersion">Policy version.</param>
/// <param name="result">The result to return.</param>
public void SetResult(string inputId, string policyVersion, PolicyEvaluationResult result)
{
_results[(inputId, policyVersion)] = result;
}
/// <inheritdoc/>
public Task<PolicyEvaluationResult> EvaluateAsync(
object input,
PolicyVersion policy,
CancellationToken ct = default)
{
var inputId = input is PolicyTestInput pti ? pti.InputId :
input is string s ? s :
input?.ToString() ?? "unknown";
if (_results.TryGetValue((inputId, policy.VersionId), out var result))
{
return Task.FromResult(result);
}
// Default result if not configured
return Task.FromResult(new PolicyEvaluationResult(
Outcome: "unknown",
Score: 0m,
ContributingFactors: [],
EvaluatedAt: DateTimeOffset.UtcNow));
}
}
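A usage sketch combining the diff engine with the mock evaluator above. The input id, scores, and policy versions are invented so that exactly one input changes outcome between versions; note the engine passes PolicyTestInput.Input to the evaluator, and the mock resolves a string input back to an input id.

// Illustrative wiring only: a top-level program where one configured input disagrees between v1 and v2.
using System;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Testing.Policy;

var evaluator = new MockPolicyEvaluator();
evaluator.SetResult("input-1", "v1", new PolicyEvaluationResult("PASS", 3.0m, ["cvss"], DateTimeOffset.UtcNow));
evaluator.SetResult("input-1", "v2", new PolicyEvaluationResult("FAIL", 8.5m, ["cvss", "epss"], DateTimeOffset.UtcNow));

var engine = new PolicyDiffEngine(evaluator, NullLogger<PolicyDiffEngine>.Instance);

var baseline = new PolicyVersion("v1", "RiskScoring", ImmutableDictionary<string, string>.Empty, DateTimeOffset.UtcNow);
var updated = new PolicyVersion("v2", "RiskScoring", ImmutableDictionary<string, string>.Empty, DateTimeOffset.UtcNow);

var diff = await engine.ComputeDiffAsync(
    baseline,
    updated,
    new[] { new PolicyTestInput("input-1", "High-severity package", "input-1") });

// Expected: 1/1 inputs changed; summary reads "1 input(s) changed behavior, 1 outcome change(s)."
Console.WriteLine($"{diff.InputsWithChangedBehavior}/{diff.TotalInputsTested} changed: {diff.Summary}");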

View File

@@ -0,0 +1,190 @@
// <copyright file="PolicyRegressionTestBase.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_004_TEST_policy_explainability
// Task: PEXP-011
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
namespace StellaOps.Testing.Policy;
/// <summary>
/// Base class for policy regression tests.
/// </summary>
public abstract class PolicyRegressionTestBase
{
/// <summary>
/// Gets the policy diff engine.
/// </summary>
protected PolicyDiffEngine DiffEngine { get; private set; } = null!;
/// <summary>
/// Gets the policy evaluator.
/// </summary>
protected IPolicyEvaluator Evaluator { get; private set; } = null!;
/// <summary>
/// Initializes the test infrastructure.
/// </summary>
protected virtual void Initialize()
{
Evaluator = CreateEvaluator();
DiffEngine = new PolicyDiffEngine(
Evaluator,
NullLogger<PolicyDiffEngine>.Instance);
}
/// <summary>
/// Load a policy version by identifier.
/// </summary>
/// <param name="version">Version identifier (e.g., "v1", "previous", "current").</param>
/// <returns>Policy version.</returns>
protected abstract PolicyVersion LoadPolicy(string version);
/// <summary>
/// Get the standard test inputs for this policy type.
/// </summary>
/// <returns>Enumerable of test inputs.</returns>
protected abstract IEnumerable<PolicyTestInput> GetStandardTestInputs();
/// <summary>
/// Create the policy evaluator to use.
/// </summary>
/// <returns>Policy evaluator instance.</returns>
protected abstract IPolicyEvaluator CreateEvaluator();
/// <summary>
/// Load expected diff between two versions.
/// </summary>
/// <param name="diffId">Diff identifier (e.g., "v1-to-v2").</param>
/// <returns>Expected policy diff.</returns>
protected virtual ExpectedPolicyDiff? LoadExpectedDiff(string diffId)
{
// Default implementation returns null - subclasses can override
return null;
}
/// <summary>
/// Load allowed changes for regression testing.
/// </summary>
/// <returns>Collection of allowed changes.</returns>
protected virtual IEnumerable<AllowedPolicyChange> LoadAllowedChanges()
{
// Default: no changes allowed
return [];
}
/// <summary>
/// Assert that policy change produces only expected diffs.
/// </summary>
/// <param name="previousVersion">Previous policy version identifier.</param>
/// <param name="currentVersion">Current policy version identifier.</param>
/// <param name="expectedDiff">Expected diff (null to fail on any change).</param>
/// <param name="ct">Cancellation token.</param>
protected async Task AssertPolicyChangeProducesExpectedDiffAsync(
string previousVersion,
string currentVersion,
ExpectedPolicyDiff? expectedDiff,
CancellationToken ct = default)
{
var previousPolicy = LoadPolicy(previousVersion);
var currentPolicy = LoadPolicy(currentVersion);
var actualDiff = await DiffEngine.ComputeDiffAsync(
previousPolicy,
currentPolicy,
GetStandardTestInputs(),
ct);
if (expectedDiff is null)
{
actualDiff.InputsWithChangedBehavior.Should().Be(0,
"No behavioral changes expected");
return;
}
actualDiff.InputsWithChangedBehavior.Should().Be(
expectedDiff.ExpectedDiffs.Length,
"Number of changed inputs should match expected");
foreach (var expected in expectedDiff.ExpectedDiffs)
{
var actual = actualDiff.Diffs
.FirstOrDefault(d => d.InputId == expected.InputId);
actual.Should().NotBeNull(
$"Expected change for input '{expected.InputId}' not found");
actual!.Delta.NewOutcome.Should().Be(expected.ExpectedOutcome,
$"Outcome mismatch for input '{expected.InputId}'");
}
}
/// <summary>
/// Assert that policy change has no unexpected regressions.
/// </summary>
/// <param name="previousVersion">Previous policy version identifier.</param>
/// <param name="currentVersion">Current policy version identifier.</param>
/// <param name="ct">Cancellation token.</param>
protected async Task AssertNoUnexpectedRegressionsAsync(
string previousVersion,
string currentVersion,
CancellationToken ct = default)
{
var previousPolicy = LoadPolicy(previousVersion);
var currentPolicy = LoadPolicy(currentVersion);
var allowedChanges = LoadAllowedChanges().ToList();
var diff = await DiffEngine.ComputeDiffAsync(
previousPolicy,
currentPolicy,
GetStandardTestInputs(),
ct);
var unexpectedChanges = diff.Diffs
.Where(d => !IsChangeAllowed(d, allowedChanges))
.ToList();
unexpectedChanges.Should().BeEmpty(
$"Found unexpected policy regressions: {FormatChanges(unexpectedChanges)}");
}
/// <summary>
/// Check if a change is in the allowed list.
/// </summary>
private static bool IsChangeAllowed(
PolicyInputDiff diff,
IEnumerable<AllowedPolicyChange> allowedChanges)
{
return allowedChanges.Any(a =>
a.InputPattern.IsMatch(diff.InputId) &&
(a.AllowedOutcomes.IsDefaultOrEmpty ||
a.AllowedOutcomes.Contains(diff.Delta.NewOutcome)));
}
/// <summary>
/// Format unexpected changes for error message.
/// </summary>
private static string FormatChanges(List<PolicyInputDiff> changes)
{
if (changes.Count == 0)
{
return "none";
}
var descriptions = changes
.Take(5)
.Select(c => $"'{c.InputId}': {c.Delta.BaselineOutcome} -> {c.Delta.NewOutcome}");
var result = string.Join(", ", descriptions);
if (changes.Count > 5)
{
result += $" ... and {changes.Count - 5} more";
}
return result;
}
}
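A concrete subclass sketch showing the intended extension points. The policy versions, inputs, and the mock evaluator are stand-ins; a real suite would load versioned policy definitions and a production evaluator.

// Stand-in suite: every version resolves to the mock's defaults, so the
// no-regression assertion holds trivially; real suites load actual policies.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading.Tasks;
using Xunit;

namespace StellaOps.Testing.Policy;

public sealed class RiskScoringRegressionTests : PolicyRegressionTestBase
{
    public RiskScoringRegressionTests() => Initialize();

    protected override IPolicyEvaluator CreateEvaluator() => new MockPolicyEvaluator();

    protected override PolicyVersion LoadPolicy(string version) =>
        new(version, "RiskScoring", ImmutableDictionary<string, string>.Empty, DateTimeOffset.UtcNow);

    protected override IEnumerable<PolicyTestInput> GetStandardTestInputs() =>
        new[]
        {
            new PolicyTestInput("input-reachable-critical", "Reachable critical CVE", "input-reachable-critical"),
            new PolicyTestInput("input-unreachable-low", "Unreachable low CVE", "input-unreachable-low")
        };

    [Fact]
    public Task PreviousToCurrent_HasNoUnexpectedRegressions() =>
        AssertNoUnexpectedRegressionsAsync("previous", "current");
}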

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Policy-as-code testing framework with diff-based regression detection</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.Policy.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,508 @@
// <copyright file="ReplayTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_002_TEST_trace_replay_evidence
// Task: TREP-007, TREP-008
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Replay.Anonymization;
using StellaOps.Testing.Temporal;
using Xunit;
namespace StellaOps.Testing.Replay.Tests;
[Trait("Category", "Unit")]
public sealed class InMemoryTraceCorpusManagerTests
{
private readonly SimulatedTimeProvider _timeProvider;
private readonly InMemoryTraceCorpusManager _manager;
public InMemoryTraceCorpusManagerTests()
{
_timeProvider = new SimulatedTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
_manager = new InMemoryTraceCorpusManager(_timeProvider);
}
[Fact]
public async Task ImportAsync_CreatesCorpusEntry()
{
// Arrange
var trace = CreateSimpleTrace("trace-1");
var classification = CreateClassification(TraceCategory.Scan, TraceComplexity.Simple);
// Act
var entry = await _manager.ImportAsync(trace, classification, TestContext.Current.CancellationToken);
// Assert
entry.Should().NotBeNull();
entry.EntryId.Should().StartWith("corpus-");
entry.Trace.Should().Be(trace);
entry.Classification.Should().Be(classification);
entry.ImportedAt.Should().Be(_timeProvider.GetUtcNow());
}
[Fact]
public async Task ImportAsync_GeneratesSequentialIds()
{
// Arrange
var trace1 = CreateSimpleTrace("trace-1");
var trace2 = CreateSimpleTrace("trace-2");
var classification = CreateClassification(TraceCategory.Scan, TraceComplexity.Simple);
// Act
var entry1 = await _manager.ImportAsync(trace1, classification, TestContext.Current.CancellationToken);
var entry2 = await _manager.ImportAsync(trace2, classification, TestContext.Current.CancellationToken);
// Assert
entry1.EntryId.Should().Be("corpus-000001");
entry2.EntryId.Should().Be("corpus-000002");
}
[Fact]
public async Task QueryAsync_ReturnsAllEntries_WhenNoFilter()
{
// Arrange
var trace1 = CreateSimpleTrace("trace-1");
var trace2 = CreateSimpleTrace("trace-2");
var classification = CreateClassification(TraceCategory.Scan, TraceComplexity.Simple);
await _manager.ImportAsync(trace1, classification, TestContext.Current.CancellationToken);
await _manager.ImportAsync(trace2, classification, TestContext.Current.CancellationToken);
// Act
var results = await _manager.QueryAsync(new TraceQuery(), TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(2);
}
[Fact]
public async Task QueryAsync_FiltersByCategory()
{
// Arrange
var scanTrace = CreateSimpleTrace("scan-1");
var authTrace = CreateSimpleTrace("auth-1");
await _manager.ImportAsync(scanTrace, CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
await _manager.ImportAsync(authTrace, CreateClassification(TraceCategory.Auth, TraceComplexity.Simple), TestContext.Current.CancellationToken);
// Act
var results = await _manager.QueryAsync(
new TraceQuery(Category: TraceCategory.Scan),
TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(1);
results[0].Classification.Category.Should().Be(TraceCategory.Scan);
}
[Fact]
public async Task QueryAsync_FiltersByMinComplexity()
{
// Arrange
var simpleTrace = CreateSimpleTrace("simple-1");
var complexTrace = CreateSimpleTrace("complex-1");
await _manager.ImportAsync(simpleTrace, CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
await _manager.ImportAsync(complexTrace, CreateClassification(TraceCategory.Scan, TraceComplexity.Complex), TestContext.Current.CancellationToken);
// Act
var results = await _manager.QueryAsync(
new TraceQuery(MinComplexity: TraceComplexity.Medium),
TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(1);
results[0].Classification.Complexity.Should().Be(TraceComplexity.Complex);
}
[Fact]
public async Task QueryAsync_FiltersByRequiredTags()
{
// Arrange
var trace1 = CreateSimpleTrace("trace-1");
var trace2 = CreateSimpleTrace("trace-2");
await _manager.ImportAsync(trace1, CreateClassificationWithTags(TraceCategory.Scan, ["critical", "sbom"]), TestContext.Current.CancellationToken);
await _manager.ImportAsync(trace2, CreateClassificationWithTags(TraceCategory.Scan, ["minor"]), TestContext.Current.CancellationToken);
// Act
var results = await _manager.QueryAsync(
new TraceQuery(RequiredTags: ["critical"]),
TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(1);
results[0].Classification.Tags.Should().Contain("critical");
}
[Fact]
public async Task QueryAsync_FiltersByFailureMode()
{
// Arrange
var successTrace = CreateSimpleTrace("success-1");
var failTrace = CreateSimpleTrace("fail-1");
await _manager.ImportAsync(successTrace, CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
await _manager.ImportAsync(failTrace, CreateClassificationWithFailure(TraceCategory.Scan, "timeout"), TestContext.Current.CancellationToken);
// Act
var results = await _manager.QueryAsync(
new TraceQuery(FailureMode: "timeout"),
TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(1);
results[0].Classification.FailureMode.Should().Be("timeout");
}
[Fact]
public async Task QueryAsync_RespectsLimit()
{
// Arrange
for (int i = 0; i < 10; i++)
{
await _manager.ImportAsync(
CreateSimpleTrace($"trace-{i}"),
CreateClassification(TraceCategory.Scan, TraceComplexity.Simple),
TestContext.Current.CancellationToken);
}
// Act
var results = await _manager.QueryAsync(
new TraceQuery(Limit: 5),
TestContext.Current.CancellationToken).ToListAsync(TestContext.Current.CancellationToken);
// Assert
results.Should().HaveCount(5);
}
[Fact]
public async Task GetStatisticsAsync_ReturnsCorrectCounts()
{
// Arrange
await _manager.ImportAsync(CreateSimpleTrace("1"), CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
await _manager.ImportAsync(CreateSimpleTrace("2"), CreateClassification(TraceCategory.Scan, TraceComplexity.Complex), TestContext.Current.CancellationToken);
await _manager.ImportAsync(CreateSimpleTrace("3"), CreateClassification(TraceCategory.Auth, TraceComplexity.Simple), TestContext.Current.CancellationToken);
// Act
var stats = await _manager.GetStatisticsAsync(TestContext.Current.CancellationToken);
// Assert
stats.TotalTraces.Should().Be(3);
stats.TracesByCategory[TraceCategory.Scan].Should().Be(2);
stats.TracesByCategory[TraceCategory.Auth].Should().Be(1);
stats.TracesByComplexity[TraceComplexity.Simple].Should().Be(2);
stats.TracesByComplexity[TraceComplexity.Complex].Should().Be(1);
}
[Fact]
public async Task GetStatisticsAsync_TracksOldestAndNewest()
{
// Arrange
var firstTime = _timeProvider.GetUtcNow();
await _manager.ImportAsync(CreateSimpleTrace("1"), CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
_timeProvider.Advance(TimeSpan.FromHours(1));
var lastTime = _timeProvider.GetUtcNow();
await _manager.ImportAsync(CreateSimpleTrace("2"), CreateClassification(TraceCategory.Scan, TraceComplexity.Simple), TestContext.Current.CancellationToken);
// Act
var stats = await _manager.GetStatisticsAsync(TestContext.Current.CancellationToken);
// Assert
stats.OldestTrace.Should().Be(firstTime);
stats.NewestTrace.Should().Be(lastTime);
}
[Fact]
public async Task GetStatisticsAsync_ReturnsNullTimestamps_WhenEmpty()
{
// Act
var stats = await _manager.GetStatisticsAsync(TestContext.Current.CancellationToken);
// Assert
stats.TotalTraces.Should().Be(0);
stats.OldestTrace.Should().BeNull();
stats.NewestTrace.Should().BeNull();
}
private static AnonymizedTrace CreateSimpleTrace(string traceId)
{
return new AnonymizedTrace(
TraceId: traceId,
OriginalTraceIdHash: "hash",
CapturedAt: DateTimeOffset.UtcNow,
AnonymizedAt: DateTimeOffset.UtcNow,
Type: TraceType.Scan,
Spans: [
new AnonymizedSpan(
SpanId: "span-1",
ParentSpanId: null,
OperationName: "TestOperation",
StartTime: DateTimeOffset.UtcNow,
Duration: TimeSpan.FromMilliseconds(100),
Attributes: ImmutableDictionary<string, string>.Empty,
Events: [])
],
Manifest: new AnonymizationManifest(0, 0, 0, [], "1.0.0"),
TotalDuration: TimeSpan.FromMilliseconds(100));
}
private static TraceClassification CreateClassification(TraceCategory category, TraceComplexity complexity) =>
new(category, complexity, [], null);
private static TraceClassification CreateClassificationWithTags(TraceCategory category, string[] tags) =>
new(category, TraceComplexity.Simple, [.. tags], null);
private static TraceClassification CreateClassificationWithFailure(TraceCategory category, string failureMode) =>
new(category, TraceComplexity.Simple, [], failureMode);
}
[Trait("Category", "Unit")]
public sealed class DefaultReplayOrchestratorTests
{
private readonly SimulatedTimeProvider _timeProvider;
private readonly DefaultReplayOrchestrator _orchestrator;
public DefaultReplayOrchestratorTests()
{
_timeProvider = new SimulatedTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
_orchestrator = new DefaultReplayOrchestrator(
NullLogger<DefaultReplayOrchestrator>.Instance);
}
[Fact]
public async Task ReplayAsync_SuccessfullyReplaysTrace()
{
// Arrange
var trace = CreateSimpleTrace("trace-1");
// Act
var result = await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
result.FailureReason.Should().BeNull();
}
[Fact]
public async Task ReplayAsync_AdvancesSimulatedTime()
{
// Arrange
var startTime = _timeProvider.GetUtcNow();
var trace = CreateTraceWithDuration("trace-1", TimeSpan.FromMinutes(5));
// Act
await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
// Assert
var endTime = _timeProvider.GetUtcNow();
(endTime - startTime).Should().Be(TimeSpan.FromMinutes(5));
}
[Fact]
public async Task ReplayAsync_ComputesOutputHash()
{
// Arrange
var trace = CreateSimpleTrace("trace-1");
// Act
var result = await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
// Assert
result.OutputHash.Should().NotBeNullOrEmpty();
result.OutputHash.Should().HaveLength(64); // SHA-256 hex
}
[Fact]
public async Task ReplayAsync_OutputHashIsDeterministic()
{
// Arrange
var trace = CreateSimpleTrace("trace-1");
// Act
var result1 = await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
_timeProvider.JumpTo(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero)); // Reset time
var result2 = await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
// Assert
result1.OutputHash.Should().Be(result2.OutputHash);
}
[Fact]
public async Task ReplayAsync_ReturnsSpanResults()
{
// Arrange
var trace = CreateTraceWithMultipleSpans("trace-1", 3);
// Act
var result = await _orchestrator.ReplayAsync(trace, _timeProvider, TestContext.Current.CancellationToken);
// Assert
result.SpanResults.Should().HaveCount(3);
result.SpanResults.Should().AllSatisfy(s => s.Success.Should().BeTrue());
}
[Fact]
public async Task ReplayAsync_RespectsCancellation()
{
// Arrange
var trace = CreateTraceWithMultipleSpans("trace-1", 10);
using var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
await _orchestrator.ReplayAsync(trace, _timeProvider, cts.Token));
}
private static AnonymizedTrace CreateSimpleTrace(string traceId)
{
return new AnonymizedTrace(
TraceId: traceId,
OriginalTraceIdHash: "hash",
CapturedAt: DateTimeOffset.UtcNow,
AnonymizedAt: DateTimeOffset.UtcNow,
Type: TraceType.Scan,
Spans: [
new AnonymizedSpan(
SpanId: "span-1",
ParentSpanId: null,
OperationName: "TestOperation",
StartTime: DateTimeOffset.UtcNow,
Duration: TimeSpan.FromMilliseconds(100),
Attributes: ImmutableDictionary<string, string>.Empty,
Events: [])
],
Manifest: new AnonymizationManifest(0, 0, 0, [], "1.0.0"),
TotalDuration: TimeSpan.FromMilliseconds(100));
}
private static AnonymizedTrace CreateTraceWithDuration(string traceId, TimeSpan duration)
{
return new AnonymizedTrace(
TraceId: traceId,
OriginalTraceIdHash: "hash",
CapturedAt: DateTimeOffset.UtcNow,
AnonymizedAt: DateTimeOffset.UtcNow,
Type: TraceType.Scan,
Spans: [
new AnonymizedSpan(
SpanId: "span-1",
ParentSpanId: null,
OperationName: "TestOperation",
StartTime: DateTimeOffset.UtcNow,
Duration: duration,
Attributes: ImmutableDictionary<string, string>.Empty,
Events: [])
],
Manifest: new AnonymizationManifest(0, 0, 0, [], "1.0.0"),
TotalDuration: duration);
}
private static AnonymizedTrace CreateTraceWithMultipleSpans(string traceId, int spanCount)
{
var spans = Enumerable.Range(1, spanCount)
.Select(i => new AnonymizedSpan(
SpanId: $"span-{i}",
ParentSpanId: i > 1 ? $"span-{i - 1}" : null,
OperationName: $"Operation_{i}",
StartTime: DateTimeOffset.UtcNow,
Duration: TimeSpan.FromMilliseconds(50),
Attributes: ImmutableDictionary<string, string>.Empty,
Events: []))
.ToImmutableArray();
return new AnonymizedTrace(
TraceId: traceId,
OriginalTraceIdHash: "hash",
CapturedAt: DateTimeOffset.UtcNow,
AnonymizedAt: DateTimeOffset.UtcNow,
Type: TraceType.Scan,
Spans: spans,
Manifest: new AnonymizationManifest(0, 0, 0, [], "1.0.0"),
TotalDuration: TimeSpan.FromMilliseconds(50 * spanCount));
}
}
[Trait("Category", "Unit")]
public sealed class ReplayIntegrationTestBaseTests : ReplayIntegrationTestBase
{
[Fact]
public void Services_AreConfigured()
{
// Assert (after InitializeAsync runs)
CorpusManager.Should().NotBeNull();
ReplayOrchestrator.Should().NotBeNull();
TimeProvider.Should().NotBeNull();
Services.Should().NotBeNull();
}
[Fact]
public async Task ReplayAndVerifyAsync_SucceedsForPassingExpectation()
{
// Arrange
var trace = CreateSimpleTrace();
var entry = await CorpusManager.ImportAsync(
trace,
new TraceClassification(TraceCategory.Scan, TraceComplexity.Simple, [], null),
TestContext.Current.CancellationToken);
var expectation = new ReplayExpectation(ShouldSucceed: true);
// Act
var result = await ReplayAndVerifyAsync(entry, expectation);
// Assert
result.Success.Should().BeTrue();
}
[Fact]
public async Task ReplayBatchAsync_ProcessesMultipleTraces()
{
// Arrange
for (int i = 0; i < 5; i++)
{
await CorpusManager.ImportAsync(
CreateSimpleTrace($"trace-{i}"),
new TraceClassification(TraceCategory.Scan, TraceComplexity.Simple, [], null),
TestContext.Current.CancellationToken);
}
// Act
var batchResult = await ReplayBatchAsync(
new TraceQuery(Category: TraceCategory.Scan),
_ => new ReplayExpectation(ShouldSucceed: true));
// Assert
batchResult.TotalCount.Should().Be(5);
batchResult.PassedCount.Should().Be(5);
batchResult.PassRate.Should().Be(1.0m);
}
private static AnonymizedTrace CreateSimpleTrace(string? traceId = null)
{
return new AnonymizedTrace(
TraceId: traceId ?? "test-trace",
OriginalTraceIdHash: "hash",
CapturedAt: DateTimeOffset.UtcNow,
AnonymizedAt: DateTimeOffset.UtcNow,
Type: TraceType.Scan,
Spans: [
new AnonymizedSpan(
SpanId: "span-1",
ParentSpanId: null,
OperationName: "TestOperation",
StartTime: DateTimeOffset.UtcNow,
Duration: TimeSpan.FromMilliseconds(100),
Attributes: ImmutableDictionary<string, string>.Empty,
Events: [])
],
Manifest: new AnonymizationManifest(0, 0, 0, [], "1.0.0"),
TotalDuration: TimeSpan.FromMilliseconds(100));
}
}

View File

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Replay\StellaOps.Testing.Replay.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,59 @@
// <copyright file="IReplayOrchestrator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Replay.Anonymization;
using StellaOps.Testing.Temporal;
namespace StellaOps.Testing.Replay;
/// <summary>
/// Orchestrates replay of anonymized traces for testing.
/// </summary>
public interface IReplayOrchestrator
{
/// <summary>
/// Replay an anonymized trace.
/// </summary>
/// <param name="trace">The trace to replay.</param>
/// <param name="timeProvider">Time provider for simulated time.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The replay result.</returns>
Task<ReplayResult> ReplayAsync(
AnonymizedTrace trace,
SimulatedTimeProvider timeProvider,
CancellationToken ct = default);
}
/// <summary>
/// Result of a trace replay.
/// </summary>
/// <param name="Success">Whether replay succeeded.</param>
/// <param name="OutputHash">Hash of replay output.</param>
/// <param name="Duration">Duration of replay.</param>
/// <param name="FailureReason">Reason for failure, if any.</param>
/// <param name="Warnings">Warnings generated during replay.</param>
/// <param name="SpanResults">Results for individual spans.</param>
public sealed record ReplayResult(
bool Success,
string OutputHash,
TimeSpan Duration,
string? FailureReason,
ImmutableArray<string> Warnings,
ImmutableArray<SpanReplayResult> SpanResults);
/// <summary>
/// Result of replaying a single span.
/// </summary>
/// <param name="SpanId">The span identifier.</param>
/// <param name="Success">Whether span replay succeeded.</param>
/// <param name="Duration">Duration of span replay.</param>
/// <param name="DurationDelta">Difference from original duration.</param>
/// <param name="OutputHash">Hash of span output.</param>
public sealed record SpanReplayResult(
string SpanId,
bool Success,
TimeSpan Duration,
TimeSpan DurationDelta,
string OutputHash);
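// Usage sketch (illustrative only, not part of the interface contract): a caller resolves
// an IReplayOrchestrator (for example via AddReplayTesting), replays a trace against
// simulated time, and inspects the result. The helper class and method names below are
// invented for the example.
internal static class ReplayOrchestratorUsageSketch
{
    public static async Task<string> DescribeReplayAsync(
        IReplayOrchestrator orchestrator,
        AnonymizedTrace trace,
        SimulatedTimeProvider timeProvider,
        CancellationToken ct = default)
    {
        var result = await orchestrator.ReplayAsync(trace, timeProvider, ct);
        return result.Success
            ? $"replayed {result.SpanResults.Length} spans, output hash {result.OutputHash}"
            : $"replay failed: {result.FailureReason}";
    }
}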

View File

@@ -0,0 +1,126 @@
// <copyright file="ITraceCorpusManager.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Replay.Anonymization;
namespace StellaOps.Testing.Replay;
/// <summary>
/// Manages corpus of anonymized traces for replay testing.
/// </summary>
public interface ITraceCorpusManager
{
/// <summary>
/// Import anonymized trace into corpus.
/// </summary>
/// <param name="trace">The anonymized trace.</param>
/// <param name="classification">Classification of the trace.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The corpus entry.</returns>
Task<TraceCorpusEntry> ImportAsync(
AnonymizedTrace trace,
TraceClassification classification,
CancellationToken ct = default);
/// <summary>
/// Query traces by classification for test scenarios.
/// </summary>
/// <param name="query">The query parameters.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Matching corpus entries.</returns>
IAsyncEnumerable<TraceCorpusEntry> QueryAsync(
TraceQuery query,
CancellationToken ct = default);
/// <summary>
/// Get trace statistics for corpus health.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>Corpus statistics.</returns>
Task<TraceCorpusStatistics> GetStatisticsAsync(CancellationToken ct = default);
}
/// <summary>
/// An entry in the trace corpus.
/// </summary>
/// <param name="EntryId">Unique entry identifier.</param>
/// <param name="Trace">The anonymized trace.</param>
/// <param name="Classification">Trace classification.</param>
/// <param name="ImportedAt">When the trace was imported.</param>
/// <param name="ExpectedOutputHash">Expected output hash for determinism verification.</param>
public sealed record TraceCorpusEntry(
string EntryId,
AnonymizedTrace Trace,
TraceClassification Classification,
DateTimeOffset ImportedAt,
string? ExpectedOutputHash);
/// <summary>
/// Classification for a trace.
/// </summary>
/// <param name="Category">Trace category.</param>
/// <param name="Complexity">Trace complexity level.</param>
/// <param name="Tags">Additional tags.</param>
/// <param name="FailureMode">Expected failure mode, if any.</param>
public sealed record TraceClassification(
TraceCategory Category,
TraceComplexity Complexity,
ImmutableArray<string> Tags,
string? FailureMode);
/// <summary>
/// Category of trace.
/// </summary>
public enum TraceCategory
{
Scan,
Attestation,
VexConsensus,
Advisory,
Evidence,
Auth,
MultiModule
}
/// <summary>
/// Complexity level of a trace.
/// </summary>
public enum TraceComplexity
{
Simple,
Medium,
Complex,
EdgeCase
}
/// <summary>
/// Query parameters for trace corpus.
/// </summary>
/// <param name="Category">Filter by category.</param>
/// <param name="MinComplexity">Minimum complexity level.</param>
/// <param name="RequiredTags">Tags that must be present.</param>
/// <param name="FailureMode">Filter by failure mode.</param>
/// <param name="Limit">Maximum results to return.</param>
public sealed record TraceQuery(
TraceCategory? Category = null,
TraceComplexity? MinComplexity = null,
ImmutableArray<string> RequiredTags = default,
string? FailureMode = null,
int Limit = 100);
/// <summary>
/// Statistics about the trace corpus.
/// </summary>
/// <param name="TotalTraces">Total number of traces.</param>
/// <param name="TracesByCategory">Count by category.</param>
/// <param name="TracesByComplexity">Count by complexity.</param>
/// <param name="OldestTrace">Timestamp of oldest trace.</param>
/// <param name="NewestTrace">Timestamp of newest trace.</param>
public sealed record TraceCorpusStatistics(
int TotalTraces,
ImmutableDictionary<TraceCategory, int> TracesByCategory,
ImmutableDictionary<TraceComplexity, int> TracesByComplexity,
DateTimeOffset? OldestTrace,
DateTimeOffset? NewestTrace);
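// Usage sketch (illustrative only): import one trace, then enumerate the Scan traces that
// carry the "critical" tag. The tag value and helper names are invented for the example;
// RequiredTags is an ImmutableArray, so callers pass a collection expression rather than null.
internal static class TraceCorpusUsageSketch
{
    public static async Task<int> CountCriticalScanTracesAsync(
        ITraceCorpusManager corpus,
        AnonymizedTrace trace,
        CancellationToken ct = default)
    {
        await corpus.ImportAsync(
            trace,
            new TraceClassification(TraceCategory.Scan, TraceComplexity.Medium, ["critical"], null),
            ct);
        var count = 0;
        await foreach (var _ in corpus.QueryAsync(
            new TraceQuery(Category: TraceCategory.Scan, RequiredTags: ["critical"]),
            ct))
        {
            count++;
        }
        return count;
    }
}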

View File

@@ -0,0 +1,187 @@
// <copyright file="ReplayIntegrationTestBase.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_002_TEST_trace_replay_evidence
// Task: TREP-007, TREP-008
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Replay.Anonymization;
using StellaOps.Testing.Temporal;
using Xunit;
namespace StellaOps.Testing.Replay;
/// <summary>
/// Base class for integration tests that replay production traces.
/// </summary>
public abstract class ReplayIntegrationTestBase : IAsyncLifetime
{
/// <summary>
/// Gets the trace corpus manager.
/// </summary>
protected ITraceCorpusManager CorpusManager { get; private set; } = null!;
/// <summary>
/// Gets the replay orchestrator.
/// </summary>
protected IReplayOrchestrator ReplayOrchestrator { get; private set; } = null!;
/// <summary>
/// Gets the simulated time provider.
/// </summary>
protected SimulatedTimeProvider TimeProvider { get; private set; } = null!;
/// <summary>
/// Gets the service provider.
/// </summary>
protected IServiceProvider Services { get; private set; } = null!;
/// <inheritdoc/>
public virtual async ValueTask InitializeAsync()
{
var services = new ServiceCollection();
ConfigureServices(services);
Services = services.BuildServiceProvider();
CorpusManager = Services.GetRequiredService<ITraceCorpusManager>();
ReplayOrchestrator = Services.GetRequiredService<IReplayOrchestrator>();
TimeProvider = Services.GetRequiredService<SimulatedTimeProvider>();
await OnInitializedAsync();
}
/// <summary>
/// Configure services for the test.
/// </summary>
/// <param name="services">The service collection.</param>
protected virtual void ConfigureServices(IServiceCollection services)
{
services.AddReplayTesting();
}
/// <summary>
/// Called after initialization is complete.
/// </summary>
protected virtual Task OnInitializedAsync() => Task.CompletedTask;
/// <summary>
/// Replay a trace and verify behavior matches expected outcome.
/// </summary>
/// <param name="trace">The trace to replay.</param>
/// <param name="expectation">Expected outcome.</param>
/// <returns>The replay result.</returns>
protected async Task<ReplayResult> ReplayAndVerifyAsync(
TraceCorpusEntry trace,
ReplayExpectation expectation)
{
var result = await ReplayOrchestrator.ReplayAsync(
trace.Trace,
TimeProvider);
VerifyExpectation(result, expectation);
return result;
}
/// <summary>
/// Replay all traces matching query and collect results.
/// </summary>
/// <param name="query">Query for traces to replay.</param>
/// <param name="expectationFactory">Factory to create expectations per trace.</param>
/// <returns>Batch replay results.</returns>
protected async Task<ReplayBatchResult> ReplayBatchAsync(
TraceQuery query,
Func<TraceCorpusEntry, ReplayExpectation> expectationFactory)
{
var results = new List<(TraceCorpusEntry Trace, ReplayResult Result, bool Passed)>();
await foreach (var trace in CorpusManager.QueryAsync(query))
{
var expectation = expectationFactory(trace);
var result = await ReplayOrchestrator.ReplayAsync(trace.Trace, TimeProvider);
var passed = VerifyExpectationSafe(result, expectation);
results.Add((trace, result, passed));
}
return new ReplayBatchResult([.. results]);
}
private static void VerifyExpectation(ReplayResult result, ReplayExpectation expectation)
{
if (expectation.ShouldSucceed)
{
result.Success.Should().BeTrue(
$"Replay should succeed: {result.FailureReason}");
}
else
{
result.Success.Should().BeFalse(
$"Replay should fail with: {expectation.ExpectedFailure}");
}
if (expectation.ExpectedOutputHash is not null)
{
result.OutputHash.Should().Be(expectation.ExpectedOutputHash,
"Output hash should match expected");
}
}
private static bool VerifyExpectationSafe(ReplayResult result, ReplayExpectation expectation)
{
try
{
VerifyExpectation(result, expectation);
return true;
}
catch
{
return false;
}
}
/// <inheritdoc/>
public virtual ValueTask DisposeAsync() => ValueTask.CompletedTask;
}
/// <summary>
/// Expected outcome of a trace replay.
/// </summary>
/// <param name="ShouldSucceed">Whether replay should succeed.</param>
/// <param name="ExpectedFailure">Expected failure reason, if should fail.</param>
/// <param name="ExpectedOutputHash">Expected output hash for determinism check.</param>
/// <param name="ExpectedWarnings">Expected warnings.</param>
public sealed record ReplayExpectation(
bool ShouldSucceed,
string? ExpectedFailure = null,
string? ExpectedOutputHash = null,
ImmutableArray<string> ExpectedWarnings = default);
/// <summary>
/// Result of a batch replay operation.
/// </summary>
/// <param name="Results">Individual trace results.</param>
public sealed record ReplayBatchResult(
ImmutableArray<(TraceCorpusEntry Trace, ReplayResult Result, bool Passed)> Results)
{
/// <summary>
/// Gets the total number of traces replayed.
/// </summary>
public int TotalCount => Results.Length;
/// <summary>
/// Gets the number of traces that passed.
/// </summary>
public int PassedCount => Results.Count(r => r.Passed);
/// <summary>
/// Gets the number of traces that failed.
/// </summary>
public int FailedCount => Results.Count(r => !r.Passed);
/// <summary>
/// Gets the pass rate as a decimal (0-1).
/// </summary>
public decimal PassRate => TotalCount > 0 ? (decimal)PassedCount / TotalCount : 0;
}

View File

@@ -0,0 +1,209 @@
// <copyright file="ServiceCollectionExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Replay.Anonymization;
using StellaOps.Testing.Temporal;
namespace StellaOps.Testing.Replay;
/// <summary>
/// Extension methods for configuring replay testing services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Add replay testing services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddReplayTesting(this IServiceCollection services)
{
services.AddSingleton<SimulatedTimeProvider>(sp =>
new SimulatedTimeProvider(DateTimeOffset.UtcNow));
services.AddSingleton<TimeProvider>(sp =>
sp.GetRequiredService<SimulatedTimeProvider>());
services.AddSingleton<ITraceCorpusManager, InMemoryTraceCorpusManager>();
services.AddSingleton<IReplayOrchestrator, DefaultReplayOrchestrator>();
services.AddSingleton<ITraceAnonymizer, TraceAnonymizer>();
services.AddSingleton(typeof(ILogger<>), typeof(NullLogger<>));
return services;
}
}
/// <summary>
/// In-memory implementation of trace corpus manager for testing.
/// </summary>
internal sealed class InMemoryTraceCorpusManager : ITraceCorpusManager
{
private readonly ConcurrentDictionary<string, TraceCorpusEntry> _traces = new();
private readonly TimeProvider _timeProvider;
private int _nextId;
public InMemoryTraceCorpusManager(TimeProvider timeProvider)
{
_timeProvider = timeProvider;
}
public Task<TraceCorpusEntry> ImportAsync(
AnonymizedTrace trace,
TraceClassification classification,
CancellationToken ct = default)
{
var entryId = $"corpus-{Interlocked.Increment(ref _nextId):D6}";
var entry = new TraceCorpusEntry(
EntryId: entryId,
Trace: trace,
Classification: classification,
ImportedAt: _timeProvider.GetUtcNow(),
ExpectedOutputHash: null);
_traces[entryId] = entry;
return Task.FromResult(entry);
}
public async IAsyncEnumerable<TraceCorpusEntry> QueryAsync(
TraceQuery query,
[EnumeratorCancellation] CancellationToken ct = default)
{
var results = _traces.Values.AsEnumerable();
if (query.Category is not null)
{
results = results.Where(e => e.Classification.Category == query.Category);
}
if (query.MinComplexity is not null)
{
results = results.Where(e => e.Classification.Complexity >= query.MinComplexity);
}
if (!query.RequiredTags.IsDefaultOrEmpty)
{
results = results.Where(e =>
query.RequiredTags.All(t => e.Classification.Tags.Contains(t)));
}
if (query.FailureMode is not null)
{
results = results.Where(e => e.Classification.FailureMode == query.FailureMode);
}
var limited = results.Take(query.Limit);
foreach (var entry in limited)
{
ct.ThrowIfCancellationRequested();
await Task.Yield();
yield return entry;
}
}
public Task<TraceCorpusStatistics> GetStatisticsAsync(CancellationToken ct = default)
{
var entries = _traces.Values.ToList();
var byCategory = entries
.GroupBy(e => e.Classification.Category)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var byComplexity = entries
.GroupBy(e => e.Classification.Complexity)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var oldest = entries.Count > 0 ? entries.Min(e => e.ImportedAt) : (DateTimeOffset?)null;
var newest = entries.Count > 0 ? entries.Max(e => e.ImportedAt) : (DateTimeOffset?)null;
return Task.FromResult(new TraceCorpusStatistics(
TotalTraces: entries.Count,
TracesByCategory: byCategory,
TracesByComplexity: byComplexity,
OldestTrace: oldest,
NewestTrace: newest));
}
}
/// <summary>
/// Default implementation of replay orchestrator.
/// </summary>
internal sealed class DefaultReplayOrchestrator : IReplayOrchestrator
{
private readonly ILogger<DefaultReplayOrchestrator> _logger;
public DefaultReplayOrchestrator(ILogger<DefaultReplayOrchestrator> logger)
{
_logger = logger;
}
public Task<ReplayResult> ReplayAsync(
AnonymizedTrace trace,
SimulatedTimeProvider timeProvider,
CancellationToken ct = default)
{
var startTime = timeProvider.GetUtcNow();
var spanResults = new List<SpanReplayResult>();
var warnings = new List<string>();
foreach (var span in trace.Spans)
{
ct.ThrowIfCancellationRequested();
// Simulate span execution
timeProvider.Advance(span.Duration);
var replayDuration = span.Duration; // In simulation, same duration
var delta = TimeSpan.Zero;
spanResults.Add(new SpanReplayResult(
SpanId: span.SpanId,
Success: true,
Duration: replayDuration,
DurationDelta: delta,
OutputHash: ComputeSpanHash(span)));
}
var endTime = timeProvider.GetUtcNow();
var totalDuration = endTime - startTime;
var outputHash = ComputeOutputHash(spanResults);
_logger.LogDebug(
"Replayed trace {TraceId} with {SpanCount} spans in {Duration}",
trace.TraceId, trace.Spans.Length, totalDuration);
return Task.FromResult(new ReplayResult(
Success: true,
OutputHash: outputHash,
Duration: totalDuration,
FailureReason: null,
Warnings: [.. warnings],
SpanResults: [.. spanResults]));
}
private static string ComputeSpanHash(AnonymizedSpan span)
{
var input = $"{span.SpanId}:{span.OperationName}:{span.Duration.Ticks}";
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes).ToLowerInvariant()[..16];
}
private static string ComputeOutputHash(List<SpanReplayResult> results)
{
var input = string.Join("|", results.Select(r => r.OutputHash));
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes).ToLowerInvariant();
}
}
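// Usage sketch (illustrative only): AddReplayTesting registers the same SimulatedTimeProvider
// instance under both SimulatedTimeProvider and TimeProvider, so advancing simulated time is
// observed by components that depend only on the abstract TimeProvider. The class and method
// names here are invented for the example.
internal static class ReplayTestingWiringSketch
{
    public static (DateTimeOffset Before, DateTimeOffset After) DemonstrateSharedClock()
    {
        using var provider = new ServiceCollection()
            .AddReplayTesting()
            .BuildServiceProvider();
        var simulated = provider.GetRequiredService<SimulatedTimeProvider>();
        var abstractClock = provider.GetRequiredService<TimeProvider>();
        var before = abstractClock.GetUtcNow();
        simulated.Advance(TimeSpan.FromMinutes(10));
        // Both reads come from the abstract TimeProvider; the advance is visible because
        // the two registrations resolve to the same singleton.
        var after = abstractClock.GetUtcNow();
        return (before, after);
    }
}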

View File

@@ -0,0 +1,31 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Infrastructure for replay-based integration testing using production traces</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.Replay.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Temporal\StellaOps.Testing.Temporal.csproj" />
<ProjectReference Include="..\..\..\Replay\__Libraries\StellaOps.Replay.Anonymization\StellaOps.Replay.Anonymization.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,154 @@
// <copyright file="Models.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-006, CCUT-007
using System.Collections.Immutable;
namespace StellaOps.Testing.SchemaEvolution;
/// <summary>
/// Represents a schema version.
/// </summary>
/// <param name="VersionId">Version identifier (e.g., "v2024.11", "v2024.12").</param>
/// <param name="MigrationId">Migration identifier if applicable.</param>
/// <param name="AppliedAt">When this version was applied.</param>
public sealed record SchemaVersion(
string VersionId,
string? MigrationId,
DateTimeOffset AppliedAt);
/// <summary>
/// Result of schema compatibility test.
/// </summary>
/// <param name="IsCompatible">Whether the test passed.</param>
/// <param name="BaselineVersion">Schema version used as baseline.</param>
/// <param name="TargetVersion">Target schema version tested against.</param>
/// <param name="TestedOperation">Type of operation tested.</param>
/// <param name="ErrorMessage">Error message if not compatible.</param>
/// <param name="Exception">Exception if one occurred.</param>
public sealed record SchemaCompatibilityResult(
bool IsCompatible,
string BaselineVersion,
string TargetVersion,
SchemaOperationType TestedOperation,
string? ErrorMessage = null,
Exception? Exception = null);
/// <summary>
/// Type of schema operation tested.
/// </summary>
public enum SchemaOperationType
{
/// <summary>
/// Read operation (SELECT).
/// </summary>
Read,
/// <summary>
/// Write operation (INSERT/UPDATE).
/// </summary>
Write,
/// <summary>
/// Delete operation (DELETE).
/// </summary>
Delete,
/// <summary>
/// Migration forward (upgrade).
/// </summary>
MigrationUp,
/// <summary>
/// Migration rollback (downgrade).
/// </summary>
MigrationDown
}
/// <summary>
/// Configuration for schema evolution tests.
/// </summary>
/// <param name="SupportedVersions">Versions to test compatibility with.</param>
/// <param name="CurrentVersion">Current schema version.</param>
/// <param name="BackwardCompatibilityVersionCount">Number of previous versions to test backward compatibility.</param>
/// <param name="ForwardCompatibilityVersionCount">Number of future versions to test forward compatibility.</param>
/// <param name="TimeoutPerTest">Timeout per individual test.</param>
public sealed record SchemaEvolutionConfig(
ImmutableArray<string> SupportedVersions,
string CurrentVersion,
int BackwardCompatibilityVersionCount = 2,
int ForwardCompatibilityVersionCount = 1,
TimeSpan TimeoutPerTest = default)
{
/// <summary>
/// Gets the timeout per test.
/// </summary>
public TimeSpan TimeoutPerTest { get; init; } =
TimeoutPerTest == default ? TimeSpan.FromMinutes(5) : TimeoutPerTest;
}
/// <summary>
/// Information about a database migration.
/// </summary>
/// <param name="MigrationId">Unique migration identifier.</param>
/// <param name="Version">Version this migration belongs to.</param>
/// <param name="Description">Human-readable description.</param>
/// <param name="HasUpScript">Whether up migration script exists.</param>
/// <param name="HasDownScript">Whether down migration script exists.</param>
/// <param name="AppliedAt">When the migration was applied.</param>
public sealed record MigrationInfo(
string MigrationId,
string Version,
string Description,
bool HasUpScript,
bool HasDownScript,
DateTimeOffset? AppliedAt);
/// <summary>
/// Result of testing migration rollback.
/// </summary>
/// <param name="Migration">Migration that was tested.</param>
/// <param name="Success">Whether rollback succeeded.</param>
/// <param name="DurationMs">Duration of rollback in milliseconds.</param>
/// <param name="ErrorMessage">Error message if rollback failed.</param>
public sealed record MigrationRollbackResult(
MigrationInfo Migration,
bool Success,
long DurationMs,
string? ErrorMessage);
/// <summary>
/// Test data seeding result.
/// </summary>
/// <param name="SchemaVersion">Schema version data was seeded for.</param>
/// <param name="RecordsSeeded">Number of records seeded.</param>
/// <param name="DurationMs">Duration of seeding in milliseconds.</param>
public sealed record SeedDataResult(
string SchemaVersion,
int RecordsSeeded,
long DurationMs);
/// <summary>
/// Report of schema evolution test suite.
/// </summary>
/// <param name="TotalTests">Total number of tests executed.</param>
/// <param name="PassedTests">Number of passed tests.</param>
/// <param name="FailedTests">Number of failed tests.</param>
/// <param name="SkippedTests">Number of skipped tests.</param>
/// <param name="Results">Individual test results.</param>
/// <param name="TotalDurationMs">Total duration in milliseconds.</param>
public sealed record SchemaEvolutionReport(
int TotalTests,
int PassedTests,
int FailedTests,
int SkippedTests,
ImmutableArray<SchemaCompatibilityResult> Results,
long TotalDurationMs)
{
/// <summary>
/// Gets a value indicating whether all tests passed.
/// </summary>
public bool IsSuccess => FailedTests == 0;
}
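// Usage sketch (illustrative only): the version identifiers are invented. Leaving
// TimeoutPerTest unset exercises the fallback declared on the record above, so the
// resulting configuration reports a five-minute per-test timeout.
internal static class SchemaEvolutionConfigSketch
{
    public static SchemaEvolutionConfig CreateDefault() => new(
        SupportedVersions: ["v2024.11", "v2024.12", "v2025.01"],
        CurrentVersion: "v2025.01",
        BackwardCompatibilityVersionCount: 2,
        ForwardCompatibilityVersionCount: 1);
}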

View File

@@ -0,0 +1,210 @@
// <copyright file="PostgresSchemaEvolutionTestBase.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-007, CCUT-008
using Microsoft.Extensions.Logging;
using Npgsql;
using Testcontainers.PostgreSql;
namespace StellaOps.Testing.SchemaEvolution;
/// <summary>
/// PostgreSQL-based schema evolution test base using Testcontainers.
/// </summary>
public abstract class PostgresSchemaEvolutionTestBase : SchemaEvolutionTestBase
{
private readonly Dictionary<string, PostgreSqlContainer> _containers = new();
private readonly SemaphoreSlim _containerLock = new(1, 1);
private bool _disposed;
/// <summary>
/// Initializes a new instance of the <see cref="PostgresSchemaEvolutionTestBase"/> class.
/// </summary>
/// <param name="logger">Logger instance.</param>
protected PostgresSchemaEvolutionTestBase(ILogger? logger = null)
: base(logger)
{
}
/// <summary>
/// Gets the schema versions available for testing.
/// </summary>
protected abstract IReadOnlyList<string> AvailableSchemaVersions { get; }
/// <summary>
/// Gets the PostgreSQL image tag for a schema version.
/// Override to use version-specific images.
/// </summary>
/// <param name="schemaVersion">Schema version.</param>
/// <returns>Docker image tag.</returns>
protected virtual string GetPostgresImageTag(string schemaVersion)
{
// Default to standard PostgreSQL 16
return "postgres:16-alpine";
}
/// <inheritdoc/>
protected override string GetPreviousSchemaVersion(string current)
{
var index = AvailableSchemaVersions.ToList().IndexOf(current);
if (index <= 0)
{
throw new InvalidOperationException($"No previous version available for {current}");
}
return AvailableSchemaVersions[index - 1];
}
/// <inheritdoc/>
protected override async Task<string> CreateDatabaseWithSchemaAsync(string schemaVersion, CancellationToken ct)
{
await _containerLock.WaitAsync(ct);
try
{
if (_containers.TryGetValue(schemaVersion, out var existing))
{
return existing.GetConnectionString();
}
var container = new PostgreSqlBuilder()
.WithImage(GetPostgresImageTag(schemaVersion))
.WithDatabase($"test_{schemaVersion.Replace(".", "_")}")
.WithUsername("test")
.WithPassword("test")
.Build();
await container.StartAsync(ct);
// Apply migrations up to specified version
var connectionString = container.GetConnectionString();
await ApplyMigrationsToVersionAsync(connectionString, schemaVersion, ct);
_containers[schemaVersion] = container;
return connectionString;
}
finally
{
_containerLock.Release();
}
}
/// <summary>
/// Apply migrations up to a specific version.
/// </summary>
/// <param name="connectionString">Database connection string.</param>
/// <param name="targetVersion">Target schema version.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Task representing the async operation.</returns>
protected abstract Task ApplyMigrationsToVersionAsync(
string connectionString,
string targetVersion,
CancellationToken ct);
/// <inheritdoc/>
protected override async Task<IReadOnlyList<MigrationInfo>> GetMigrationHistoryAsync(CancellationToken ct)
{
// Default implementation queries the migration history table
// Subclasses should override for their specific migration tool
var migrations = new List<MigrationInfo>();
if (DataSource == null)
{
return migrations;
}
try
{
await using var cmd = DataSource.CreateCommand(
"SELECT migration_id, version, description, applied_at FROM __migrations ORDER BY applied_at");
await using var reader = await cmd.ExecuteReaderAsync(ct);
while (await reader.ReadAsync(ct))
{
migrations.Add(new MigrationInfo(
MigrationId: reader.GetString(0),
Version: reader.GetString(1),
Description: reader.GetString(2),
HasUpScript: true, // Assume up script exists if migration was applied
HasDownScript: await CheckDownScriptExistsAsync(reader.GetString(0), ct),
AppliedAt: reader.GetDateTime(3)));
}
}
catch (Exception)
{
// Migration table may not exist in older versions
}
return migrations;
}
/// <summary>
/// Check if a down script exists for a migration.
/// </summary>
/// <param name="migrationId">Migration identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>True if down script exists.</returns>
protected virtual Task<bool> CheckDownScriptExistsAsync(string migrationId, CancellationToken ct)
{
// Default: assume down scripts exist
// Subclasses should override to check actual migration files
return Task.FromResult(true);
}
/// <inheritdoc/>
protected override async Task ApplyMigrationDownAsync(
NpgsqlDataSource dataSource,
MigrationInfo migration,
CancellationToken ct)
{
var downScript = await GetMigrationDownScriptAsync(migration.MigrationId, ct);
if (string.IsNullOrWhiteSpace(downScript))
{
throw new InvalidOperationException($"No down script found for migration {migration.MigrationId}");
}
await using var cmd = dataSource.CreateCommand(downScript);
await cmd.ExecuteNonQueryAsync(ct);
}
/// <summary>
/// Get the down script for a migration.
/// </summary>
/// <param name="migrationId">Migration identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Down script SQL.</returns>
protected abstract Task<string?> GetMigrationDownScriptAsync(string migrationId, CancellationToken ct);
/// <summary>
/// Dispose resources.
/// </summary>
/// <returns>ValueTask representing the async operation.</returns>
public override async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
await _containerLock.WaitAsync();
try
{
foreach (var container in _containers.Values)
{
await container.DisposeAsync();
}
_containers.Clear();
}
finally
{
_containerLock.Release();
_containerLock.Dispose();
}
await base.DisposeAsync();
_disposed = true;
}
}
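// Usage sketch (illustrative only): the minimum a concrete Postgres fixture supplies. The
// version list, the "migrations/{id}.up.sql" / "{id}.down.sql" layout, and the sample_table
// seed are assumptions made for this example, not a convention prescribed by the framework.
internal sealed class ExamplePostgresSchemaEvolutionFixture : PostgresSchemaEvolutionTestBase
{
    protected override IReadOnlyList<string> AvailableSchemaVersions { get; } =
        ["v2024.11", "v2024.12", "v2025.01"];
    protected override Task<string> GetCurrentSchemaVersionAsync(CancellationToken ct) =>
        Task.FromResult(AvailableSchemaVersions[^1]);
    protected override async Task ApplyMigrationsToVersionAsync(
        string connectionString, string targetVersion, CancellationToken ct)
    {
        // The sketch applies every up script in ordinal order; a real fixture would stop
        // once it reaches targetVersion.
        await using var dataSource = NpgsqlDataSource.Create(connectionString);
        foreach (var file in Directory.EnumerateFiles("migrations", "*.up.sql").Order(StringComparer.Ordinal))
        {
            await using var cmd = dataSource.CreateCommand(await File.ReadAllTextAsync(file, ct));
            await cmd.ExecuteNonQueryAsync(ct);
        }
    }
    protected override async Task<string?> GetMigrationDownScriptAsync(string migrationId, CancellationToken ct)
    {
        var path = Path.Combine("migrations", $"{migrationId}.down.sql");
        return File.Exists(path) ? await File.ReadAllTextAsync(path, ct) : null;
    }
    protected override async Task SeedTestDataAsync(NpgsqlDataSource dataSource, string schemaVersion, CancellationToken ct)
    {
        await using var cmd = dataSource.CreateCommand("INSERT INTO sample_table (name) VALUES ('seed')");
        await cmd.ExecuteNonQueryAsync(ct);
    }
    // A [Fact] in the real test class would await this and assert that every result
    // reports IsCompatible.
    public async Task<IReadOnlyList<SchemaCompatibilityResult>> RunReadBackwardCompatibilityAsync(CancellationToken ct = default)
    {
        await InitializeAsync(ct);
        return await TestReadBackwardCompatibilityAsync(
            previousVersions: ["v2024.11", "v2024.12"],
            readOperation: async dataSource =>
            {
                await using var cmd = dataSource.CreateCommand("SELECT COUNT(*) FROM sample_table");
                return (long)(await cmd.ExecuteScalarAsync(ct) ?? 0L);
            },
            validateResult: count => count >= 0,
            ct: ct);
    }
}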

View File

@@ -0,0 +1,335 @@
// <copyright file="SchemaEvolutionTestBase.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-007
using System.Diagnostics;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Npgsql;
namespace StellaOps.Testing.SchemaEvolution;
/// <summary>
/// Base class for schema evolution tests that verify backward/forward compatibility.
/// </summary>
public abstract class SchemaEvolutionTestBase : IAsyncDisposable
{
private readonly ILogger _logger;
private NpgsqlDataSource? _dataSource;
private bool _disposed;
/// <summary>
/// Initializes a new instance of the <see cref="SchemaEvolutionTestBase"/> class.
/// </summary>
/// <param name="logger">Logger instance.</param>
protected SchemaEvolutionTestBase(ILogger? logger = null)
{
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance;
}
/// <summary>
/// Gets the current schema version.
/// </summary>
protected string? CurrentSchemaVersion { get; private set; }
/// <summary>
/// Gets the data source for the current test database.
/// </summary>
protected NpgsqlDataSource? DataSource => _dataSource;
/// <summary>
/// Initialize the test environment.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>Task representing the async operation.</returns>
public virtual async Task InitializeAsync(CancellationToken ct = default)
{
CurrentSchemaVersion = await GetCurrentSchemaVersionAsync(ct);
_logger.LogInformation("Schema evolution test initialized. Current version: {Version}", CurrentSchemaVersion);
}
/// <summary>
/// Test current code against schema version N-1.
/// </summary>
/// <param name="testAction">Test action to execute.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Compatibility result.</returns>
protected async Task<SchemaCompatibilityResult> TestAgainstPreviousSchemaAsync(
Func<NpgsqlDataSource, Task> testAction,
CancellationToken ct = default)
{
if (CurrentSchemaVersion == null)
{
throw new InvalidOperationException("Call InitializeAsync first");
}
var previousVersion = GetPreviousSchemaVersion(CurrentSchemaVersion);
return await TestAgainstSchemaVersionAsync(previousVersion, SchemaOperationType.Read, testAction, ct);
}
/// <summary>
/// Test current code against specific schema version.
/// </summary>
/// <param name="schemaVersion">Schema version to test against.</param>
/// <param name="operationType">Type of operation being tested.</param>
/// <param name="testAction">Test action to execute.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Compatibility result.</returns>
protected async Task<SchemaCompatibilityResult> TestAgainstSchemaVersionAsync(
string schemaVersion,
SchemaOperationType operationType,
Func<NpgsqlDataSource, Task> testAction,
CancellationToken ct = default)
{
_logger.LogInformation(
"Testing against schema version {SchemaVersion} (operation: {Operation})",
schemaVersion, operationType);
try
{
// Create isolated database with specific schema
var connectionString = await CreateDatabaseWithSchemaAsync(schemaVersion, ct);
await using var dataSource = NpgsqlDataSource.Create(connectionString);
_dataSource = dataSource;
// Execute test
await testAction(dataSource);
_logger.LogInformation("Schema compatibility test passed for version {Version}", schemaVersion);
return new SchemaCompatibilityResult(
IsCompatible: true,
BaselineVersion: CurrentSchemaVersion ?? "unknown",
TargetVersion: schemaVersion,
TestedOperation: operationType);
}
catch (Exception ex)
{
_logger.LogError(ex, "Schema compatibility test failed for version {Version}", schemaVersion);
return new SchemaCompatibilityResult(
IsCompatible: false,
BaselineVersion: CurrentSchemaVersion ?? "unknown",
TargetVersion: schemaVersion,
TestedOperation: operationType,
ErrorMessage: ex.Message,
Exception: ex);
}
}
/// <summary>
/// Test read operations work with older schema versions.
/// </summary>
/// <typeparam name="T">Type of result being read.</typeparam>
/// <param name="previousVersions">Previous versions to test.</param>
/// <param name="readOperation">Read operation to execute.</param>
/// <param name="validateResult">Validation function for results.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of compatibility results.</returns>
protected async Task<IReadOnlyList<SchemaCompatibilityResult>> TestReadBackwardCompatibilityAsync<T>(
string[] previousVersions,
Func<NpgsqlDataSource, Task<T>> readOperation,
Func<T, bool> validateResult,
CancellationToken ct = default)
{
var results = new List<SchemaCompatibilityResult>();
foreach (var version in previousVersions)
{
var result = await TestAgainstSchemaVersionAsync(
version,
SchemaOperationType.Read,
async dataSource =>
{
// Seed data using old schema
await SeedTestDataAsync(dataSource, version, ct);
// Read using current code
var readResult = await readOperation(dataSource);
// Validate result
validateResult(readResult).Should().BeTrue(
$"Read operation should work against schema version {version}");
},
ct);
results.Add(result);
}
return results;
}
/// <summary>
/// Test write operations work with newer schema versions.
/// </summary>
/// <param name="futureVersions">Future versions to test.</param>
/// <param name="writeOperation">Write operation to execute.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of compatibility results.</returns>
protected async Task<IReadOnlyList<SchemaCompatibilityResult>> TestWriteForwardCompatibilityAsync(
string[] futureVersions,
Func<NpgsqlDataSource, Task> writeOperation,
CancellationToken ct = default)
{
var results = new List<SchemaCompatibilityResult>();
foreach (var version in futureVersions)
{
var result = await TestAgainstSchemaVersionAsync(
version,
SchemaOperationType.Write,
async dataSource =>
{
// Write using current code - should not throw
await writeOperation(dataSource);
},
ct);
results.Add(result);
}
return results;
}
/// <summary>
/// Test that schema changes have backward-compatible migrations.
/// </summary>
/// <param name="migrationsToTest">Number of recent migrations to test.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of migration rollback results.</returns>
protected async Task<IReadOnlyList<MigrationRollbackResult>> TestMigrationRollbacksAsync(
int migrationsToTest = 5,
CancellationToken ct = default)
{
var results = new List<MigrationRollbackResult>();
var migrations = await GetMigrationHistoryAsync(ct);
foreach (var migration in migrations.TakeLast(migrationsToTest))
{
if (!migration.HasDownScript)
{
results.Add(new MigrationRollbackResult(
Migration: migration,
Success: false,
DurationMs: 0,
ErrorMessage: "Migration does not have down script"));
continue;
}
var result = await TestMigrationRollbackAsync(migration, ct);
results.Add(result);
}
return results;
}
/// <summary>
/// Test a single migration rollback.
/// </summary>
/// <param name="migration">Migration to test.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Rollback result.</returns>
protected virtual async Task<MigrationRollbackResult> TestMigrationRollbackAsync(
MigrationInfo migration,
CancellationToken ct = default)
{
var sw = Stopwatch.StartNew();
try
{
// Create a fresh database with migrations up to this point
var connectionString = await CreateDatabaseWithSchemaAsync(migration.Version, ct);
await using var dataSource = NpgsqlDataSource.Create(connectionString);
// Apply the down migration
await ApplyMigrationDownAsync(dataSource, migration, ct);
sw.Stop();
return new MigrationRollbackResult(
Migration: migration,
Success: true,
DurationMs: sw.ElapsedMilliseconds,
ErrorMessage: null);
}
catch (Exception ex)
{
sw.Stop();
return new MigrationRollbackResult(
Migration: migration,
Success: false,
DurationMs: sw.ElapsedMilliseconds,
ErrorMessage: ex.Message);
}
}
/// <summary>
/// Seed test data for a specific schema version.
/// </summary>
/// <param name="dataSource">Data source to seed.</param>
/// <param name="schemaVersion">Schema version.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Task representing the async operation.</returns>
protected abstract Task SeedTestDataAsync(NpgsqlDataSource dataSource, string schemaVersion, CancellationToken ct);
/// <summary>
/// Get previous schema version.
/// </summary>
/// <param name="current">Current schema version.</param>
/// <returns>Previous schema version.</returns>
protected abstract string GetPreviousSchemaVersion(string current);
/// <summary>
/// Get current schema version from the database or configuration.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>Current schema version.</returns>
protected abstract Task<string> GetCurrentSchemaVersionAsync(CancellationToken ct);
/// <summary>
/// Create a database with a specific schema version.
/// </summary>
/// <param name="schemaVersion">Schema version to create.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Connection string to the created database.</returns>
protected abstract Task<string> CreateDatabaseWithSchemaAsync(string schemaVersion, CancellationToken ct);
/// <summary>
/// Get migration history.
/// </summary>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of migrations.</returns>
protected abstract Task<IReadOnlyList<MigrationInfo>> GetMigrationHistoryAsync(CancellationToken ct);
/// <summary>
/// Apply a migration down script.
/// </summary>
/// <param name="dataSource">Data source.</param>
/// <param name="migration">Migration to roll back.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Task representing the async operation.</returns>
protected abstract Task ApplyMigrationDownAsync(NpgsqlDataSource dataSource, MigrationInfo migration, CancellationToken ct);
/// <summary>
/// Dispose resources.
/// </summary>
/// <returns>ValueTask representing the async operation.</returns>
public virtual async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
if (_dataSource != null)
{
await _dataSource.DisposeAsync();
}
_disposed = true;
GC.SuppressFinalize(this);
}
}
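// Sketch (illustrative only): the framework does not ship an aggregation from per-version
// SchemaCompatibilityResult values to the SchemaEvolutionReport record in Models.cs; this
// helper shows one way a fixture could assemble that report.
internal static class SchemaEvolutionReporting
{
    public static SchemaEvolutionReport ToReport(
        IReadOnlyList<SchemaCompatibilityResult> results,
        long totalDurationMs)
    {
        var passed = results.Count(r => r.IsCompatible);
        return new SchemaEvolutionReport(
            TotalTests: results.Count,
            PassedTests: passed,
            FailedTests: results.Count - passed,
            SkippedTests: 0,
            Results: [.. results],
            TotalDurationMs: totalDurationMs);
    }
}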

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<OutputType>Exe</OutputType>
<UseAppHost>true</UseAppHost>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Schema evolution testing framework for backward/forward compatibility verification</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Testing.SchemaEvolution.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Npgsql" />
<PackageReference Include="Testcontainers.PostgreSql" />
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,239 @@
// <copyright file="ClockSkewAssertionsTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.Testing.Temporal.Tests;
[Trait("Category", "Unit")]
public sealed class ClockSkewAssertionsTests
{
private static readonly DateTimeOffset StartTime = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
[Fact]
public async Task AssertHandlesClockJumpForwardAsync_SuccessfulOperation_Passes()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(StartTime);
var operationResult = 42;
// Act
var act = async () => await ClockSkewAssertions.AssertHandlesClockJumpForwardAsync(
timeProvider,
() => Task.FromResult(operationResult),
jumpAmount: TimeSpan.FromHours(2),
isValidResult: r => r == 42);
// Assert
await act.Should().NotThrowAsync();
}
[Fact]
public async Task AssertHandlesClockJumpForwardAsync_FailingOperation_Throws()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(StartTime);
var callCount = 0;
// Act
var act = async () => await ClockSkewAssertions.AssertHandlesClockJumpForwardAsync(
timeProvider,
() =>
{
callCount++;
return Task.FromResult(callCount == 1 ? 42 : -1); // Fails after jump
},
jumpAmount: TimeSpan.FromHours(2),
isValidResult: r => r == 42);
// Assert
await act.Should().ThrowAsync<ClockSkewAssertionException>()
.WithMessage("*after forward clock jump*");
}
[Fact]
public async Task AssertHandlesClockJumpBackwardAsync_AllowFailure_DoesNotThrow()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(StartTime);
var callCount = 0;
// Act
var act = async () => await ClockSkewAssertions.AssertHandlesClockJumpBackwardAsync(
timeProvider,
() =>
{
callCount++;
if (callCount > 1)
{
throw new InvalidOperationException("Time went backward!");
}
return Task.FromResult(42);
},
jumpAmount: TimeSpan.FromMinutes(30),
isValidResult: r => r == 42,
allowFailure: true);
// Assert
await act.Should().NotThrowAsync();
}
[Fact]
public async Task AssertHandlesClockDriftAsync_StableOperation_ReturnsReport()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(StartTime);
// Act
var report = await ClockSkewAssertions.AssertHandlesClockDriftAsync(
timeProvider,
() => Task.FromResult(42),
driftPerSecond: TimeSpan.FromMilliseconds(10),
testDuration: TimeSpan.FromSeconds(10),
stepInterval: TimeSpan.FromSeconds(1),
isValidResult: r => r == 42);
// Assert
report.TotalSteps.Should().Be(10);
report.FailedSteps.Should().Be(0);
report.SuccessRate.Should().Be(100m);
}
[Fact]
public async Task AssertHandlesClockDriftAsync_UnstableOperation_Throws()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(StartTime);
var stepCount = 0;
// Act
var act = async () => await ClockSkewAssertions.AssertHandlesClockDriftAsync(
timeProvider,
() =>
{
stepCount++;
return Task.FromResult(stepCount < 5 ? 42 : -1); // Fails after step 4
},
driftPerSecond: TimeSpan.FromMilliseconds(10),
testDuration: TimeSpan.FromSeconds(10),
stepInterval: TimeSpan.FromSeconds(1),
isValidResult: r => r == 42);
// Assert
await act.Should().ThrowAsync<ClockSkewAssertionException>()
.WithMessage("*failed under clock drift*");
}
[Fact]
public void AssertTimestampsWithinTolerance_WithinTolerance_Passes()
{
// Arrange
var expected = StartTime;
var actual = StartTime.AddSeconds(30);
// Act
var act = () => ClockSkewAssertions.AssertTimestampsWithinTolerance(
expected, actual, tolerance: TimeSpan.FromMinutes(1));
// Assert
act.Should().NotThrow();
}
[Fact]
public void AssertTimestampsWithinTolerance_OutsideTolerance_Throws()
{
// Arrange
var expected = StartTime;
var actual = StartTime.AddMinutes(10);
// Act
var act = () => ClockSkewAssertions.AssertTimestampsWithinTolerance(
expected, actual, tolerance: TimeSpan.FromMinutes(5));
// Assert
act.Should().Throw<ClockSkewAssertionException>()
.WithMessage("*exceeds tolerance*");
}
[Fact]
public void AssertMonotonicTimestamps_Monotonic_Passes()
{
// Arrange
var timestamps = new[]
{
StartTime,
StartTime.AddSeconds(1),
StartTime.AddSeconds(5),
StartTime.AddMinutes(1)
};
// Act
var act = () => ClockSkewAssertions.AssertMonotonicTimestamps(timestamps);
// Assert
act.Should().NotThrow();
}
[Fact]
public void AssertMonotonicTimestamps_NonMonotonic_Throws()
{
// Arrange
var timestamps = new[]
{
StartTime,
StartTime.AddSeconds(5),
StartTime.AddSeconds(3), // Goes backward!
StartTime.AddMinutes(1)
};
// Act
var act = () => ClockSkewAssertions.AssertMonotonicTimestamps(timestamps);
// Assert
act.Should().Throw<ClockSkewAssertionException>()
.WithMessage("*not monotonically increasing*index 2*");
}
[Fact]
public void AssertMonotonicTimestamps_EqualTimestamps_FailsWhenNotAllowed()
{
// Arrange
var timestamps = new[]
{
StartTime,
StartTime, // Equal to previous
StartTime.AddSeconds(1)
};
// Act
var act = () => ClockSkewAssertions.AssertMonotonicTimestamps(timestamps, allowEqual: false);
// Assert
act.Should().Throw<ClockSkewAssertionException>();
}
[Fact]
public void AssertMonotonicTimestamps_EqualTimestamps_PassesWhenAllowed()
{
// Arrange
var timestamps = new[]
{
StartTime,
StartTime, // Equal to previous
StartTime.AddSeconds(1)
};
// Act
var act = () => ClockSkewAssertions.AssertMonotonicTimestamps(timestamps, allowEqual: true);
// Assert
act.Should().NotThrow();
}
[Fact]
public void DefaultSkewTolerance_IsFiveMinutes()
{
ClockSkewAssertions.DefaultSkewTolerance.Should().Be(TimeSpan.FromMinutes(5));
}
}

View File

@@ -0,0 +1,249 @@
// <copyright file="IdempotencyVerifierTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.Testing.Temporal.Tests;
[Trait("Category", "Unit")]
public sealed class IdempotencyVerifierTests
{
[Fact]
public async Task VerifyAsync_IdempotentOperation_ReturnsSuccess()
{
// Arrange
var counter = 0;
var verifier = new IdempotencyVerifier<int>(() => 42); // Always returns same value
// Act
var result = await verifier.VerifyAsync(
async () =>
{
counter++;
await Task.CompletedTask;
},
repetitions: 5,
ct: TestContext.Current.CancellationToken);
// Assert
result.IsIdempotent.Should().BeTrue();
result.AllSucceeded.Should().BeTrue();
result.Repetitions.Should().Be(5);
result.DivergentStates.Should().BeEmpty();
counter.Should().Be(5);
}
[Fact]
public async Task VerifyAsync_NonIdempotentOperation_ReturnsFailure()
{
// Arrange
var counter = 0;
var verifier = new IdempotencyVerifier<int>(() => counter); // Returns incrementing value
// Act
var result = await verifier.VerifyAsync(
async () =>
{
counter++;
await Task.CompletedTask;
},
repetitions: 3,
ct: TestContext.Current.CancellationToken);
// Assert
result.IsIdempotent.Should().BeFalse();
result.States.Should().HaveCount(3);
result.States.Should().BeEquivalentTo([1, 2, 3]);
result.DivergentStates.Should().HaveCount(2); // States 2 and 3 diverge from state 1
}
[Fact]
public async Task VerifyAsync_OperationThrows_RecordsException()
{
// Arrange
var attempts = 0;
var verifier = new IdempotencyVerifier<int>(() => 42);
// Act
var result = await verifier.VerifyAsync(
async () =>
{
attempts++;
if (attempts == 2)
{
throw new InvalidOperationException("Intentional failure");
}
await Task.CompletedTask;
},
repetitions: 3,
ct: TestContext.Current.CancellationToken);
// Assert
result.AllSucceeded.Should().BeFalse();
result.Exceptions.Should().ContainSingle();
result.Exceptions[0].ExecutionIndex.Should().Be(1); // Second attempt (0-indexed)
result.States.Should().HaveCount(2); // Only successful executions
}
[Fact]
public async Task VerifyWithRetriesAsync_AppliesDelaysBetweenRetries()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
var capturedTimes = new List<DateTimeOffset>();
var verifier = new IdempotencyVerifier<DateTimeOffset>(() => timeProvider.GetUtcNow());
// Act
var result = await verifier.VerifyWithRetriesAsync(
async () =>
{
capturedTimes.Add(timeProvider.GetUtcNow());
await Task.CompletedTask;
},
retryDelays:
[
TimeSpan.FromSeconds(1),
TimeSpan.FromSeconds(5),
TimeSpan.FromSeconds(30)
],
timeProvider,
ct: TestContext.Current.CancellationToken);
// Assert
capturedTimes.Should().HaveCount(4); // Initial + 3 retries
(capturedTimes[1] - capturedTimes[0]).Should().Be(TimeSpan.FromSeconds(1));
(capturedTimes[2] - capturedTimes[1]).Should().Be(TimeSpan.FromSeconds(5));
(capturedTimes[3] - capturedTimes[2]).Should().Be(TimeSpan.FromSeconds(30));
}
[Fact]
public async Task VerifyWithExponentialBackoffAsync_AppliesExponentialDelays()
{
// Arrange
var timeProvider = new SimulatedTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
var capturedTimes = new List<DateTimeOffset>();
var verifier = new IdempotencyVerifier<DateTimeOffset>(() => timeProvider.GetUtcNow());
// Act
var result = await verifier.VerifyWithExponentialBackoffAsync(
async () =>
{
capturedTimes.Add(timeProvider.GetUtcNow());
await Task.CompletedTask;
},
maxRetries: 3,
initialDelay: TimeSpan.FromSeconds(1),
timeProvider,
ct: TestContext.Current.CancellationToken);
// Assert
capturedTimes.Should().HaveCount(4);
(capturedTimes[1] - capturedTimes[0]).Should().Be(TimeSpan.FromSeconds(1));
(capturedTimes[2] - capturedTimes[1]).Should().Be(TimeSpan.FromSeconds(2));
(capturedTimes[3] - capturedTimes[2]).Should().Be(TimeSpan.FromSeconds(4));
}
[Fact]
public void Verify_SynchronousOperation_Works()
{
// Arrange
var verifier = new IdempotencyVerifier<string>(() => "constant");
// Act
var result = verifier.Verify(() => { /* no-op */ }, repetitions: 3);
// Assert
result.IsIdempotent.Should().BeTrue();
result.States.Should().AllBe("constant");
}
[Fact]
public void Verify_WithCustomComparer_UsesComparer()
{
// Arrange
var results = new Queue<string>(["HELLO", "hello", "Hello"]);
var verifier = new IdempotencyVerifier<string>(
() => results.Dequeue(),
StringComparer.OrdinalIgnoreCase); // Case-insensitive
// Act
var result = verifier.Verify(() => { }, repetitions: 3);
// Assert
result.IsIdempotent.Should().BeTrue(); // All are equal case-insensitively
}
[Fact]
public void Verify_WithLessThanTwoRepetitions_Throws()
{
// Arrange
var verifier = new IdempotencyVerifier<int>(() => 42);
// Act
var act = () => verifier.Verify(() => { }, repetitions: 1);
// Assert
act.Should().Throw<ArgumentOutOfRangeException>();
}
[Fact]
public void ForString_CreatesStringVerifier()
{
// Arrange & Act
var verifier = IdempotencyVerifier.ForString(() => "test");
var result = verifier.Verify(() => { }, repetitions: 2);
// Assert
result.IsIdempotent.Should().BeTrue();
}
[Fact]
public void ForBytes_CreatesByteArrayVerifier()
{
// Arrange
var bytes = new byte[] { 1, 2, 3 };
var verifier = IdempotencyVerifier.ForBytes(() => bytes);
// Act
var result = verifier.Verify(() => { }, repetitions: 2);
// Assert
result.IsIdempotent.Should().BeTrue();
}
[Fact]
public void Summary_IdempotentSuccess_ReturnsCorrectMessage()
{
// Arrange
var verifier = new IdempotencyVerifier<int>(() => 42);
// Act
var result = verifier.Verify(() => { }, repetitions: 3);
// Assert
result.Summary.Should().Contain("Idempotent");
result.Summary.Should().Contain("3 executions");
}
[Fact]
public void SuccessRate_PartialFailures_CalculatesCorrectly()
{
// Arrange
var attempts = 0;
var verifier = new IdempotencyVerifier<int>(() => 42);
// Act - 1 failure out of 4 attempts
var result = verifier.Verify(() =>
{
attempts++;
if (attempts == 2)
{
throw new Exception("fail");
}
}, repetitions: 4);
// Assert
result.SuccessRate.Should().Be(0.75m); // 3 successes out of 4
}
}

View File

@@ -0,0 +1,183 @@
// <copyright file="LeapSecondTimeProviderTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.Testing.Temporal.Tests;
[Trait("Category", "Unit")]
public sealed class LeapSecondTimeProviderTests
{
private static readonly DateTimeOffset StartTime = new(2016, 12, 31, 23, 0, 0, TimeSpan.Zero);
[Fact]
public void AdvanceThroughLeapSecond_ReturnsAllPhases()
{
// Arrange
var leapDay = new DateOnly(2016, 12, 31);
var provider = new LeapSecondTimeProvider(StartTime, leapDay);
// Act
var moments = provider.AdvanceThroughLeapSecond(leapDay).ToList();
// Assert
moments.Should().HaveCount(4);
moments[0].Phase.Should().Be(LeapSecondPhase.TwoSecondsBefore);
moments[1].Phase.Should().Be(LeapSecondPhase.OneSecondBefore);
moments[2].Phase.Should().Be(LeapSecondPhase.LeapSecond);
moments[3].Phase.Should().Be(LeapSecondPhase.AfterLeapSecond);
}
[Fact]
public void AdvanceThroughLeapSecond_HasCorrectTimes()
{
// Arrange
var leapDay = new DateOnly(2016, 12, 31);
var provider = new LeapSecondTimeProvider(StartTime, leapDay);
// Act
var moments = provider.AdvanceThroughLeapSecond(leapDay).ToList();
// Assert
moments[0].Time.Hour.Should().Be(23);
moments[0].Time.Minute.Should().Be(59);
moments[0].Time.Second.Should().Be(58);
moments[1].Time.Second.Should().Be(59);
// Leap second has same second as previous (simulating system behavior)
moments[2].Time.Second.Should().Be(59);
// After leap second is midnight next day
moments[3].Time.Day.Should().Be(1);
moments[3].Time.Month.Should().Be(1);
moments[3].Time.Year.Should().Be(2017);
moments[3].Time.Second.Should().Be(0);
}
[Fact]
public void HasLeapSecond_ReturnsTrueForConfiguredDates()
{
// Arrange
var leapDay1 = new DateOnly(2016, 12, 31);
var leapDay2 = new DateOnly(2015, 6, 30);
var provider = new LeapSecondTimeProvider(StartTime, leapDay1, leapDay2);
// Act & Assert
provider.HasLeapSecond(leapDay1).Should().BeTrue();
provider.HasLeapSecond(leapDay2).Should().BeTrue();
provider.HasLeapSecond(new DateOnly(2020, 1, 1)).Should().BeFalse();
}
[Fact]
public void WithHistoricalLeapSeconds_ContainsKnownDates()
{
// Arrange & Act
var provider = LeapSecondTimeProvider.WithHistoricalLeapSeconds(StartTime);
// Assert
provider.HasLeapSecond(new DateOnly(2016, 12, 31)).Should().BeTrue();
provider.HasLeapSecond(new DateOnly(2015, 6, 30)).Should().BeTrue();
provider.HasLeapSecond(new DateOnly(2012, 6, 30)).Should().BeTrue();
}
[Fact]
public void HistoricalLeapSeconds_ContainsRecentLeapSeconds()
{
// Assert
LeapSecondTimeProvider.HistoricalLeapSeconds.Should().Contain(new DateOnly(2016, 12, 31));
LeapSecondTimeProvider.HistoricalLeapSeconds.Should().HaveCountGreaterThanOrEqualTo(5);
}
[Fact]
public void Advance_DelegatesCorrectly()
{
// Arrange
var provider = new LeapSecondTimeProvider(StartTime);
var advancement = TimeSpan.FromHours(1);
// Act
provider.Advance(advancement);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(StartTime.Add(advancement));
}
[Fact]
public void JumpTo_DelegatesCorrectly()
{
// Arrange
var provider = new LeapSecondTimeProvider(StartTime);
var target = new DateTimeOffset(2017, 1, 1, 0, 0, 0, TimeSpan.Zero);
// Act
provider.JumpTo(target);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(target);
}
[Fact]
public void CreateSmearingProvider_ReturnsSmearingProvider()
{
// Arrange
var leapDay = new DateOnly(2016, 12, 31);
var provider = new LeapSecondTimeProvider(StartTime, leapDay);
// Act
var smearing = provider.CreateSmearingProvider(leapDay);
// Assert
smearing.Should().NotBeNull();
smearing.Should().BeOfType<SmearingTimeProvider>();
}
[Fact]
public void SmearingProvider_AppliesSmearDuringWindow()
{
// Arrange
var leapDay = new DateOnly(2016, 12, 31);
// Start at 6pm on leap day (inside 24-hour smear window, 6 hours before midnight)
// The window is centered on midnight: 12:00 to 12:00 next day
// At 18:00, we're 6 hours into the 24-hour window (25% progress)
var eveningTime = new DateTimeOffset(2016, 12, 31, 18, 0, 0, TimeSpan.Zero);
var innerProvider = new SimulatedTimeProvider(eveningTime);
var smearing = new SmearingTimeProvider(
innerProvider, leapDay, TimeSpan.FromHours(24));
// Act
var isActive = smearing.IsSmearingActive;
var offset = smearing.CurrentSmearOffset;
// Assert
isActive.Should().BeTrue();
offset.Should().BeGreaterThan(TimeSpan.Zero);
}
[Fact]
public void SmearingProvider_OutsideWindow_ReturnsNormalTime()
{
// Arrange
var leapDay = new DateOnly(2016, 12, 31);
// Start well before the smear window (December 30)
var earlyTime = new DateTimeOffset(2016, 12, 30, 0, 0, 0, TimeSpan.Zero);
var innerProvider = new SimulatedTimeProvider(earlyTime);
var smearing = new SmearingTimeProvider(
innerProvider, leapDay, TimeSpan.FromHours(24));
// Act
var isActive = smearing.IsSmearingActive;
var offset = smearing.CurrentSmearOffset;
var reportedTime = smearing.GetUtcNow();
// Assert
isActive.Should().BeFalse();
offset.Should().Be(TimeSpan.Zero);
reportedTime.Should().Be(earlyTime);
}
}

View File

@@ -0,0 +1,214 @@
// <copyright file="SimulatedTimeProviderTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.Testing.Temporal.Tests;
[Trait("Category", "Unit")]
public sealed class SimulatedTimeProviderTests
{
private static readonly DateTimeOffset StartTime = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
[Fact]
public void GetUtcNow_ReturnsInitialTime()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
// Act
var result = provider.GetUtcNow();
// Assert
result.Should().Be(StartTime);
}
[Fact]
public void Advance_MovesTimeForward()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
var advancement = TimeSpan.FromMinutes(30);
// Act
provider.Advance(advancement);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(StartTime.Add(advancement));
}
[Fact]
public void Advance_WithNegativeDuration_Throws()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
// Act
var act = () => provider.Advance(TimeSpan.FromMinutes(-10));
// Assert
act.Should().Throw<ArgumentOutOfRangeException>();
}
[Fact]
public void JumpTo_SetsExactTime()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
var targetTime = new DateTimeOffset(2026, 6, 15, 18, 30, 0, TimeSpan.Zero);
// Act
provider.JumpTo(targetTime);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(targetTime);
}
[Fact]
public void JumpBackward_MovesTimeBackward()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
var backwardAmount = TimeSpan.FromHours(2);
// Act
provider.JumpBackward(backwardAmount);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(StartTime.Subtract(backwardAmount));
}
[Fact]
public void JumpBackward_RecordsInHistory()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
// Act
provider.JumpBackward(TimeSpan.FromHours(1));
// Assert
provider.HasJumpedBackward().Should().BeTrue();
provider.JumpHistory.Should().ContainSingle(j => j.JumpType == JumpType.JumpBackward);
}
[Fact]
public void SetDrift_AppliesDriftOnAdvance()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
var driftPerSecond = TimeSpan.FromMilliseconds(10); // 10ms fast per second
provider.SetDrift(driftPerSecond);
// Act - Advance 100 seconds
provider.Advance(TimeSpan.FromSeconds(100));
var result = provider.GetUtcNow();
// Assert - Should have 100 seconds + 1 second of drift (100 * 10ms)
var expectedTime = StartTime
.Add(TimeSpan.FromSeconds(100))
.Add(TimeSpan.FromSeconds(1)); // 100 * 10ms = 1000ms = 1s
result.Should().Be(expectedTime);
}
[Fact]
public void ClearDrift_StopsDriftApplication()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
provider.SetDrift(TimeSpan.FromMilliseconds(100));
provider.Advance(TimeSpan.FromSeconds(10)); // This will apply drift
var timeAfterDrift = provider.GetUtcNow();
provider.ClearDrift();
// Act
provider.Advance(TimeSpan.FromSeconds(10)); // This should not apply drift
var result = provider.GetUtcNow();
// Assert
result.Should().Be(timeAfterDrift.Add(TimeSpan.FromSeconds(10)));
}
[Fact]
public void JumpHistory_TracksAllJumps()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
// Act
provider.Advance(TimeSpan.FromMinutes(5));
provider.JumpTo(StartTime.AddHours(1));
provider.JumpBackward(TimeSpan.FromMinutes(30));
provider.Advance(TimeSpan.FromMinutes(10));
// Assert
provider.JumpHistory.Should().HaveCount(4);
provider.JumpHistory[0].JumpType.Should().Be(JumpType.Advance);
provider.JumpHistory[1].JumpType.Should().Be(JumpType.JumpForward);
provider.JumpHistory[2].JumpType.Should().Be(JumpType.JumpBackward);
provider.JumpHistory[3].JumpType.Should().Be(JumpType.Advance);
}
[Fact]
public void ClearHistory_RemovesAllJumpRecords()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
provider.Advance(TimeSpan.FromMinutes(5));
provider.JumpBackward(TimeSpan.FromMinutes(2));
// Act
provider.ClearHistory();
// Assert
provider.JumpHistory.Should().BeEmpty();
provider.HasJumpedBackward().Should().BeFalse(); // History is cleared
}
[Fact]
public async Task MultipleThreads_TimeIsConsistent()
{
// Arrange
var provider = new SimulatedTimeProvider(StartTime);
var results = new List<DateTimeOffset>();
var lockObj = new object();
var ct = TestContext.Current.CancellationToken;
// Act - Simulate concurrent reads while advancing
var tasks = new List<Task>();
for (int i = 0; i < 10; i++)
{
tasks.Add(Task.Run(() =>
{
for (int j = 0; j < 100; j++)
{
var time = provider.GetUtcNow();
lock (lockObj)
{
results.Add(time);
}
}
}, ct));
}
// Advance time in another thread
tasks.Add(Task.Run(() =>
{
for (int i = 0; i < 50; i++)
{
provider.Advance(TimeSpan.FromMilliseconds(10));
}
}, ct));
await Task.WhenAll(tasks);
// Assert - All results should be valid DateTimeOffsets (no corruption)
results.Should().OnlyContain(t => t >= StartTime);
}
}

View File

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Testing.Temporal\StellaOps.Testing.Temporal.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,152 @@
// <copyright file="TtlBoundaryTimeProviderTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.Testing.Temporal.Tests;
[Trait("Category", "Unit")]
public sealed class TtlBoundaryTimeProviderTests
{
private static readonly DateTimeOffset StartTime = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
private static readonly TimeSpan DefaultTtl = TimeSpan.FromMinutes(15);
[Fact]
public void PositionAtExpiryBoundary_SetsExactExpiryTime()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
var createdAt = StartTime;
var expectedExpiry = createdAt.Add(DefaultTtl);
// Act
provider.PositionAtExpiryBoundary(createdAt, DefaultTtl);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(expectedExpiry);
}
[Fact]
public void PositionJustBeforeExpiry_Sets1msBeforeExpiry()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
var createdAt = StartTime;
var expectedTime = createdAt.Add(DefaultTtl).AddMilliseconds(-1);
// Act
provider.PositionJustBeforeExpiry(createdAt, DefaultTtl);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(expectedTime);
}
[Fact]
public void PositionJustAfterExpiry_Sets1msAfterExpiry()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
var createdAt = StartTime;
var expectedTime = createdAt.Add(DefaultTtl).AddMilliseconds(1);
// Act
provider.PositionJustAfterExpiry(createdAt, DefaultTtl);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(expectedTime);
}
[Fact]
public void PositionOneTickBeforeExpiry_Sets1TickBeforeExpiry()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
var createdAt = StartTime;
var expectedTime = createdAt.Add(DefaultTtl).AddTicks(-1);
// Act
provider.PositionOneTickBeforeExpiry(createdAt, DefaultTtl);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(expectedTime);
}
[Fact]
public void GenerateBoundaryTestCases_ReturnsExpectedCases()
{
// Arrange
var createdAt = StartTime;
// Act
var cases = TtlBoundaryTimeProvider.GenerateBoundaryTestCases(createdAt, DefaultTtl).ToList();
// Assert
cases.Should().HaveCountGreaterThanOrEqualTo(8);
// Check specific expected cases
cases.Should().Contain(c => c.Name == "Exactly at expiry" && c.ShouldBeExpired);
cases.Should().Contain(c => c.Name == "1 tick before expiry" && !c.ShouldBeExpired);
cases.Should().Contain(c => c.Name == "1 tick after expiry" && c.ShouldBeExpired);
cases.Should().Contain(c => c.Name == "Just created" && !c.ShouldBeExpired);
}
[Theory]
[MemberData(nameof(GetBoundaryTestData))]
public void BoundaryTestCases_HaveCorrectExpiryExpectation(
string name,
DateTimeOffset time,
bool shouldBeExpired)
{
// This demonstrates how to use the generated test cases
var createdAt = new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
var ttl = TimeSpan.FromMinutes(15);
var expiry = createdAt.Add(ttl);
// Act
var isExpired = time >= expiry;
// Assert
isExpired.Should().Be(shouldBeExpired, $"Case '{name}' at {time:O}");
}
public static IEnumerable<object[]> GetBoundaryTestData()
{
var createdAt = new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
var ttl = TimeSpan.FromMinutes(15);
return TtlBoundaryTimeProvider.GenerateTheoryData(createdAt, ttl);
}
[Fact]
public void Advance_DelegatesCorrectly()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
// Act
provider.Advance(TimeSpan.FromMinutes(5));
var result = provider.GetUtcNow();
// Assert
result.Should().Be(StartTime.AddMinutes(5));
}
[Fact]
public void JumpTo_DelegatesCorrectly()
{
// Arrange
var provider = new TtlBoundaryTimeProvider(StartTime);
var target = new DateTimeOffset(2026, 12, 31, 23, 59, 59, TimeSpan.Zero);
// Act
provider.JumpTo(target);
var result = provider.GetUtcNow();
// Assert
result.Should().Be(target);
}
}

View File

@@ -0,0 +1,343 @@
// <copyright file="ClockSkewAssertions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Testing.Temporal;
/// <summary>
/// Assertions for verifying correct behavior under clock skew conditions.
/// </summary>
public static class ClockSkewAssertions
{
/// <summary>
/// Default tolerance for acceptable clock skew.
/// </summary>
public static readonly TimeSpan DefaultSkewTolerance = TimeSpan.FromMinutes(5);
/// <summary>
/// Assert that operation handles forward clock jump correctly.
/// </summary>
/// <typeparam name="T">The result type.</typeparam>
/// <param name="timeProvider">The simulated time provider.</param>
/// <param name="operation">The operation to test.</param>
/// <param name="jumpAmount">Amount of time to jump forward.</param>
/// <param name="isValidResult">Predicate to validate the result.</param>
/// <param name="message">Optional failure message.</param>
/// <exception cref="ClockSkewAssertionException">Thrown if assertion fails.</exception>
public static async Task AssertHandlesClockJumpForwardAsync<T>(
SimulatedTimeProvider timeProvider,
Func<Task<T>> operation,
TimeSpan jumpAmount,
Func<T, bool> isValidResult,
string? message = null)
{
// Execute before jump
var beforeJump = await operation();
if (!isValidResult(beforeJump))
{
throw new ClockSkewAssertionException(
$"Operation failed before clock jump. {message}");
}
// Jump forward
timeProvider.Advance(jumpAmount);
// Execute after jump
var afterJump = await operation();
if (!isValidResult(afterJump))
{
throw new ClockSkewAssertionException(
$"Operation failed after forward clock jump of {jumpAmount}. {message}");
}
}
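    // Illustrative usage sketch (not part of the original commit): the 30-minute validity
    // rule and the "issuedAt" variable are hypothetical; the assertion checks that an
    // age calculation still passes after the clock jumps forward.
    //
    //   var clock = new SimulatedTimeProvider(DateTimeOffset.UtcNow);
    //   var issuedAt = clock.GetUtcNow();
    //   await ClockSkewAssertions.AssertHandlesClockJumpForwardAsync(
    //       clock,
    //       () => Task.FromResult(clock.GetUtcNow() - issuedAt),
    //       jumpAmount: TimeSpan.FromMinutes(5),
    //       isValidResult: age => age < TimeSpan.FromMinutes(30));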
/// <summary>
/// Assert that operation handles backward clock jump (NTP correction).
/// </summary>
/// <typeparam name="T">The result type.</typeparam>
/// <param name="timeProvider">The simulated time provider.</param>
/// <param name="operation">The operation to test.</param>
/// <param name="jumpAmount">Amount of time to jump backward.</param>
/// <param name="isValidResult">Predicate to validate the result.</param>
/// <param name="allowFailure">If true, operation may throw instead of returning invalid result.</param>
/// <param name="message">Optional failure message.</param>
/// <exception cref="ClockSkewAssertionException">Thrown if assertion fails unexpectedly.</exception>
public static async Task AssertHandlesClockJumpBackwardAsync<T>(
SimulatedTimeProvider timeProvider,
Func<Task<T>> operation,
TimeSpan jumpAmount,
Func<T, bool> isValidResult,
bool allowFailure = false,
string? message = null)
{
// Execute before jump
var beforeJump = await operation();
if (!isValidResult(beforeJump))
{
throw new ClockSkewAssertionException(
$"Operation failed before clock jump. {message}");
}
// Jump backward
timeProvider.JumpBackward(jumpAmount);
// Execute after jump - may fail or succeed depending on implementation
try
{
var afterJump = await operation();
if (!isValidResult(afterJump))
{
if (!allowFailure)
{
throw new ClockSkewAssertionException(
$"Operation returned invalid result after backward clock jump of {jumpAmount}. {message}");
}
}
}
catch (Exception ex) when (ex is not ClockSkewAssertionException)
{
if (!allowFailure)
{
throw new ClockSkewAssertionException(
$"Operation threw exception after backward clock jump of {jumpAmount}: {ex.Message}. {message}", ex);
}
// If allowFailure is true, swallow the exception as expected behavior
}
}
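    // Illustrative usage sketch (the monotonicity check is a hypothetical system-under-test):
    // with allowFailure: true the assertion tolerates an operation that reports failure once
    // the clock has moved backwards, as many monotonic checks do after an NTP correction.
    //
    //   var clock = new SimulatedTimeProvider(DateTimeOffset.UtcNow);
    //   var lastSeen = DateTimeOffset.MinValue;
    //   await ClockSkewAssertions.AssertHandlesClockJumpBackwardAsync(
    //       clock,
    //       () =>
    //       {
    //           var now = clock.GetUtcNow();
    //           var isMonotonic = now >= lastSeen;
    //           lastSeen = now;
    //           return Task.FromResult(isMonotonic);
    //       },
    //       jumpAmount: TimeSpan.FromMinutes(2),
    //       isValidResult: ok => ok,
    //       allowFailure: true); // the backward jump is allowed to surface as a failure here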
/// <summary>
/// Assert that operation handles clock drift correctly over time.
/// </summary>
/// <typeparam name="T">The result type.</typeparam>
/// <param name="timeProvider">The simulated time provider.</param>
/// <param name="operation">The operation to test.</param>
/// <param name="driftPerSecond">Drift amount per second.</param>
/// <param name="testDuration">Total duration to test over.</param>
/// <param name="stepInterval">Interval between test steps.</param>
/// <param name="isValidResult">Predicate to validate the result.</param>
/// <param name="message">Optional failure message.</param>
/// <returns>Report of the drift test.</returns>
/// <exception cref="ClockSkewAssertionException">Thrown if too many failures occur.</exception>
public static async Task<ClockDriftTestReport> AssertHandlesClockDriftAsync<T>(
SimulatedTimeProvider timeProvider,
Func<Task<T>> operation,
TimeSpan driftPerSecond,
TimeSpan testDuration,
TimeSpan stepInterval,
Func<T, bool> isValidResult,
string? message = null)
{
timeProvider.SetDrift(driftPerSecond);
var elapsed = TimeSpan.Zero;
        var results = new List<ClockDriftStepResult>();
        var totalDriftApplied = TimeSpan.Zero;
try
{
while (elapsed < testDuration)
{
var stepTime = timeProvider.GetUtcNow();
bool succeeded;
string? error = null;
try
{
var result = await operation();
succeeded = isValidResult(result);
if (!succeeded)
{
error = "Invalid result";
}
}
catch (Exception ex)
{
succeeded = false;
error = ex.Message;
}
results.Add(new ClockDriftStepResult(
elapsed,
stepTime,
timeProvider.GetTotalDriftApplied(),
succeeded,
error));
timeProvider.Advance(stepInterval);
elapsed = elapsed.Add(stepInterval);
}
}
        finally
        {
            // Capture the accumulated drift before clearing it; GetTotalDriftApplied()
            // returns zero once the drift rate has been reset.
            totalDriftApplied = timeProvider.GetTotalDriftApplied();
            timeProvider.ClearDrift();
        }
        var report = new ClockDriftTestReport(
            DriftPerSecond: driftPerSecond,
            TestDuration: testDuration,
            Steps: [.. results],
            TotalSteps: results.Count,
            FailedSteps: results.Count(r => !r.Succeeded),
            TotalDriftApplied: totalDriftApplied);
if (report.FailedSteps > 0)
{
var failedAt = results.Where(r => !r.Succeeded).Select(r => r.Elapsed).ToList();
throw new ClockSkewAssertionException(
$"Operation failed under clock drift of {driftPerSecond}/s at: {string.Join(", ", failedAt)}. " +
$"{report.FailedSteps} of {report.TotalSteps} steps failed. {message}");
}
return report;
}
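    // Illustrative usage sketch (the validity predicate is a stand-in): runs the operation
    // every 30 simulated seconds for 5 minutes while the clock gains 10 ms per second and
    // returns a ClockDriftTestReport describing each step.
    //
    //   var clock = new SimulatedTimeProvider(DateTimeOffset.UtcNow);
    //   var report = await ClockSkewAssertions.AssertHandlesClockDriftAsync(
    //       clock,
    //       () => Task.FromResult(clock.GetUtcNow()),
    //       driftPerSecond: TimeSpan.FromMilliseconds(10),
    //       testDuration: TimeSpan.FromMinutes(5),
    //       stepInterval: TimeSpan.FromSeconds(30),
    //       isValidResult: _ => true);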
/// <summary>
/// Assert that two timestamps are within acceptable skew tolerance.
/// </summary>
/// <param name="expected">Expected timestamp.</param>
/// <param name="actual">Actual timestamp.</param>
/// <param name="tolerance">Acceptable tolerance (default: 5 minutes).</param>
/// <param name="message">Optional failure message.</param>
/// <exception cref="ClockSkewAssertionException">Thrown if timestamps differ by more than tolerance.</exception>
public static void AssertTimestampsWithinTolerance(
DateTimeOffset expected,
DateTimeOffset actual,
TimeSpan? tolerance = null,
string? message = null)
{
var maxDiff = tolerance ?? DefaultSkewTolerance;
var diff = (actual - expected).Duration();
if (diff > maxDiff)
{
throw new ClockSkewAssertionException(
$"Timestamps differ by {diff}, which exceeds tolerance of {maxDiff}. " +
$"Expected: {expected:O}, Actual: {actual:O}. {message}");
}
}
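    // Illustrative usage sketch (both timestamps are hypothetical): compares a server-issued
    // timestamp against the locally observed one and fails if they differ by more than 30 s.
    //
    //   var serverIssuedAt = DateTimeOffset.UtcNow;
    //   var clientObservedAt = serverIssuedAt.AddSeconds(12);
    //   ClockSkewAssertions.AssertTimestampsWithinTolerance(
    //       serverIssuedAt,
    //       clientObservedAt,
    //       tolerance: TimeSpan.FromSeconds(30));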
/// <summary>
/// Assert that timestamps are monotonically increasing.
/// </summary>
/// <param name="timestamps">Sequence of timestamps.</param>
/// <param name="allowEqual">If true, equal consecutive timestamps are allowed.</param>
/// <param name="message">Optional failure message.</param>
/// <exception cref="ClockSkewAssertionException">Thrown if timestamps are not monotonic.</exception>
public static void AssertMonotonicTimestamps(
IEnumerable<DateTimeOffset> timestamps,
bool allowEqual = false,
string? message = null)
{
var list = timestamps.ToList();
for (int i = 1; i < list.Count; i++)
{
var prev = list[i - 1];
var curr = list[i];
var violation = allowEqual
? curr < prev
: curr <= prev;
if (violation)
{
throw new ClockSkewAssertionException(
$"Timestamps are not monotonically increasing at index {i}. " +
$"Previous: {prev:O}, Current: {curr:O}. {message}");
}
}
}
/// <summary>
/// Assert that an operation completes within expected time bounds despite clock skew.
/// </summary>
/// <param name="timeProvider">The simulated time provider.</param>
/// <param name="operation">The operation to test.</param>
/// <param name="maxExpectedDuration">Maximum expected duration.</param>
/// <param name="skewAmount">Amount of clock skew to apply during operation.</param>
/// <param name="message">Optional failure message.</param>
public static async Task AssertCompletesWithinBoundsAsync(
SimulatedTimeProvider timeProvider,
Func<Task> operation,
TimeSpan maxExpectedDuration,
TimeSpan skewAmount,
string? message = null)
{
var startTime = timeProvider.GetUtcNow();
// Apply skew midway through operation
var operationTask = operation();
timeProvider.Advance(skewAmount);
await operationTask;
var endTime = timeProvider.GetUtcNow();
var apparentDuration = endTime - startTime;
// The apparent duration includes the skew, so we need to account for it
var actualDuration = apparentDuration - skewAmount;
if (actualDuration > maxExpectedDuration)
{
throw new ClockSkewAssertionException(
$"Operation took {actualDuration} (apparent: {apparentDuration}), " +
$"which exceeds maximum of {maxExpectedDuration}. {message}");
}
}
}
/// <summary>
/// Exception thrown when a clock skew assertion fails.
/// </summary>
public class ClockSkewAssertionException : Exception
{
/// <summary>
/// Initializes a new instance of the <see cref="ClockSkewAssertionException"/> class.
/// </summary>
public ClockSkewAssertionException(string message) : base(message) { }
/// <summary>
/// Initializes a new instance with inner exception.
/// </summary>
public ClockSkewAssertionException(string message, Exception inner) : base(message, inner) { }
}
/// <summary>
/// Report from a clock drift test.
/// </summary>
/// <param name="DriftPerSecond">The drift rate tested.</param>
/// <param name="TestDuration">Total duration of the test.</param>
/// <param name="Steps">Results from each test step.</param>
/// <param name="TotalSteps">Total number of steps executed.</param>
/// <param name="FailedSteps">Number of steps that failed.</param>
/// <param name="TotalDriftApplied">Total amount of drift applied during test.</param>
public sealed record ClockDriftTestReport(
TimeSpan DriftPerSecond,
TimeSpan TestDuration,
ImmutableArray<ClockDriftStepResult> Steps,
int TotalSteps,
int FailedSteps,
TimeSpan TotalDriftApplied)
{
/// <summary>
/// Gets the success rate as a percentage.
/// </summary>
public decimal SuccessRate => TotalSteps > 0
? (decimal)(TotalSteps - FailedSteps) / TotalSteps * 100
: 0;
}
/// <summary>
/// Result of a single step in a clock drift test.
/// </summary>
/// <param name="Elapsed">Elapsed time since test start.</param>
/// <param name="SimulatedTime">The simulated time at this step.</param>
/// <param name="DriftApplied">Total drift applied at this step.</param>
/// <param name="Succeeded">Whether the step succeeded.</param>
/// <param name="Error">Error message if step failed.</param>
public sealed record ClockDriftStepResult(
TimeSpan Elapsed,
DateTimeOffset SimulatedTime,
TimeSpan DriftApplied,
bool Succeeded,
string? Error);

View File

@@ -0,0 +1,343 @@
// <copyright file="IdempotencyVerifier.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Testing.Temporal;
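// Illustrative usage sketch for the verifier defined below (the HashSet stands in for
// whatever persistent state the operation mutates):
//
//   var store = new HashSet<string>();
//   var verifier = new IdempotencyVerifier<int>(() => store.Count);
//   var result = verifier.Verify(() => store.Add("advisory-123"), repetitions: 3);
//   // result.IsIdempotent is true: re-adding the same key leaves the count at 1.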
/// <summary>
/// Framework for verifying idempotency of operations under retry scenarios.
/// Ensures that repeated executions of the same operation produce consistent state.
/// </summary>
/// <typeparam name="TState">The type of state to compare.</typeparam>
public sealed class IdempotencyVerifier<TState> where TState : notnull
{
private readonly Func<TState> _getState;
private readonly IEqualityComparer<TState>? _comparer;
/// <summary>
/// Initializes a new instance of the <see cref="IdempotencyVerifier{TState}"/> class.
/// </summary>
/// <param name="getState">Function to capture current state.</param>
/// <param name="comparer">Optional comparer for state equality.</param>
public IdempotencyVerifier(
Func<TState> getState,
IEqualityComparer<TState>? comparer = null)
{
_getState = getState ?? throw new ArgumentNullException(nameof(getState));
_comparer = comparer;
}
/// <summary>
/// Verify that executing an operation multiple times produces consistent state.
/// </summary>
/// <param name="operation">The operation to execute.</param>
/// <param name="repetitions">Number of times to execute the operation.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Result indicating whether the operation is idempotent.</returns>
public async Task<IdempotencyResult<TState>> VerifyAsync(
Func<Task> operation,
int repetitions = 3,
CancellationToken ct = default)
{
if (repetitions < 2)
{
throw new ArgumentOutOfRangeException(nameof(repetitions), "At least 2 repetitions required");
}
var states = new List<TState>();
var exceptions = new List<IdempotencyException>();
for (int i = 0; i < repetitions; i++)
{
ct.ThrowIfCancellationRequested();
try
{
await operation();
states.Add(_getState());
}
catch (Exception ex)
{
exceptions.Add(new IdempotencyException(i, ex));
}
}
return BuildResult(states, exceptions, repetitions);
}
/// <summary>
/// Verify idempotency with simulated retries including delays.
/// </summary>
/// <param name="operation">The operation to execute.</param>
/// <param name="retryDelays">Delays between retry attempts.</param>
/// <param name="timeProvider">Time provider for simulating delays.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Result indicating whether the operation is idempotent under retries.</returns>
public async Task<IdempotencyResult<TState>> VerifyWithRetriesAsync(
Func<Task> operation,
TimeSpan[] retryDelays,
SimulatedTimeProvider timeProvider,
CancellationToken ct = default)
{
var states = new List<TState>();
var exceptions = new List<IdempotencyException>();
// First attempt
try
{
await operation();
states.Add(_getState());
}
catch (Exception ex)
{
exceptions.Add(new IdempotencyException(0, ex));
}
// Retry attempts with delays
for (int i = 0; i < retryDelays.Length; i++)
{
ct.ThrowIfCancellationRequested();
timeProvider.Advance(retryDelays[i]);
try
{
await operation();
states.Add(_getState());
}
catch (Exception ex)
{
exceptions.Add(new IdempotencyException(i + 1, ex));
}
}
return BuildResult(states, exceptions, retryDelays.Length + 1);
}
/// <summary>
/// Verify idempotency with exponential backoff retry pattern.
/// </summary>
/// <param name="operation">The operation to execute.</param>
/// <param name="maxRetries">Maximum number of retries.</param>
/// <param name="initialDelay">Initial delay before first retry.</param>
/// <param name="timeProvider">Time provider for simulating delays.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Result indicating whether the operation is idempotent.</returns>
public async Task<IdempotencyResult<TState>> VerifyWithExponentialBackoffAsync(
Func<Task> operation,
int maxRetries,
TimeSpan initialDelay,
SimulatedTimeProvider timeProvider,
CancellationToken ct = default)
{
var delays = new TimeSpan[maxRetries];
var currentDelay = initialDelay;
for (int i = 0; i < maxRetries; i++)
{
delays[i] = currentDelay;
currentDelay = TimeSpan.FromTicks(currentDelay.Ticks * 2); // Exponential backoff
}
return await VerifyWithRetriesAsync(operation, delays, timeProvider, ct);
}
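    // Illustrative usage sketch (the "writes" set is a hypothetical side-effect sink): replays
    // the same write with 1 s / 2 s / 4 s of simulated backoff and checks that retries do not
    // create duplicate effects.
    //
    //   var clock = new SimulatedTimeProvider(DateTimeOffset.UtcNow);
    //   var writes = new HashSet<string>();
    //   var verifier = new IdempotencyVerifier<int>(() => writes.Count);
    //   var result = await verifier.VerifyWithExponentialBackoffAsync(
    //       async () => { writes.Add("same-payload"); await Task.CompletedTask; },
    //       maxRetries: 3,
    //       initialDelay: TimeSpan.FromSeconds(1),
    //       clock);
    //   // result.IsIdempotent is true: every retry observes writes.Count == 1.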
/// <summary>
/// Verify idempotency for synchronous operations.
/// </summary>
/// <param name="operation">The synchronous operation to execute.</param>
/// <param name="repetitions">Number of times to execute the operation.</param>
/// <returns>Result indicating whether the operation is idempotent.</returns>
public IdempotencyResult<TState> Verify(
Action operation,
int repetitions = 3)
{
if (repetitions < 2)
{
throw new ArgumentOutOfRangeException(nameof(repetitions), "At least 2 repetitions required");
}
var states = new List<TState>();
var exceptions = new List<IdempotencyException>();
for (int i = 0; i < repetitions; i++)
{
try
{
operation();
states.Add(_getState());
}
catch (Exception ex)
{
exceptions.Add(new IdempotencyException(i, ex));
}
}
return BuildResult(states, exceptions, repetitions);
}
private IdempotencyResult<TState> BuildResult(
List<TState> states,
List<IdempotencyException> exceptions,
int repetitions)
{
var isIdempotent = states.Count > 1 &&
states.Skip(1).All(s => AreEqual(states[0], s));
return new IdempotencyResult<TState>(
IsIdempotent: isIdempotent,
States: [.. states],
Exceptions: [.. exceptions],
Repetitions: repetitions,
FirstState: states.Count > 0 ? states[0] : default,
DivergentStates: FindDivergentStates(states));
}
private bool AreEqual(TState a, TState b) =>
_comparer?.Equals(a, b) ?? EqualityComparer<TState>.Default.Equals(a, b);
private ImmutableArray<DivergentState<TState>> FindDivergentStates(List<TState> states)
{
if (states.Count < 2)
{
return [];
}
var first = states[0];
return states
.Select((s, i) => (Index: i, State: s))
.Where(x => x.Index > 0 && !AreEqual(first, x.State))
.Select(x => new DivergentState<TState>(x.Index, x.State))
.ToImmutableArray();
}
}
/// <summary>
/// Result of idempotency verification.
/// </summary>
/// <typeparam name="TState">The type of state compared.</typeparam>
/// <param name="IsIdempotent">Whether the operation is idempotent.</param>
/// <param name="States">All captured states.</param>
/// <param name="Exceptions">Any exceptions that occurred.</param>
/// <param name="Repetitions">Number of repetitions attempted.</param>
/// <param name="FirstState">The state after first execution.</param>
/// <param name="DivergentStates">States that diverged from the first state.</param>
public sealed record IdempotencyResult<TState>(
bool IsIdempotent,
ImmutableArray<TState> States,
ImmutableArray<IdempotencyException> Exceptions,
int Repetitions,
TState? FirstState,
ImmutableArray<DivergentState<TState>> DivergentStates)
{
/// <summary>
/// Gets whether all executions succeeded (no exceptions).
/// </summary>
public bool AllSucceeded => Exceptions.Length == 0;
/// <summary>
/// Gets the success rate as a decimal between 0 and 1.
/// </summary>
public decimal SuccessRate => Repetitions > 0
? (decimal)States.Length / Repetitions
: 0;
/// <summary>
/// Gets a human-readable summary of the result.
/// </summary>
public string Summary
{
get
{
if (IsIdempotent && AllSucceeded)
{
return $"Idempotent: {Repetitions} executions produced identical state";
}
else if (!AllSucceeded)
{
return $"Not idempotent: {Exceptions.Length} of {Repetitions} executions failed";
}
else
{
return $"Not idempotent: {DivergentStates.Length} of {Repetitions} executions produced different state";
}
}
}
}
/// <summary>
/// Represents a state that diverged from the expected (first) state.
/// </summary>
/// <typeparam name="TState">The type of state.</typeparam>
/// <param name="ExecutionIndex">The index of the execution that produced this state.</param>
/// <param name="State">The divergent state.</param>
public sealed record DivergentState<TState>(
int ExecutionIndex,
TState State);
/// <summary>
/// Represents an exception that occurred during idempotency verification.
/// </summary>
/// <param name="ExecutionIndex">The index of the execution that failed.</param>
/// <param name="Exception">The exception that occurred.</param>
public sealed record IdempotencyException(
int ExecutionIndex,
Exception Exception);
/// <summary>
/// Static factory methods for IdempotencyVerifier.
/// </summary>
public static class IdempotencyVerifier
{
/// <summary>
/// Create a verifier for string state.
/// </summary>
public static IdempotencyVerifier<string> ForString(Func<string> getState) =>
new(getState, StringComparer.Ordinal);
/// <summary>
/// Create a verifier for byte array state (e.g., hashes).
/// </summary>
public static IdempotencyVerifier<byte[]> ForBytes(Func<byte[]> getState) =>
new(getState, ByteArrayComparer.Instance);
/// <summary>
/// Create a verifier that uses JSON serialization for comparison.
/// </summary>
public static IdempotencyVerifier<TState> ForJson<TState>(
Func<TState> getState,
Func<TState, string> serialize) where TState : notnull =>
new(getState, new JsonSerializationComparer<TState>(serialize));
private sealed class ByteArrayComparer : IEqualityComparer<byte[]>
{
public static readonly ByteArrayComparer Instance = new();
public bool Equals(byte[]? x, byte[]? y)
{
if (ReferenceEquals(x, y)) return true;
if (x is null || y is null) return false;
return x.SequenceEqual(y);
}
public int GetHashCode(byte[] obj)
{
if (obj.Length == 0) return 0;
return HashCode.Combine(obj[0], obj.Length, obj[^1]);
}
}
private sealed class JsonSerializationComparer<T>(Func<T, string> serialize) : IEqualityComparer<T>
{
public bool Equals(T? x, T? y)
{
if (ReferenceEquals(x, y)) return true;
if (x is null || y is null) return false;
return serialize(x) == serialize(y);
}
public int GetHashCode(T obj) => serialize(obj).GetHashCode();
}
}

View File

@@ -0,0 +1,256 @@
// <copyright file="LeapSecondTimeProvider.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Testing.Temporal;
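// Illustrative usage sketch: walks a system-under-test through the 2016-12-31 leap second
// and prints each phase (the Console output is only for illustration):
//
//   var provider = LeapSecondTimeProvider.WithHistoricalLeapSeconds(
//       new DateTimeOffset(2016, 12, 31, 23, 0, 0, TimeSpan.Zero));
//   foreach (var moment in provider.AdvanceThroughLeapSecond(new DateOnly(2016, 12, 31)))
//   {
//       Console.WriteLine($"{moment.Phase}: {moment.Time:O}");
//   }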
/// <summary>
/// TimeProvider that can simulate leap second scenarios.
/// Leap seconds are inserted at the end of UTC days, typically June 30 or December 31.
/// </summary>
public sealed class LeapSecondTimeProvider : TimeProvider
{
private readonly SimulatedTimeProvider _inner;
private readonly HashSet<DateOnly> _leapSecondDates;
/// <summary>
/// Known historical leap second dates (UTC).
/// </summary>
public static readonly ImmutableArray<DateOnly> HistoricalLeapSeconds =
[
new DateOnly(2016, 12, 31), // Last positive leap second to date
new DateOnly(2015, 6, 30),
new DateOnly(2012, 6, 30),
new DateOnly(2008, 12, 31),
new DateOnly(2005, 12, 31),
];
/// <summary>
/// Initializes a new instance of the <see cref="LeapSecondTimeProvider"/> class.
/// </summary>
/// <param name="startTime">The initial time.</param>
/// <param name="leapSecondDates">Dates that have leap seconds at the end (midnight UTC).</param>
public LeapSecondTimeProvider(DateTimeOffset startTime, params DateOnly[] leapSecondDates)
{
_inner = new SimulatedTimeProvider(startTime);
_leapSecondDates = [.. leapSecondDates];
}
/// <summary>
/// Creates a provider with historical leap second dates.
/// </summary>
public static LeapSecondTimeProvider WithHistoricalLeapSeconds(DateTimeOffset startTime)
{
return new LeapSecondTimeProvider(startTime, [.. HistoricalLeapSeconds]);
}
/// <inheritdoc/>
public override DateTimeOffset GetUtcNow() => _inner.GetUtcNow();
/// <summary>
/// Advance through a leap second, yielding timestamps including the leap second moment.
/// </summary>
/// <param name="leapSecondDay">The day that has a leap second at the end.</param>
/// <returns>Sequence of timestamps through the leap second.</returns>
/// <remarks>
/// Returns:
/// 1. 23:59:58 - Two seconds before midnight
/// 2. 23:59:59 - One second before midnight
/// 3. 23:59:59 - Leap second (repeated second, common system behavior)
/// 4. 00:00:00 - Midnight of next day
/// </remarks>
public IEnumerable<LeapSecondMoment> AdvanceThroughLeapSecond(DateOnly leapSecondDay)
{
var midnight = new DateTimeOffset(
leapSecondDay.Year,
leapSecondDay.Month,
leapSecondDay.Day,
0, 0, 0, TimeSpan.Zero).AddDays(1);
// Position just before midnight
_inner.JumpTo(midnight.AddSeconds(-2));
yield return new LeapSecondMoment(
_inner.GetUtcNow(),
LeapSecondPhase.TwoSecondsBefore,
"23:59:58");
_inner.Advance(TimeSpan.FromSeconds(1));
yield return new LeapSecondMoment(
_inner.GetUtcNow(),
LeapSecondPhase.OneSecondBefore,
"23:59:59");
// Leap second - system might report 23:59:60 or repeat 23:59:59
// Most systems repeat 23:59:59 (smear or step)
yield return new LeapSecondMoment(
_inner.GetUtcNow(), // Same time - this is the leap second
LeapSecondPhase.LeapSecond,
"23:59:60 (or repeated 23:59:59)");
_inner.Advance(TimeSpan.FromSeconds(1));
yield return new LeapSecondMoment(
_inner.GetUtcNow(),
LeapSecondPhase.AfterLeapSecond,
"00:00:00 next day");
}
/// <summary>
/// Simulate Google-style leap second smearing over 24 hours.
/// </summary>
/// <param name="leapSecondDay">The day that has a leap second.</param>
/// <param name="smearWindow">Total smear window (default 24 hours).</param>
/// <returns>A time provider that applies smearing.</returns>
public SmearingTimeProvider CreateSmearingProvider(
DateOnly leapSecondDay,
TimeSpan? smearWindow = null)
{
return new SmearingTimeProvider(_inner, leapSecondDay, smearWindow ?? TimeSpan.FromHours(24));
}
/// <summary>
/// Advance time by specified duration.
/// </summary>
public void Advance(TimeSpan duration) => _inner.Advance(duration);
/// <summary>
/// Jump to specific time.
/// </summary>
public void JumpTo(DateTimeOffset target) => _inner.JumpTo(target);
/// <summary>
/// Check if a date has a leap second.
/// </summary>
public bool HasLeapSecond(DateOnly date) => _leapSecondDates.Contains(date);
}
/// <summary>
/// Represents a moment during leap second transition.
/// </summary>
public sealed record LeapSecondMoment(
DateTimeOffset Time,
LeapSecondPhase Phase,
string Description);
/// <summary>
/// Phase of leap second transition.
/// </summary>
public enum LeapSecondPhase
{
/// <summary>Two seconds before the leap second.</summary>
TwoSecondsBefore,
/// <summary>One second before the leap second.</summary>
OneSecondBefore,
/// <summary>The leap second itself (23:59:60 or repeated 23:59:59).</summary>
LeapSecond,
/// <summary>After the leap second (00:00:00 next day).</summary>
AfterLeapSecond
}
/// <summary>
/// TimeProvider that applies leap second smearing over a window.
/// </summary>
public sealed class SmearingTimeProvider : TimeProvider
{
private readonly SimulatedTimeProvider _inner;
private readonly DateOnly _leapSecondDay;
private readonly TimeSpan _smearWindow;
private readonly DateTimeOffset _smearStart;
private readonly DateTimeOffset _smearEnd;
/// <summary>
/// Initializes a new instance of the <see cref="SmearingTimeProvider"/> class.
/// </summary>
/// <param name="inner">The underlying time provider.</param>
/// <param name="leapSecondDay">The day that has a leap second.</param>
/// <param name="smearWindow">The total smear window duration.</param>
public SmearingTimeProvider(
SimulatedTimeProvider inner,
DateOnly leapSecondDay,
TimeSpan smearWindow)
{
_inner = inner;
_leapSecondDay = leapSecondDay;
_smearWindow = smearWindow;
var midnight = new DateTimeOffset(
leapSecondDay.Year,
leapSecondDay.Month,
leapSecondDay.Day,
0, 0, 0, TimeSpan.Zero).AddDays(1);
_smearStart = midnight.Subtract(smearWindow / 2);
_smearEnd = midnight.Add(smearWindow / 2);
}
/// <inheritdoc/>
public override DateTimeOffset GetUtcNow()
{
var innerTime = _inner.GetUtcNow();
// If outside smear window, return normal time
if (innerTime < _smearStart || innerTime > _smearEnd)
{
return innerTime;
}
        // Calculate how far we are through the smear window (0.0 at the start, 1.0 at the end).
        // The single leap second is absorbed linearly across this window.
        var progress = (innerTime - _smearStart).TotalMilliseconds / _smearWindow.TotalMilliseconds;
        // During the first half of the window the reported clock runs slow (growing offset);
        // during the second half it catches back up (shrinking offset).
var midnight = new DateTimeOffset(
_leapSecondDay.Year,
_leapSecondDay.Month,
_leapSecondDay.Day,
0, 0, 0, TimeSpan.Zero).AddDays(1);
if (innerTime < midnight)
{
// Before midnight: time runs slow (subtract partial second)
return innerTime.Subtract(TimeSpan.FromSeconds(progress));
}
else
{
// After midnight: time catches up (subtract diminishing offset)
var remaining = 1.0 - progress;
return innerTime.Subtract(TimeSpan.FromSeconds(remaining));
}
}
/// <summary>
/// Gets whether smearing is currently active.
/// </summary>
public bool IsSmearingActive
{
get
{
var now = _inner.GetUtcNow();
return now >= _smearStart && now <= _smearEnd;
}
}
/// <summary>
/// Gets the current smear offset being applied.
/// </summary>
public TimeSpan CurrentSmearOffset
{
get
{
var innerTime = _inner.GetUtcNow();
if (innerTime < _smearStart || innerTime > _smearEnd)
{
return TimeSpan.Zero;
}
var progress = (innerTime - _smearStart).TotalMilliseconds / _smearWindow.TotalMilliseconds;
return TimeSpan.FromSeconds(progress > 0.5 ? 1.0 - progress : progress);
}
}
}

View File

@@ -0,0 +1,251 @@
// <copyright file="SimulatedTimeProvider.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Testing.Temporal;
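// Illustrative usage sketch: the provider is handed to code that accepts a TimeProvider,
// and the test then scripts forward progress, an NTP-style correction, and drift:
//
//   var clock = new SimulatedTimeProvider(new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero));
//   clock.Advance(TimeSpan.FromMinutes(30));        // normal forward progression
//   clock.JumpBackward(TimeSpan.FromMinutes(5));    // simulate an NTP correction backwards
//   clock.SetDrift(TimeSpan.FromMilliseconds(10));  // clock gains 10 ms per advanced second
//   clock.Advance(TimeSpan.FromSeconds(100));       // advances 100 s plus 1 s of drift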
/// <summary>
/// TimeProvider that supports time progression, jumps, drift simulation, and clock anomalies.
/// Provides FakeTimeProvider-style control plus additional capabilities for testing temporal edge cases.
/// </summary>
public sealed class SimulatedTimeProvider : TimeProvider
{
private readonly object _lock = new();
private DateTimeOffset _currentTime;
private TimeSpan _driftPerSecond = TimeSpan.Zero;
private readonly List<TimeJump> _jumpHistory = [];
/// <summary>
/// Initializes a new instance of the <see cref="SimulatedTimeProvider"/> class.
/// </summary>
/// <param name="startTime">The initial time.</param>
public SimulatedTimeProvider(DateTimeOffset startTime)
{
_currentTime = startTime;
}
/// <summary>
/// Initializes a new instance with current UTC time.
/// </summary>
public SimulatedTimeProvider()
: this(DateTimeOffset.UtcNow)
{
}
/// <summary>
/// Gets the current simulated UTC time.
/// </summary>
public override DateTimeOffset GetUtcNow()
{
lock (_lock)
{
return _currentTime;
}
}
/// <summary>
/// Gets the history of time jumps for debugging/assertion purposes.
/// </summary>
public ImmutableArray<TimeJump> JumpHistory
{
get
{
lock (_lock)
{
return [.. _jumpHistory];
}
}
}
/// <summary>
/// Gets the current drift rate per real second.
/// </summary>
public TimeSpan DriftPerSecond
{
get
{
lock (_lock)
{
return _driftPerSecond;
}
}
}
/// <summary>
/// Advance time by specified duration, applying any configured drift.
/// </summary>
/// <param name="duration">The duration to advance.</param>
public void Advance(TimeSpan duration)
{
if (duration < TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(duration), "Use JumpBackward for negative time changes");
}
lock (_lock)
{
var previousTime = _currentTime;
_currentTime = _currentTime.Add(duration);
// Apply drift if configured
if (_driftPerSecond != TimeSpan.Zero)
{
var driftAmount = TimeSpan.FromTicks(
(long)(_driftPerSecond.Ticks * duration.TotalSeconds));
_currentTime = _currentTime.Add(driftAmount);
}
_jumpHistory.Add(new TimeJump(
JumpType.Advance,
previousTime,
_currentTime,
duration));
}
}
/// <summary>
/// Jump to specific time (simulates clock correction/NTP sync).
/// </summary>
/// <param name="target">The target time to jump to.</param>
public void JumpTo(DateTimeOffset target)
{
lock (_lock)
{
var previousTime = _currentTime;
var delta = target - _currentTime;
_currentTime = target;
_jumpHistory.Add(new TimeJump(
delta >= TimeSpan.Zero ? JumpType.JumpForward : JumpType.JumpBackward,
previousTime,
_currentTime,
delta));
}
}
/// <summary>
/// Simulate clock going backwards (NTP correction scenario).
/// </summary>
/// <param name="duration">The amount to jump backward.</param>
public void JumpBackward(TimeSpan duration)
{
if (duration < TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(duration), "Duration must be positive");
}
lock (_lock)
{
var previousTime = _currentTime;
_currentTime = _currentTime.Subtract(duration);
_jumpHistory.Add(new TimeJump(
JumpType.JumpBackward,
previousTime,
_currentTime,
-duration));
}
}
/// <summary>
/// Configure clock drift rate.
/// </summary>
/// <param name="driftPerRealSecond">Drift amount per real second. Positive = fast, negative = slow.</param>
public void SetDrift(TimeSpan driftPerRealSecond)
{
lock (_lock)
{
_driftPerSecond = driftPerRealSecond;
}
}
/// <summary>
/// Clear drift configuration.
/// </summary>
public void ClearDrift()
{
lock (_lock)
{
_driftPerSecond = TimeSpan.Zero;
}
}
/// <summary>
/// Simulate time standing still (frozen clock scenario).
/// </summary>
/// <param name="action">Action to execute while time is frozen.</param>
public async Task WithFrozenTimeAsync(Func<Task> action)
{
// Time doesn't advance automatically, so just execute the action
// This is useful for documenting intent in tests
await action();
}
/// <summary>
/// Reset jump history.
/// </summary>
public void ClearHistory()
{
lock (_lock)
{
_jumpHistory.Clear();
}
}
/// <summary>
/// Check if time has ever jumped backward.
/// </summary>
public bool HasJumpedBackward()
{
lock (_lock)
{
return _jumpHistory.Any(j => j.JumpType == JumpType.JumpBackward);
}
}
/// <summary>
/// Get total drift applied.
/// </summary>
public TimeSpan GetTotalDriftApplied()
{
lock (_lock)
{
if (_driftPerSecond == TimeSpan.Zero)
{
return TimeSpan.Zero;
}
var totalAdvanced = _jumpHistory
.Where(j => j.JumpType == JumpType.Advance)
.Sum(j => j.Delta.TotalSeconds);
return TimeSpan.FromTicks((long)(_driftPerSecond.Ticks * totalAdvanced));
}
}
}
/// <summary>
/// Represents a time jump event.
/// </summary>
public sealed record TimeJump(
JumpType JumpType,
DateTimeOffset Before,
DateTimeOffset After,
TimeSpan Delta);
/// <summary>
/// Type of time jump.
/// </summary>
public enum JumpType
{
/// <summary>Normal time advancement.</summary>
Advance,
/// <summary>Forward jump (e.g., NTP sync forward).</summary>
JumpForward,
/// <summary>Backward jump (e.g., NTP correction backward).</summary>
JumpBackward
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>true</IsPackable>
<Description>Temporal testing utilities for time-skew simulation, idempotency verification, and temporal edge case testing</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,185 @@
// <copyright file="TtlBoundaryTimeProvider.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Testing.Temporal;
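// Illustrative usage sketch (the 15-minute TTL is a stand-in for the cache policy under test):
//
//   var createdAt = new DateTimeOffset(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
//   var clock = new TtlBoundaryTimeProvider(createdAt);
//   clock.PositionJustBeforeExpiry(createdAt, TimeSpan.FromMinutes(15)); // entry must still be valid
//   clock.PositionJustAfterExpiry(createdAt, TimeSpan.FromMinutes(15));  // entry must now be expired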
/// <summary>
/// TimeProvider specialized for testing TTL/expiry boundary conditions.
/// Provides convenient methods for positioning time at exact boundaries.
/// </summary>
public sealed class TtlBoundaryTimeProvider : TimeProvider
{
private readonly SimulatedTimeProvider _inner;
/// <summary>
/// Initializes a new instance of the <see cref="TtlBoundaryTimeProvider"/> class.
/// </summary>
/// <param name="startTime">The initial time.</param>
public TtlBoundaryTimeProvider(DateTimeOffset startTime)
{
_inner = new SimulatedTimeProvider(startTime);
}
/// <inheritdoc/>
public override DateTimeOffset GetUtcNow() => _inner.GetUtcNow();
/// <summary>
/// Position time exactly at TTL expiry boundary.
/// </summary>
/// <param name="itemCreatedAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
public void PositionAtExpiryBoundary(DateTimeOffset itemCreatedAt, TimeSpan ttl)
{
var expiryTime = itemCreatedAt.Add(ttl);
_inner.JumpTo(expiryTime);
}
/// <summary>
/// Position time 1ms before expiry (should be valid).
/// </summary>
/// <param name="itemCreatedAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
public void PositionJustBeforeExpiry(DateTimeOffset itemCreatedAt, TimeSpan ttl)
{
var expiryTime = itemCreatedAt.Add(ttl).AddMilliseconds(-1);
_inner.JumpTo(expiryTime);
}
/// <summary>
/// Position time 1ms after expiry (should be expired).
/// </summary>
/// <param name="itemCreatedAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
public void PositionJustAfterExpiry(DateTimeOffset itemCreatedAt, TimeSpan ttl)
{
var expiryTime = itemCreatedAt.Add(ttl).AddMilliseconds(1);
_inner.JumpTo(expiryTime);
}
/// <summary>
/// Position time 1 tick before expiry (minimum valid time).
/// </summary>
/// <param name="itemCreatedAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
public void PositionOneTickBeforeExpiry(DateTimeOffset itemCreatedAt, TimeSpan ttl)
{
var expiryTime = itemCreatedAt.Add(ttl).AddTicks(-1);
_inner.JumpTo(expiryTime);
}
/// <summary>
/// Position time 1 tick after expiry (minimum expired time).
/// </summary>
/// <param name="itemCreatedAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
public void PositionOneTickAfterExpiry(DateTimeOffset itemCreatedAt, TimeSpan ttl)
{
var expiryTime = itemCreatedAt.Add(ttl).AddTicks(1);
_inner.JumpTo(expiryTime);
}
/// <summary>
/// Generate boundary test cases for a given TTL.
/// </summary>
/// <param name="createdAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
/// <returns>Enumerable of test cases with name, time, and expected validity.</returns>
public static IEnumerable<TtlBoundaryTestCase> GenerateBoundaryTestCases(
DateTimeOffset createdAt,
TimeSpan ttl)
{
var expiry = createdAt.Add(ttl);
yield return new TtlBoundaryTestCase(
"1 tick before expiry",
expiry.AddTicks(-1),
ShouldBeExpired: false);
yield return new TtlBoundaryTestCase(
"Exactly at expiry",
expiry,
ShouldBeExpired: true); // Edge case - typically expired
yield return new TtlBoundaryTestCase(
"1 tick after expiry",
expiry.AddTicks(1),
ShouldBeExpired: true);
yield return new TtlBoundaryTestCase(
"1ms before expiry",
expiry.AddMilliseconds(-1),
ShouldBeExpired: false);
yield return new TtlBoundaryTestCase(
"1ms after expiry",
expiry.AddMilliseconds(1),
ShouldBeExpired: true);
yield return new TtlBoundaryTestCase(
"1 second before expiry",
expiry.AddSeconds(-1),
ShouldBeExpired: false);
yield return new TtlBoundaryTestCase(
"1 second after expiry",
expiry.AddSeconds(1),
ShouldBeExpired: true);
yield return new TtlBoundaryTestCase(
"Halfway through TTL",
createdAt.Add(ttl / 2),
ShouldBeExpired: false);
yield return new TtlBoundaryTestCase(
"Just created",
createdAt,
ShouldBeExpired: false);
yield return new TtlBoundaryTestCase(
"Well past expiry (2x TTL)",
createdAt.Add(ttl + ttl),
ShouldBeExpired: true);
}
/// <summary>
/// Generate test data for xUnit Theory.
/// </summary>
/// <param name="createdAt">When the item was created.</param>
/// <param name="ttl">The TTL duration.</param>
/// <returns>Test data as object arrays for MemberData.</returns>
public static IEnumerable<object[]> GenerateTheoryData(
DateTimeOffset createdAt,
TimeSpan ttl)
{
foreach (var testCase in GenerateBoundaryTestCases(createdAt, ttl))
{
yield return [testCase.Name, testCase.Time, testCase.ShouldBeExpired];
}
}
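    // Illustrative xUnit wiring sketch (CreatedAt and the 15-minute TTL are hypothetical constants
    // in the consuming test class):
    //
    //   public static IEnumerable<object[]> TtlCases() =>
    //       TtlBoundaryTimeProvider.GenerateTheoryData(CreatedAt, TimeSpan.FromMinutes(15));
    //
    //   [Theory]
    //   [MemberData(nameof(TtlCases))]
    //   public void Entry_Honors_Ttl(string name, DateTimeOffset now, bool shouldBeExpired)
    //   {
    //       var isExpired = now >= CreatedAt.Add(TimeSpan.FromMinutes(15));
    //       isExpired.Should().Be(shouldBeExpired, name);
    //   }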
/// <summary>
/// Advance time by specified duration.
/// </summary>
/// <param name="duration">The duration to advance.</param>
public void Advance(TimeSpan duration) => _inner.Advance(duration);
/// <summary>
/// Jump to specific time.
/// </summary>
/// <param name="target">The target time.</param>
public void JumpTo(DateTimeOffset target) => _inner.JumpTo(target);
}
/// <summary>
/// Represents a TTL boundary test case.
/// </summary>
/// <param name="Name">Human-readable name of the test case.</param>
/// <param name="Time">The time to test at.</param>
/// <param name="ShouldBeExpired">Whether the item should be expired at this time.</param>
public sealed record TtlBoundaryTestCase(
string Name,
DateTimeOffset Time,
bool ShouldBeExpired);