Test fixes and new product advisories work

This commit is contained in:
master
2026-01-28 02:30:48 +02:00
parent 82caceba56
commit 644887997c
288 changed files with 69101 additions and 375 deletions

View File

@@ -0,0 +1,308 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Probes an OCI registry for a matrix of optional capabilities: referrers API
/// (OCI Distribution Spec v1.1), chunked blob upload, cross-repository blob
/// mount, and manifest/blob deletion. All probes are non-destructive (fake
/// digests, immediately-cancelled upload sessions) and each tolerates failure
/// individually, reporting "unknown" rather than failing the whole check.
/// </summary>
public sealed class RegistryCapabilityProbeCheck : IDoctorCheck
{
    /// <inheritdoc />
    public string CheckId => "check.integration.oci.capabilities";

    /// <inheritdoc />
    public string Name => "OCI Registry Capability Matrix";

    /// <inheritdoc />
    public string Description => "Detect and report registry capabilities for OCI compliance";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Info;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["registry", "oci", "capabilities", "compatibility"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(15);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Runnable only when a registry endpoint is configured; OCI:* keys
        // take precedence over the legacy Registry:* keys throughout.
        var registryUrl = context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url");
        return !string.IsNullOrEmpty(registryUrl);
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // CanRun guarantees one of these keys is set, hence the null-forgiving
        // operator; trailing slash is trimmed so URL concatenation below is safe.
        var registryUrl = (context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url"))!.TrimEnd('/');
        var testRepo = context.Configuration.GetValue<string>("OCI:TestRepository")
            ?? context.Configuration.GetValue<string>("Registry:TestRepository")
            ?? "library/alpine";
        var builder = context.CreateResult(CheckId, "stellaops.doctor.integration", DoctorCategory.Integration.ToString());
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder.Skip("IHttpClientFactory not available").Build();
        }
        try
        {
            using var httpClient = httpClientFactory.CreateClient();
            httpClient.Timeout = TimeSpan.FromSeconds(10);
            ApplyAuthentication(context, httpClient);

            // Run every probe; each returns null ("unknown") when inconclusive.
            var distributionVersion = await ProbeDistributionVersionAsync(httpClient, registryUrl, ct);
            var supportsReferrersApi = await ProbeReferrersApiAsync(httpClient, registryUrl, testRepo, ct);
            var supportsChunkedUpload = await ProbeChunkedUploadAsync(httpClient, registryUrl, testRepo, ct);
            var supportsCrossRepoMount = await ProbeCrossRepoMountAsync(httpClient, registryUrl, testRepo, ct);
            var supportsManifestDelete = await ProbeDeleteSupportAsync(httpClient, registryUrl, testRepo, "manifests", ct);
            var supportsBlobDelete = await ProbeDeleteSupportAsync(httpClient, registryUrl, testRepo, "blobs", ct);

            // Only confirmed capabilities (true) count toward the score;
            // null (unknown) is treated the same as unsupported here.
            var supportedCount = new[] {
                supportsReferrersApi,
                supportsChunkedUpload,
                supportsCrossRepoMount,
                supportsManifestDelete,
                supportsBlobDelete
            }.Count(c => c == true);
            var totalCapabilities = 5;
            var capabilityScore = $"{supportedCount}/{totalCapabilities}";

            // Missing referrers API is the only gap worth a warning; any other
            // missing capability only downgrades Pass to Info.
            var severity = DoctorSeverity.Pass;
            var diagnosis = $"Registry supports {supportedCount} of {totalCapabilities} probed capabilities";
            if (supportsReferrersApi == false)
            {
                severity = DoctorSeverity.Warn;
                diagnosis = $"Registry missing referrers API support ({supportedCount}/{totalCapabilities} capabilities)";
            }
            else if (supportedCount < totalCapabilities)
            {
                severity = DoctorSeverity.Info;
            }
            return builder
                .WithSeverity(severity, diagnosis)
                .WithEvidence("Registry Capabilities", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("distribution_version", distributionVersion ?? "unknown")
                    .Add("supports_referrers_api", FormatBool(supportsReferrersApi))
                    .Add("supports_chunked_upload", FormatBool(supportsChunkedUpload))
                    .Add("supports_cross_repo_mount", FormatBool(supportsCrossRepoMount))
                    .Add("supports_manifest_delete", FormatBool(supportsManifestDelete))
                    .Add("supports_blob_delete", FormatBool(supportsBlobDelete))
                    .Add("capability_score", capabilityScore))
                .Build();
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // Only HttpClient's own timeout lands here; genuine caller
            // cancellation (ct) propagates instead of being misreported.
            return builder
                .Warn("Registry capability probe timed out")
                .WithEvidence("Registry Capabilities", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", "Connection timeout"))
                .Build();
        }
        catch (HttpRequestException ex)
        {
            return builder
                .Fail($"Cannot reach registry: {ex.Message}")
                .WithEvidence("Registry Capabilities", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", ex.Message))
                .Build();
        }
    }

    /// <summary>Renders a tri-state capability flag as "true"/"false"/"unknown".</summary>
    private static string FormatBool(bool? value) => value switch
    {
        true => "true",
        false => "false",
        null => "unknown"
    };

    /// <summary>
    /// Applies Basic auth from OCI:/Registry: username+password when both are
    /// set; a configured bearer token is applied last and therefore wins.
    /// </summary>
    private static void ApplyAuthentication(DoctorPluginContext context, HttpClient httpClient)
    {
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
        {
            var credentials = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Basic", credentials);
        }
        var bearerToken = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        if (!string.IsNullOrEmpty(bearerToken))
        {
            // Deliberately overwrites Basic auth: bearer token takes precedence.
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", bearerToken);
        }
    }

    /// <summary>
    /// Reads the distribution API version from the /v2/ endpoint headers.
    /// Returns null when the endpoint is unreachable or reports an error.
    /// </summary>
    private static async Task<string?> ProbeDistributionVersionAsync(
        HttpClient httpClient,
        string registryUrl,
        CancellationToken ct)
    {
        try
        {
            using var response = await httpClient.GetAsync($"{registryUrl}/v2/", ct);
            if (response.Headers.TryGetValues("OCI-Distribution-API-Version", out var versions))
            {
                return string.Join(", ", versions);
            }
            if (response.Headers.TryGetValues("Docker-Distribution-API-Version", out var dockerVersions))
            {
                return $"Docker: {string.Join(", ", dockerVersions)}";
            }
            // A successful /v2/ with no version header implies at least v1.0.
            return response.IsSuccessStatusCode ? "1.0 (assumed)" : null;
        }
        catch (Exception) when (!ct.IsCancellationRequested)
        {
            // Best-effort probe: any non-cancellation failure means "unknown".
            return null;
        }
    }

    /// <summary>
    /// Probes the OCI 1.1 referrers endpoint with an all-zero digest.
    /// A 200, or a 404 whose body is an OCI image index, means supported;
    /// a plain 404 or 405 means unsupported; anything else is unknown (null).
    /// </summary>
    private static async Task<bool?> ProbeReferrersApiAsync(
        HttpClient httpClient,
        string registryUrl,
        string testRepo,
        CancellationToken ct)
    {
        try
        {
            var fakeDigest = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
            using var request = new HttpRequestMessage(HttpMethod.Get, $"{registryUrl}/v2/{testRepo}/referrers/{fakeDigest}");
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.index.v1+json"));
            using var response = await httpClient.SendAsync(request, ct);
            if (response.StatusCode == HttpStatusCode.OK)
                return true;
            if (response.StatusCode == HttpStatusCode.NotFound)
            {
                // Some registries 404 for an unknown digest but still serve an
                // (empty) index body — that counts as referrers support.
                var contentType = response.Content.Headers.ContentType?.MediaType;
                if (contentType?.Contains("oci") == true || contentType?.Contains("json") == true)
                {
                    var content = await response.Content.ReadAsStringAsync(ct);
                    if (content.Contains("\"schemaVersion\"") || content.Contains("manifests"))
                        return true;
                }
                return false;
            }
            if (response.StatusCode == HttpStatusCode.MethodNotAllowed)
                return false;
            return null;
        }
        catch (Exception) when (!ct.IsCancellationRequested)
        {
            return null;
        }
    }

    /// <summary>
    /// Initiates a blob upload session; 202 Accepted means chunked upload is
    /// supported. The session is cancelled immediately (best effort) so no
    /// data is ever pushed. 401 is inconclusive (null).
    /// </summary>
    private static async Task<bool?> ProbeChunkedUploadAsync(
        HttpClient httpClient,
        string registryUrl,
        string testRepo,
        CancellationToken ct)
    {
        try
        {
            using var response = await httpClient.PostAsync($"{registryUrl}/v2/{testRepo}/blobs/uploads/", null, ct);
            if (response.StatusCode == HttpStatusCode.Accepted)
            {
                var location = response.Headers.Location;
                if (location != null)
                {
                    try { await httpClient.DeleteAsync(location, ct); } catch { /* Ignore cleanup errors */ }
                }
                return true;
            }
            return response.StatusCode == HttpStatusCode.Unauthorized ? null : false;
        }
        catch (Exception) when (!ct.IsCancellationRequested)
        {
            return null;
        }
    }

    /// <summary>
    /// Attempts a cross-repository blob mount with a fake digest; 201/202
    /// indicates mount support. Any other status is treated as unsupported.
    /// </summary>
    private static async Task<bool?> ProbeCrossRepoMountAsync(
        HttpClient httpClient,
        string registryUrl,
        string testRepo,
        CancellationToken ct)
    {
        try
        {
            var fakeDigest = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
            var mountUrl = $"{registryUrl}/v2/{testRepo}/blobs/uploads/?mount={fakeDigest}&from=library/alpine";
            using var response = await httpClient.PostAsync(mountUrl, null, ct);
            return response.StatusCode is HttpStatusCode.Created or HttpStatusCode.Accepted;
        }
        catch (Exception) when (!ct.IsCancellationRequested)
        {
            return null;
        }
    }

    /// <summary>
    /// Uses an OPTIONS request to inspect the Allow header for DELETE support
    /// on the given resource type ("manifests" or "blobs"). Registries that do
    /// not answer OPTIONS with an Allow header yield "unknown" (null).
    /// </summary>
    private static async Task<bool?> ProbeDeleteSupportAsync(
        HttpClient httpClient,
        string registryUrl,
        string testRepo,
        string resourceType,
        CancellationToken ct)
    {
        try
        {
            // Manifests are addressed by tag, blobs by digest.
            var fakeRef = resourceType == "manifests" ? "nonexistent" : "sha256:0000000000000000000000000000000000000000000000000000000000000000";
            using var request = new HttpRequestMessage(HttpMethod.Options, $"{registryUrl}/v2/{testRepo}/{resourceType}/{fakeRef}");
            using var response = await httpClient.SendAsync(request, ct);
            if (response.Headers.TryGetValues("Allow", out var allowedMethods))
            {
                var methods = string.Join(",", allowedMethods).ToUpperInvariant();
                return methods.Contains("DELETE");
            }
            return null;
        }
        catch (Exception) when (!ct.IsCancellationRequested)
        {
            return null;
        }
    }
}

View File

@@ -0,0 +1,237 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Validates registry credential configuration (basic auth or bearer token)
/// and verifies them against the registry's /v2/ endpoint. Secrets are never
/// emitted verbatim into evidence; see <see cref="Redact"/>.
/// </summary>
public sealed class RegistryCredentialsCheck : IDoctorCheck
{
    /// <inheritdoc />
    public string CheckId => "check.integration.oci.credentials";

    /// <inheritdoc />
    public string Name => "OCI Registry Credentials";

    /// <inheritdoc />
    public string Description => "Validate registry credentials configuration and token expiry";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Fail;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["registry", "oci", "credentials", "secrets", "auth"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Runnable only when a registry endpoint is configured (OCI:* wins).
        var registryUrl = context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url");
        return !string.IsNullOrEmpty(registryUrl);
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // CanRun guarantees the URL is set; normalize trailing slash.
        var registryUrl = (context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url"))!.TrimEnd('/');
        var builder = context.CreateResult(CheckId, "stellaops.doctor.integration", DoctorCategory.Integration.ToString());

        // Determine the auth method: bearer token > basic > anonymous.
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        var token = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        string authMethod;
        var hasCredentials = false;
        if (!string.IsNullOrEmpty(token))
        {
            authMethod = "bearer";
            hasCredentials = true;
        }
        else if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
        {
            authMethod = "basic";
            hasCredentials = true;
        }
        else if (!string.IsNullOrEmpty(username))
        {
            // Half-configured basic auth is always a misconfiguration;
            // fail fast before touching the network. The username is
            // redacted like every other credential field in evidence.
            return builder
                .Fail("Invalid credential configuration: username provided without password")
                .WithEvidence("Credentials", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("auth_method", "incomplete")
                    .Add("username", Redact(username))
                    .Add("password", "(not set)")
                    .Add("token", "(not set)"))
                .WithCauses(
                    "Password is missing from configuration",
                    "Password secret reference may not have resolved")
                .WithRemediation(rb => rb
                    .AddManualStep(1, "Add password",
                        "Configure OCI:Password or Registry:Password")
                    .AddManualStep(2, "Check secret resolution",
                        "If using secret references, verify they resolve correctly"))
                .Build();
        }
        else
        {
            authMethod = "anonymous";
            hasCredentials = false;
        }

        // Validate credentials by attempting /v2/ authentication.
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder.Skip("IHttpClientFactory not available").Build();
        }
        try
        {
            using var httpClient = httpClientFactory.CreateClient();
            httpClient.Timeout = TimeSpan.FromSeconds(10);

            // Apply whichever auth method was resolved above.
            if (authMethod == "bearer" && !string.IsNullOrEmpty(token))
            {
                httpClient.DefaultRequestHeaders.Authorization =
                    new AuthenticationHeaderValue("Bearer", token);
            }
            else if (authMethod == "basic" && !string.IsNullOrEmpty(username))
            {
                var credentials = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
                httpClient.DefaultRequestHeaders.Authorization =
                    new AuthenticationHeaderValue("Basic", credentials);
            }
            using var response = await httpClient.GetAsync($"{registryUrl}/v2/", ct);
            if (response.StatusCode == HttpStatusCode.OK)
            {
                return builder
                    .Pass("Registry credentials are valid")
                    .WithEvidence("Credentials", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("auth_method", authMethod)
                        .Add("username", Redact(username))
                        .Add("password", Redact(password))
                        .Add("token_valid", hasCredentials ? "true" : "n/a"))
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                // Check if this is a token exchange scenario (OAuth2 registries):
                // such registries 401 on /v2/ and advertise Bearer in
                // WWW-Authenticate even when basic credentials are usable
                // for the token exchange itself.
                if (response.Headers.WwwAuthenticate.Any())
                {
                    var wwwAuth = response.Headers.WwwAuthenticate.First().ToString();
                    if (wwwAuth.Contains("Bearer") && authMethod == "basic")
                    {
                        // NOTE(review): the token exchange is not actually
                        // performed here, so this Pass assumes the basic
                        // credentials would be accepted by the token service.
                        return builder
                            .Pass("Registry credentials are valid (OAuth2 token exchange required)")
                            .WithEvidence("Credentials", eb => eb
                                .Add("registry_url", registryUrl)
                                .Add("auth_method", authMethod)
                                .Add("token_exchange", "required")
                                .Add("username", Redact(username)))
                            .Build();
                    }
                }
                return builder
                    .Fail("Registry credentials validation failed: Authentication rejected")
                    .WithEvidence("Credentials", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("auth_method", authMethod)
                        .Add("username", Redact(username))
                        .Add("password", Redact(password))
                        .Add("token_valid", "false")
                        .Add("validation_error", "401 Unauthorized"))
                    .WithCauses(
                        "Credentials are invalid",
                        "Token has been revoked",
                        "Username/password combination incorrect")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Verify credentials",
                            "Check that username and password are correct")
                        .AddStep(2, "Test with docker CLI",
                            $"docker login {new Uri(registryUrl).Host}",
                            CommandType.Shell)
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-auth-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Forbidden)
            {
                return builder
                    .Fail("Registry credentials validation failed: Access forbidden")
                    .WithEvidence("Credentials", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("auth_method", authMethod)
                        .Add("token_valid", "false")
                        .Add("validation_error", "403 Forbidden"))
                    .WithCauses(
                        "Credentials valid but access is forbidden",
                        "IP address or network not allowed")
                    .Build();
            }
            // For other status codes (e.g. redirects, 5xx), assume valid:
            // the registry responded, and the failure is not an auth rejection.
            return builder
                .Pass("Registry credentials appear valid")
                .WithEvidence("Credentials", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("auth_method", authMethod)
                    .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture)))
                .Build();
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // Only HttpClient's own timeout lands here; genuine caller
            // cancellation (ct) propagates instead of being misreported.
            return builder
                .Fail("Credentials validation timed out")
                .WithEvidence("Credentials", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", "Connection timeout"))
                .Build();
        }
        catch (HttpRequestException ex)
        {
            return builder
                .Fail($"Cannot reach registry: {ex.Message}")
                .WithEvidence("Credentials", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", ex.Message))
                .Build();
        }
    }

    /// <summary>
    /// Masks a secret for evidence output. Short secrets (8 chars or fewer)
    /// are fully masked; longer ones show only the first and last two
    /// characters, so at most 4 characters of a long secret are revealed.
    /// </summary>
    private static string Redact(string? value)
    {
        if (string.IsNullOrEmpty(value))
            return "(not set)";
        // Partial reveal of very short secrets would expose most of the
        // value (e.g. 4 of 5 characters), so mask them entirely.
        if (value.Length <= 8)
            return "****";
        return $"{value[..2]}****{value[^2..]}";
    }
}

View File

@@ -0,0 +1,239 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Verifies pull authorization from the configured OCI registry.
/// Uses a non-destructive HEAD request against a configurable test manifest
/// to test permissions without transferring image data.
/// </summary>
public sealed class RegistryPullAuthorizationCheck : IDoctorCheck
{
    /// <inheritdoc />
    public string CheckId => "check.integration.oci.pull";

    /// <inheritdoc />
    public string Name => "OCI Registry Pull Authorization";

    /// <inheritdoc />
    public string Description => "Verify credentials have pull access to the registry";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Fail;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["registry", "oci", "pull", "authorization", "credentials"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Runnable only when a registry endpoint is configured (OCI:* wins).
        var registryUrl = context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url");
        return !string.IsNullOrEmpty(registryUrl);
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // CanRun guarantees the URL is set; normalize trailing slash.
        var registryUrl = (context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url"))!.TrimEnd('/');
        var testRepo = context.Configuration.GetValue<string>("OCI:TestRepository")
            ?? context.Configuration.GetValue<string>("Registry:TestRepository")
            ?? "library/alpine";
        var testTag = context.Configuration.GetValue<string>("OCI:TestTag")
            ?? context.Configuration.GetValue<string>("Registry:TestTag")
            ?? "latest";
        var builder = context.CreateResult(CheckId, "stellaops.doctor.integration", DoctorCategory.Integration.ToString());
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder.Skip("IHttpClientFactory not available").Build();
        }
        try
        {
            using var httpClient = httpClientFactory.CreateClient();
            httpClient.Timeout = TimeSpan.FromSeconds(10);
            ApplyAuthentication(context, httpClient);

            // HEAD the manifest: proves pull permission without downloading.
            // Accept both OCI and Docker manifest/index media types so the
            // probe works for single- and multi-arch images on any registry.
            using var request = new HttpRequestMessage(
                HttpMethod.Head,
                $"{registryUrl}/v2/{testRepo}/manifests/{testTag}");
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.manifest.v1+json"));
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.v2+json"));
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.index.v1+json"));
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.list.v2+json"));
            using var response = await httpClient.SendAsync(request, ct);
            if (response.IsSuccessStatusCode)
            {
                var digest = response.Headers.TryGetValues("Docker-Content-Digest", out var digestValues)
                    ? digestValues.FirstOrDefault() ?? "unknown"
                    : "unknown";
                var contentType = response.Content.Headers.ContentType?.MediaType ?? "unknown";
                return builder
                    .Pass("Pull authorization verified")
                    .WithEvidence("Pull Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("test_tag", testTag)
                        .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
                        .Add("pull_authorized", "true")
                        .Add("manifest_digest", digest)
                        .Add("manifest_type", contentType))
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                return builder
                    .Fail("Pull authorization failed: Invalid credentials")
                    .WithEvidence("Pull Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("test_tag", testTag)
                        .Add("http_status", "401 Unauthorized")
                        .Add("pull_authorized", "false"))
                    .WithCauses(
                        "Credentials are invalid or expired",
                        "Token has been revoked",
                        "Anonymous pull not allowed",
                        "Wrong username/password combination")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Verify credentials",
                            "Check that configured username/password or token is correct")
                        .AddStep(2, "Test with docker CLI",
                            $"docker pull {new Uri(registryUrl).Host}/{testRepo}:{testTag}",
                            CommandType.Shell)
                        .AddManualStep(3, "Check if anonymous pull is supported",
                            "Some private registries require authentication for all operations")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-auth-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Forbidden)
            {
                // 403 after a successful auth handshake: the identity is known
                // but lacks pull rights on this repository.
                return builder
                    .Fail("Pull authorization failed: No pull permission")
                    .WithEvidence("Pull Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("test_tag", testTag)
                        .Add("http_status", "403 Forbidden")
                        .Add("pull_authorized", "false")
                        .Add("credentials_valid", "true"))
                    .WithCauses(
                        "Credentials valid but lack pull permissions",
                        "Repository access restricted",
                        "IP address or network not allowed")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Check repository permissions",
                            $"Ensure service account has pull access to {testRepo}")
                        .AddManualStep(2, "Check access control lists",
                            "Verify the service account or IP is allowed to pull")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-auth-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.NotFound)
            {
                // The test image simply does not exist; authorization cannot
                // be judged, so report Info rather than Fail.
                return builder
                    .Info("Cannot verify pull authorization - test image not found")
                    .WithEvidence("Pull Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("test_tag", testTag)
                        .Add("http_status", "404 Not Found")
                        .Add("pull_authorized", "unknown")
                        .Add("reason", "Test image does not exist"))
                    .WithCauses(
                        "Test image does not exist in registry",
                        "Repository name format incorrect",
                        "Tag does not exist")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Configure a valid test image",
                            "Set OCI:TestRepository and OCI:TestTag to an existing image in your registry"))
                    .Build();
            }
            return builder
                .Fail($"Pull authorization check failed: {response.StatusCode}")
                .WithEvidence("Pull Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("test_repository", testRepo)
                    .Add("test_tag", testTag)
                    .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture)))
                .WithCauses(
                    "Registry returned unexpected response",
                    "Registry configuration issue")
                .Build();
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // Only HttpClient's own timeout lands here; genuine caller
            // cancellation (ct) propagates instead of being misreported.
            return builder
                .Fail("Pull authorization check timed out")
                .WithEvidence("Pull Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", "Connection timeout"))
                .WithCauses(
                    "Registry is slow to respond",
                    "Network connectivity issue")
                .Build();
        }
        catch (HttpRequestException ex)
        {
            return builder
                .Fail($"Cannot reach registry: {ex.Message}")
                .WithEvidence("Pull Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", ex.Message))
                .WithCauses(
                    "Registry is unreachable",
                    "DNS resolution failure",
                    "TLS certificate error")
                .Build();
        }
    }

    /// <summary>
    /// Applies Basic auth from OCI:/Registry: username+password when both are
    /// set; a configured bearer token is applied last and therefore wins.
    /// </summary>
    private static void ApplyAuthentication(DoctorPluginContext context, HttpClient httpClient)
    {
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
        {
            var credentials = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Basic", credentials);
        }
        var bearerToken = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        if (!string.IsNullOrEmpty(bearerToken))
        {
            // Deliberately overwrites Basic auth: bearer token takes precedence.
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", bearerToken);
        }
    }
}

View File

@@ -0,0 +1,221 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Verifies push authorization to the configured OCI registry.
/// Uses non-destructive blob upload initiation to test permissions; the
/// upload session is cancelled immediately so nothing is ever written.
/// </summary>
public sealed class RegistryPushAuthorizationCheck : IDoctorCheck
{
    /// <inheritdoc />
    public string CheckId => "check.integration.oci.push";

    /// <inheritdoc />
    public string Name => "OCI Registry Push Authorization";

    /// <inheritdoc />
    public string Description => "Verify credentials have push access to the registry";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Fail;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["registry", "oci", "push", "authorization", "credentials"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Push requires both a registry URL and some form of credentials;
        // anonymous push probing would only generate noisy 401 failures.
        var registryUrl = context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url");
        var hasAuth = HasAuthentication(context);
        return !string.IsNullOrEmpty(registryUrl) && hasAuth;
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // CanRun guarantees the URL is set; normalize trailing slash.
        var registryUrl = (context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url"))!.TrimEnd('/');
        // The push-specific repository key takes precedence over the generic
        // test-repository keys (previously it was checked last and was
        // shadowed whenever a generic test repository was configured).
        var testRepo = context.Configuration.GetValue<string>("OCI:PushTestRepository")
            ?? context.Configuration.GetValue<string>("OCI:TestRepository")
            ?? context.Configuration.GetValue<string>("Registry:TestRepository")
            ?? "stellaops/doctor-test";
        var builder = context.CreateResult(CheckId, "stellaops.doctor.integration", DoctorCategory.Integration.ToString());
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder.Skip("IHttpClientFactory not available").Build();
        }
        try
        {
            using var httpClient = httpClientFactory.CreateClient();
            httpClient.Timeout = TimeSpan.FromSeconds(10);
            ApplyAuthentication(context, httpClient);

            // POST to the blob-upload endpoint: 202 Accepted proves push
            // rights without pushing any data.
            var uploadUrl = $"{registryUrl}/v2/{testRepo}/blobs/uploads/";
            using var response = await httpClient.PostAsync(uploadUrl, null, ct);
            if (response.StatusCode == HttpStatusCode.Accepted)
            {
                // Best-effort cleanup of the upload session we just opened.
                var location = response.Headers.Location;
                if (location != null)
                {
                    try { await httpClient.DeleteAsync(location, ct); } catch { /* Ignore cleanup errors */ }
                }
                return builder
                    .Pass("Push authorization verified")
                    .WithEvidence("Push Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
                        .Add("push_authorized", "true")
                        .Add("upload_session_cancelled", "true"))
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                return builder
                    .Fail("Push authorization failed: Invalid credentials")
                    .WithEvidence("Push Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("http_status", "401 Unauthorized")
                        .Add("push_authorized", "false"))
                    .WithCauses(
                        "Credentials are invalid or expired",
                        "Token has been revoked",
                        "Wrong username/password combination")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Verify credentials",
                            "Check that configured username/password or token is correct")
                        .AddStep(2, "Test with docker CLI",
                            $"docker login {new Uri(registryUrl).Host}",
                            CommandType.Shell)
                        .AddManualStep(3, "Regenerate token if expired",
                            "Generate a new access token from your registry provider")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-auth-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.Forbidden)
            {
                // 403 after a successful auth handshake: the identity is known
                // but lacks push rights on this repository.
                return builder
                    .Fail("Push authorization failed: No push permission")
                    .WithEvidence("Push Authorization", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("http_status", "403 Forbidden")
                        .Add("push_authorized", "false")
                        .Add("credentials_valid", "true"))
                    .WithCauses(
                        "Credentials valid but lack push permissions",
                        "Repository does not exist and cannot be created",
                        "Repository permissions restrict push access",
                        "Organization/team permissions prevent push")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Check repository permissions",
                            $"Ensure service account has push access to {testRepo}")
                        .AddManualStep(2, "Create repository if needed",
                            "Some registries require repository to exist before push")
                        .AddManualStep(3, "Contact registry administrator",
                            "Request push permissions for the service account")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-auth-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            return builder
                .Fail($"Push authorization check failed: {response.StatusCode}")
                .WithEvidence("Push Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("test_repository", testRepo)
                    .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture)))
                .WithCauses(
                    "Registry returned unexpected response",
                    "Repository path format incorrect",
                    "Registry configuration issue")
                .Build();
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // Only HttpClient's own timeout lands here; genuine caller
            // cancellation (ct) propagates instead of being misreported.
            return builder
                .Fail("Push authorization check timed out")
                .WithEvidence("Push Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", "Connection timeout"))
                .WithCauses(
                    "Registry is slow to respond",
                    "Network connectivity issue")
                .Build();
        }
        catch (HttpRequestException ex)
        {
            return builder
                .Fail($"Cannot reach registry: {ex.Message}")
                .WithEvidence("Push Authorization", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", ex.Message))
                .WithCauses(
                    "Registry is unreachable",
                    "DNS resolution failure",
                    "TLS certificate error")
                .Build();
        }
    }

    /// <summary>
    /// True when either a complete username/password pair or a bearer token
    /// is configured under the OCI:/Registry: keys.
    /// </summary>
    private static bool HasAuthentication(DoctorPluginContext context)
    {
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        var token = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        return (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password)) ||
               !string.IsNullOrEmpty(token);
    }

    /// <summary>
    /// Applies Basic auth from OCI:/Registry: username+password when both are
    /// set; a configured bearer token is applied last and therefore wins.
    /// </summary>
    private static void ApplyAuthentication(DoctorPluginContext context, HttpClient httpClient)
    {
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
        {
            var credentials = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Basic", credentials);
        }
        var bearerToken = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        if (!string.IsNullOrEmpty(bearerToken))
        {
            // Deliberately overwrites Basic auth: bearer token takes precedence.
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", bearerToken);
        }
    }
}

View File

@@ -0,0 +1,278 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Checks if the configured OCI registry supports the referrers API (OCI Distribution Spec v1.1+).
/// </summary>
/// <remarks>
/// The check resolves the digest of a configured test image, then probes
/// <c>/v2/{repo}/referrers/{digest}</c>. Unsupported registries (404 without an
/// OCI index body, or 405) are reported as a warning rather than a failure,
/// because StellaOps falls back to the tag-based scheme (<c>sha256-{digest}.*</c>).
/// Fix over previous revision: caller cancellation is no longer misreported as a
/// registry timeout (the TaskCanceledException catch is filtered on the token).
/// </remarks>
public sealed class RegistryReferrersApiCheck : IDoctorCheck
{
    /// <inheritdoc />
    public string CheckId => "check.integration.oci.referrers";

    /// <inheritdoc />
    public string Name => "OCI Registry Referrers API Support";

    /// <inheritdoc />
    public string Description => "Verify registry supports OCI 1.1 referrers API for artifact linking";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["registry", "oci", "referrers", "compatibility", "oci-1.1"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(10);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Runnable only when a registry URL is configured under either key form.
        var registryUrl = context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url");
        return !string.IsNullOrEmpty(registryUrl);
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // CanRun guarantees the URL is present, hence the null-forgiving operator.
        var registryUrl = (context.Configuration.GetValue<string>("OCI:RegistryUrl")
            ?? context.Configuration.GetValue<string>("Registry:Url"))!.TrimEnd('/');
        var testRepo = context.Configuration.GetValue<string>("OCI:TestRepository")
            ?? context.Configuration.GetValue<string>("Registry:TestRepository")
            ?? "library/alpine";
        var testTag = context.Configuration.GetValue<string>("OCI:TestTag")
            ?? context.Configuration.GetValue<string>("Registry:TestTag")
            ?? "latest";
        var builder = context.CreateResult(CheckId, "stellaops.doctor.integration", DoctorCategory.Integration.ToString());
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder.Skip("IHttpClientFactory not available").Build();
        }
        try
        {
            using var httpClient = httpClientFactory.CreateClient();
            httpClient.Timeout = TimeSpan.FromSeconds(15);
            ApplyAuthentication(context, httpClient);
            // First, resolve the digest for the test tag
            var manifestDigest = await ResolveManifestDigestAsync(httpClient, registryUrl, testRepo, testTag, ct);
            if (string.IsNullOrEmpty(manifestDigest))
            {
                // Without a digest there is nothing to probe; report Info, not failure.
                return builder
                    .Info("Cannot verify referrers API - test image not found")
                    .WithEvidence("Referrers API Check", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("test_repository", testRepo)
                        .Add("test_tag", testTag)
                        .Add("reason", "Test image not found or not accessible"))
                    .WithCauses(
                        "Test image does not exist in registry",
                        "Credentials lack pull permissions",
                        "Repository name format incorrect for registry")
                    .Build();
            }
            // Probe the referrers API endpoint
            var referrersEndpoint = $"{registryUrl}/v2/{testRepo}/referrers/{manifestDigest}";
            using var request = new HttpRequestMessage(HttpMethod.Get, referrersEndpoint);
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.index.v1+json"));
            using var response = await httpClient.SendAsync(request, ct);
            var ociVersion = response.Headers.TryGetValues("OCI-Distribution-API-Version", out var versionHeaders)
                ? string.Join(", ", versionHeaders)
                : "unknown";
            if (response.StatusCode == HttpStatusCode.OK)
            {
                return builder
                    .Pass("OCI referrers API is supported")
                    .WithEvidence("Referrers API Support", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("api_endpoint", referrersEndpoint)
                        .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
                        .Add("oci_version", ociVersion)
                        .Add("referrers_supported", "true")
                        .Add("fallback_required", "false"))
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.NotFound)
            {
                // Some registries answer 404 yet return a valid (empty) OCI index body,
                // which still indicates referrers support; sniff the payload to tell
                // that apart from a plain "endpoint not found" error.
                var content = await response.Content.ReadAsStringAsync(ct);
                var isOciIndex = content.Contains("\"schemaVersion\"") &&
                                 (content.Contains("\"manifests\"") || content.Contains("application/vnd.oci.image.index"));
                if (isOciIndex)
                {
                    return builder
                        .Pass("OCI referrers API is supported (no referrers for test image)")
                        .WithEvidence("Referrers API Support", eb => eb
                            .Add("registry_url", registryUrl)
                            .Add("api_endpoint", referrersEndpoint)
                            .Add("http_status", "404 (with OCI index)")
                            .Add("oci_version", ociVersion)
                            .Add("referrers_supported", "true")
                            .Add("referrers_count", "0")
                            .Add("fallback_required", "false"))
                        .Build();
                }
                return builder
                    .Warn("OCI referrers API not supported - using tag-based fallback")
                    .WithEvidence("Referrers API Support", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("api_endpoint", referrersEndpoint)
                        .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
                        .Add("oci_version", ociVersion)
                        .Add("referrers_supported", "false")
                        .Add("fallback_required", "true")
                        .Add("fallback_pattern", "sha256-{digest}.{artifactType}"))
                    .WithCauses(
                        "Registry does not implement OCI Distribution Spec v1.1",
                        "Registry version is too old",
                        "Referrers API disabled in registry configuration")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Check registry version",
                            "Verify your registry version supports OCI Distribution Spec v1.1+")
                        .AddManualStep(2, "Upgrade registry",
                            "Upgrade to: Harbor 2.6+, Quay 3.12+, ACR (default), ECR (default), GCR/Artifact Registry (default)")
                        .AddManualStep(3, "Note: Fallback available",
                            "StellaOps automatically uses tag-based fallback (sha256-{digest}.*) when referrers API is unavailable")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-referrer-troubleshooting"))
                    .WithVerification($"stella doctor --check {CheckId}")
                    .Build();
            }
            if (response.StatusCode == HttpStatusCode.MethodNotAllowed)
            {
                // 405 means the route exists in no form: referrers API definitely absent.
                return builder
                    .Warn("OCI referrers API not supported - using tag-based fallback")
                    .WithEvidence("Referrers API Support", eb => eb
                        .Add("registry_url", registryUrl)
                        .Add("api_endpoint", referrersEndpoint)
                        .Add("http_status", "405 Method Not Allowed")
                        .Add("oci_version", ociVersion)
                        .Add("referrers_supported", "false")
                        .Add("fallback_required", "true"))
                    .WithCauses(
                        "Registry does not implement OCI Distribution Spec v1.1",
                        "Referrers API not enabled for this repository")
                    .WithRemediation(rb => rb
                        .AddManualStep(1, "Check registry documentation",
                            "Review registry documentation for OCI 1.1 referrers API support")
                        .AddManualStep(2, "Note: Fallback available",
                            "StellaOps automatically uses tag-based fallback when referrers API is unavailable")
                        .WithRunbookUrl("https://docs.stella-ops.org/runbooks/registry-referrer-troubleshooting"))
                    .Build();
            }
            // Anything else (401/403/5xx/...) is an actual failure of the check.
            return builder
                .Fail($"Registry referrers API check failed: {response.StatusCode}")
                .WithEvidence("Referrers API Check", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("api_endpoint", referrersEndpoint)
                    .Add("http_status", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
                    .Add("oci_version", ociVersion))
                .WithCauses(
                    "Registry returned unexpected error",
                    "Authentication issue",
                    "Network connectivity problem")
                .Build();
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // Fix: only report a timeout when the HttpClient timed out on its own.
            // If the caller's token was cancelled, the exception propagates instead
            // of being misreported as a slow registry.
            return builder
                .Warn("Registry referrers API check timed out")
                .WithEvidence("Referrers API Check", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", "Connection timeout"))
                .WithCauses(
                    "Registry is slow to respond",
                    "Network connectivity issue",
                    "Registry is under heavy load")
                .Build();
        }
        catch (HttpRequestException ex)
        {
            return builder
                .Fail($"Cannot reach registry: {ex.Message}")
                .WithEvidence("Referrers API Check", eb => eb
                    .Add("registry_url", registryUrl)
                    .Add("error", ex.Message))
                .WithCauses(
                    "Registry is unreachable",
                    "DNS resolution failure",
                    "TLS certificate error")
                .Build();
        }
    }

    /// <summary>
    /// Sets the Authorization header from configuration. A bearer token, when
    /// present, overrides basic credentials (it is assigned last).
    /// </summary>
    private static void ApplyAuthentication(DoctorPluginContext context, HttpClient httpClient)
    {
        var username = context.Configuration.GetValue<string>("OCI:Username")
            ?? context.Configuration.GetValue<string>("Registry:Username");
        var password = context.Configuration.GetValue<string>("OCI:Password")
            ?? context.Configuration.GetValue<string>("Registry:Password");
        if (!string.IsNullOrEmpty(username) && !string.IsNullOrEmpty(password))
        {
            var credentials = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Basic", credentials);
        }
        var bearerToken = context.Configuration.GetValue<string>("OCI:Token")
            ?? context.Configuration.GetValue<string>("Registry:Token");
        if (!string.IsNullOrEmpty(bearerToken))
        {
            httpClient.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", bearerToken);
        }
    }

    /// <summary>
    /// Resolves the manifest digest for <paramref name="tag"/> via a HEAD request,
    /// reading the Docker-Content-Digest response header. Returns null when the
    /// image is missing/inaccessible or the registry omits the header.
    /// </summary>
    private static async Task<string?> ResolveManifestDigestAsync(
        HttpClient httpClient,
        string registryUrl,
        string repository,
        string tag,
        CancellationToken ct)
    {
        using var request = new HttpRequestMessage(
            HttpMethod.Head,
            $"{registryUrl}/v2/{repository}/manifests/{tag}");
        // Advertise all common manifest media types so both OCI and Docker-v2
        // registries (and multi-arch index responses) answer the HEAD.
        request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.manifest.v1+json"));
        request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.v2+json"));
        request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.oci.image.index.v1+json"));
        request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.list.v2+json"));
        using var response = await httpClient.SendAsync(request, ct);
        if (!response.IsSuccessStatusCode)
            return null;
        if (response.Headers.TryGetValues("Docker-Content-Digest", out var digestValues))
        {
            return digestValues.FirstOrDefault();
        }
        return null;
    }
}

View File

@@ -31,6 +31,11 @@ public sealed class IntegrationPlugin : IDoctorPlugin
public IReadOnlyList<IDoctorCheck> GetChecks(DoctorPluginContext context) =>
[
new OciRegistryCheck(),
new RegistryReferrersApiCheck(),
new RegistryCapabilityProbeCheck(),
new RegistryPushAuthorizationCheck(),
new RegistryPullAuthorizationCheck(),
new RegistryCredentialsCheck(),
new ObjectStorageCheck(),
new SmtpCheck(),
new SlackWebhookCheck(),

View File

@@ -0,0 +1,396 @@
using System.Reflection;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.TestKit.Traits;
namespace StellaOps.TestKit.Analysis;
/// <summary>
/// Generates intent coverage reports from test assemblies.
/// </summary>
/// <remarks>
/// The report generator scans assemblies for tests with Intent traits and produces
/// coverage matrices showing distribution of intents across modules. This helps
/// identify coverage gaps (e.g., 90% Operational, 2% Safety) and drive testing investment.
///
/// Usage:
/// <code>
/// var generator = new IntentCoverageReportGenerator();
/// generator.AddAssembly(typeof(MyTests).Assembly);
/// var report = generator.Generate();
/// await report.WriteJsonAsync("intent-coverage.json");
/// </code>
/// Not thread-safe: Generate() clears and repopulates internal state.
/// </remarks>
public sealed class IntentCoverageReportGenerator
{
    private readonly List<Assembly> _assemblies = new();
    private readonly Dictionary<string, ModuleIntentStats> _moduleStats = new();

    /// <summary>
    /// Add an assembly to scan for intent-tagged tests.
    /// </summary>
    public void AddAssembly(Assembly assembly)
    {
        ArgumentNullException.ThrowIfNull(assembly);
        _assemblies.Add(assembly);
    }

    /// <summary>
    /// Add multiple assemblies to scan.
    /// </summary>
    public void AddAssemblies(IEnumerable<Assembly> assemblies)
    {
        foreach (var assembly in assemblies)
        {
            AddAssembly(assembly);
        }
    }

    /// <summary>
    /// Generate the intent coverage report from all added assemblies.
    /// </summary>
    public IntentCoverageReport Generate()
    {
        _moduleStats.Clear();
        foreach (var assembly in _assemblies)
        {
            ScanAssembly(assembly);
        }
        var intents = TestIntents.All.ToDictionary(
            i => i,
            i => _moduleStats.Values.Sum(m => m.IntentCounts.GetValueOrDefault(i, 0)));
        var totalTests = _moduleStats.Values.Sum(m => m.TotalTests);
        var taggedTests = _moduleStats.Values.Sum(m => m.TaggedTests);
        var untaggedTests = totalTests - taggedTests;
        return new IntentCoverageReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            TotalTests = totalTests,
            TaggedTests = taggedTests,
            UntaggedTests = untaggedTests,
            TagCoveragePercent = totalTests > 0 ? (double)taggedTests / totalTests * 100 : 0,
            IntentDistribution = intents,
            ModuleStats = _moduleStats.ToDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToReadOnly()),
            Warnings = GenerateWarnings(intents, totalTests, taggedTests)
        };
    }

    private void ScanAssembly(Assembly assembly)
    {
        var moduleName = ExtractModuleName(assembly);
        if (!_moduleStats.TryGetValue(moduleName, out var stats))
        {
            stats = new ModuleIntentStats { ModuleName = moduleName };
            _moduleStats[moduleName] = stats;
        }
        // Fix: GetTypes() throws ReflectionTypeLoadException when any type fails
        // to load; fall back to the types that did load so one bad dependency
        // doesn't abort the whole report.
        Type[] candidateTypes;
        try
        {
            candidateTypes = assembly.GetTypes();
        }
        catch (ReflectionTypeLoadException ex)
        {
            candidateTypes = ex.Types.Where(t => t is not null).Cast<Type>().ToArray();
        }
        var testTypes = candidateTypes
            .Where(t => t.IsClass && !t.IsAbstract && HasTestMethods(t));
        foreach (var type in testTypes)
        {
            ScanType(type, stats);
        }
    }

    private static void ScanType(Type type, ModuleIntentStats stats)
    {
        // Check class-level intent attributes (inherited by every test method)
        var classIntents = type.GetCustomAttributes<IntentAttribute>().ToList();
        var testMethods = type.GetMethods(BindingFlags.Public | BindingFlags.Instance)
            .Where(m => IsTestMethod(m));
        foreach (var method in testMethods)
        {
            stats.TotalTests++;
            var methodIntents = method.GetCustomAttributes<IntentAttribute>().ToList();
            var allIntents = classIntents.Concat(methodIntents).ToList();
            if (allIntents.Count > 0)
            {
                stats.TaggedTests++;
                foreach (var intent in allIntents)
                {
                    stats.IntentCounts.TryGetValue(intent.Intent, out var count);
                    stats.IntentCounts[intent.Intent] = count + 1;
                }
                if (allIntents.Any(i => !string.IsNullOrWhiteSpace(i.Rationale)))
                {
                    stats.TestsWithRationale++;
                }
            }
            else
            {
                // Fall back to xUnit-style [Trait("Intent", "...")] attributes,
                // matched by name so TestKit needs no hard xUnit dependency.
                var traitAttrs = method.GetCustomAttributes()
                    .Where(a => a.GetType().Name == "TraitAttribute")
                    .ToList();
                var intentTraitValues = new List<string>();
                foreach (var attr in traitAttrs)
                {
                    var nameProp = attr.GetType().GetProperty("Name");
                    var valueProp = attr.GetType().GetProperty("Value");
                    if (nameProp?.GetValue(attr) is string name &&
                        valueProp?.GetValue(attr) is string value &&
                        name == "Intent")
                    {
                        intentTraitValues.Add(value);
                    }
                }
                if (intentTraitValues.Count > 0)
                {
                    // Fix: count the method as tagged exactly once, even when it
                    // carries several Intent traits. The previous code incremented
                    // TaggedTests per trait, which could push TaggedTests above
                    // TotalTests and make UntaggedTests negative.
                    stats.TaggedTests++;
                    foreach (var value in intentTraitValues)
                    {
                        stats.IntentCounts.TryGetValue(value, out var count);
                        stats.IntentCounts[value] = count + 1;
                    }
                }
            }
        }
    }

    private static bool HasTestMethods(Type type)
    {
        return type.GetMethods(BindingFlags.Public | BindingFlags.Instance)
            .Any(IsTestMethod);
    }

    // Name-based match keeps TestKit framework-agnostic (xUnit Fact/Theory, NUnit/MSTest Test).
    private static bool IsTestMethod(MethodInfo method)
    {
        var attrs = method.GetCustomAttributes().Select(a => a.GetType().Name).ToHashSet();
        return attrs.Contains("FactAttribute") ||
               attrs.Contains("TheoryAttribute") ||
               attrs.Contains("TestAttribute");
    }

    private static string ExtractModuleName(Assembly assembly)
    {
        var name = assembly.GetName().Name ?? "Unknown";
        // Extract module from assembly name like "StellaOps.Policy.Tests"
        var parts = name.Split('.');
        if (parts.Length >= 2 && parts[0] == "StellaOps")
        {
            return parts[1];
        }
        return name;
    }

    private static List<string> GenerateWarnings(
        Dictionary<string, int> intents,
        int totalTests,
        int taggedTests)
    {
        var warnings = new List<string>();
        // Warn if less than 50% of tests are tagged
        if (totalTests > 0 && (double)taggedTests / totalTests < 0.5)
        {
            var percent = (double)taggedTests / totalTests * 100;
            warnings.Add($"Low intent coverage: only {percent:F1}% of tests have intent tags");
        }
        // Warn about intent imbalance (only with enough data to be meaningful)
        var totalTagged = intents.Values.Sum();
        if (totalTagged > 10)
        {
            foreach (var (intent, count) in intents)
            {
                var percent = (double)count / totalTagged * 100;
                if (percent > 80)
                {
                    warnings.Add($"Intent imbalance: {intent} accounts for {percent:F1}% of tagged tests");
                }
                else if (percent < 5 && intent is "Safety" or "Regulatory")
                {
                    warnings.Add($"Critical intent underrepresented: {intent} is only {percent:F1}% of tagged tests");
                }
            }
        }
        // Warn if Safety is completely missing
        if (!intents.TryGetValue(TestIntents.Safety, out var safetyCount) || safetyCount == 0)
        {
            warnings.Add("No tests tagged with Safety intent");
        }
        return warnings;
    }

    // Mutable accumulator used during scanning; converted to the immutable
    // ModuleIntentStatsReadOnly for report output.
    private sealed class ModuleIntentStats
    {
        public required string ModuleName { get; init; }
        public int TotalTests { get; set; }
        public int TaggedTests { get; set; }
        public int TestsWithRationale { get; set; }
        public Dictionary<string, int> IntentCounts { get; } = new();
        public ModuleIntentStatsReadOnly ToReadOnly() => new()
        {
            ModuleName = ModuleName,
            TotalTests = TotalTests,
            TaggedTests = TaggedTests,
            TestsWithRationale = TestsWithRationale,
            TagCoveragePercent = TotalTests > 0 ? (double)TaggedTests / TotalTests * 100 : 0,
            IntentCounts = new Dictionary<string, int>(IntentCounts)
        };
    }
}
/// <summary>
/// Intent coverage report output format.
/// </summary>
public sealed record IntentCoverageReport
{
    /// <summary>
    /// When the report was generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>
    /// Total number of test methods scanned.
    /// </summary>
    public required int TotalTests { get; init; }

    /// <summary>
    /// Number of tests with intent tags.
    /// </summary>
    public required int TaggedTests { get; init; }

    /// <summary>
    /// Number of tests without intent tags.
    /// </summary>
    public required int UntaggedTests { get; init; }

    /// <summary>
    /// Percentage of tests with intent tags (0-100).
    /// </summary>
    public required double TagCoveragePercent { get; init; }

    /// <summary>
    /// Count of tests per intent category.
    /// </summary>
    public required Dictionary<string, int> IntentDistribution { get; init; }

    /// <summary>
    /// Per-module statistics.
    /// </summary>
    public required Dictionary<string, ModuleIntentStatsReadOnly> ModuleStats { get; init; }

    /// <summary>
    /// Generated warnings about coverage gaps or imbalances.
    /// </summary>
    public required List<string> Warnings { get; init; }

    /// <summary>
    /// Write the report as JSON (indented, camelCase) to a file.
    /// </summary>
    public async Task WriteJsonAsync(string filePath, CancellationToken ct = default)
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = true,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        };
        await using var stream = File.Create(filePath);
        await JsonSerializer.SerializeAsync(stream, this, options, ct);
    }

    /// <summary>
    /// Generate a markdown summary of the report.
    /// </summary>
    /// <remarks>
    /// Fix: numeric and timestamp formatting uses the invariant culture so the
    /// generated artifact is byte-stable regardless of the host locale (a German
    /// or Turkish locale would otherwise emit "50,0%" and break diff/tool parsing).
    /// </remarks>
    public string ToMarkdown()
    {
        var sb = new System.Text.StringBuilder();
        sb.AppendLine("# Intent Coverage Report");
        sb.AppendLine();
        sb.AppendLine(FormattableString.Invariant($"Generated: {GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC"));
        sb.AppendLine();
        sb.AppendLine("## Summary");
        sb.AppendLine();
        sb.AppendLine(FormattableString.Invariant($"- Total tests: {TotalTests}"));
        sb.AppendLine(FormattableString.Invariant($"- Tagged: {TaggedTests} ({TagCoveragePercent:F1}%)"));
        sb.AppendLine(FormattableString.Invariant($"- Untagged: {UntaggedTests}"));
        sb.AppendLine();
        sb.AppendLine("## Intent Distribution");
        sb.AppendLine();
        sb.AppendLine("| Intent | Count | Percent |");
        sb.AppendLine("|--------|------:|--------:|");
        var total = IntentDistribution.Values.Sum();
        foreach (var (intent, count) in IntentDistribution.OrderByDescending(kvp => kvp.Value))
        {
            // Guard: an empty distribution yields 0% rows rather than NaN.
            var percent = total > 0 ? (double)count / total * 100 : 0;
            sb.AppendLine(FormattableString.Invariant($"| {intent} | {count} | {percent:F1}% |"));
        }
        if (ModuleStats.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("## Per-Module Coverage");
            sb.AppendLine();
            sb.AppendLine("| Module | Total | Tagged | Coverage |");
            sb.AppendLine("|--------|------:|-------:|---------:|");
            foreach (var (module, stats) in ModuleStats.OrderBy(kvp => kvp.Key))
            {
                sb.AppendLine(FormattableString.Invariant($"| {module} | {stats.TotalTests} | {stats.TaggedTests} | {stats.TagCoveragePercent:F1}% |"));
            }
        }
        if (Warnings.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("## Warnings");
            sb.AppendLine();
            foreach (var warning in Warnings)
            {
                sb.AppendLine($"- {warning}");
            }
        }
        return sb.ToString();
    }
}
/// <summary>
/// Read-only module intent statistics for report output.
/// Immutable snapshot produced from the generator's internal mutable accumulator.
/// </summary>
public sealed record ModuleIntentStatsReadOnly
{
    /// <summary>
    /// Module name extracted from assembly (e.g. "Policy" from "StellaOps.Policy.Tests").
    /// </summary>
    public required string ModuleName { get; init; }
    /// <summary>
    /// Total test count in module (tagged and untagged).
    /// </summary>
    public required int TotalTests { get; init; }
    /// <summary>
    /// Tests with intent tags (attribute- or trait-based).
    /// </summary>
    public required int TaggedTests { get; init; }
    /// <summary>
    /// Tests with rationale in their intent attribute.
    /// </summary>
    public required int TestsWithRationale { get; init; }
    /// <summary>
    /// Tag coverage percentage (0-100); 0 when the module has no tests.
    /// </summary>
    public required double TagCoveragePercent { get; init; }
    /// <summary>
    /// Intent counts for this module, keyed by intent name.
    /// </summary>
    public required Dictionary<string, int> IntentCounts { get; init; }
}

View File

@@ -27,7 +27,7 @@ namespace StellaOps.TestKit.Assertions;
public static class SnapshotAssert
{
private static readonly bool UpdateSnapshotsMode =
Environment.GetEnvironmentVariable("UPDATE_SNAPSHOTS") == "1";
global::System.Environment.GetEnvironmentVariable("UPDATE_SNAPSHOTS") == "1";
/// <summary>
/// Asserts that the value matches the stored snapshot. If UPDATE_SNAPSHOTS=1, updates the snapshot.

View File

@@ -78,13 +78,13 @@ public abstract class ConnectorLiveSchemaTestBase : IAsyncLifetime
/// Returns true if live tests are enabled.
/// </summary>
protected static bool IsEnabled =>
Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") == "true";
global::System.Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") == "true";
/// <summary>
/// Returns true if fixture auto-update is enabled.
/// </summary>
protected static bool IsAutoUpdateEnabled =>
Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") == "true";
global::System.Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") == "true";
/// <summary>
/// Optional request headers for live requests.
@@ -182,7 +182,7 @@ public sealed class LiveTestAttribute : FactAttribute
{
public LiveTestAttribute()
{
if (Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") != "true")
if (global::System.Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") != "true")
{
Skip = "Live tests are disabled. Set STELLAOPS_LIVE_TESTS=true to enable.";
}
@@ -197,7 +197,7 @@ public sealed class LiveTheoryAttribute : TheoryAttribute
{
public LiveTheoryAttribute()
{
if (Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") != "true")
if (global::System.Environment.GetEnvironmentVariable("STELLAOPS_LIVE_TESTS") != "true")
{
Skip = "Live tests are disabled. Set STELLAOPS_LIVE_TESTS=true to enable.";
}

View File

@@ -120,7 +120,7 @@ public abstract class ConnectorParserTestBase<TRawModel, TNormalizedModel> : IDi
/// </summary>
protected void UpdateSnapshot(string fixtureFile, string expectedFile)
{
if (Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") != "true")
if (global::System.Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") != "true")
{
throw new InvalidOperationException(
"Set STELLAOPS_UPDATE_FIXTURES=true to update snapshots");

View File

@@ -16,7 +16,7 @@ public sealed class FixtureUpdater
{
_fixturesDirectory = fixturesDirectory;
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_enabled = Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") == "true";
_enabled = global::System.Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") == "true";
}
/// <summary>

View File

@@ -0,0 +1,270 @@
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Environment;
/// <summary>
/// Defines an infrastructure profile for environment skew testing.
/// </summary>
/// <remarks>
/// Environment skew tests validate that the system behaves consistently
/// across different infrastructure configurations:
/// - CPU architectures (x64, ARM64)
/// - Network conditions (latency, packet loss)
/// - Container runtimes (Docker, containerd, Podman)
///
/// Usage:
/// <code>
/// var profile = EnvironmentProfile.HighLatency;
/// var runner = new SkewTestRunner();
/// var report = await runner.RunAcrossProfiles(
///     test: () => RunMyTest(),
///     profiles: [EnvironmentProfile.Standard, EnvironmentProfile.HighLatency]);
///
/// runner.AssertEquivalence(report, tolerance: 0.05);
/// </code>
/// </remarks>
public sealed record EnvironmentProfile
{
    /// <summary>
    /// Profile name for identification.
    /// </summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>
    /// CPU architecture profile.
    /// </summary>
    [JsonPropertyName("cpu")]
    public CpuProfile Cpu { get; init; } = new();

    /// <summary>
    /// Network conditions profile.
    /// </summary>
    [JsonPropertyName("network")]
    public NetworkProfile Network { get; init; } = new();

    /// <summary>
    /// Container runtime profile.
    /// </summary>
    [JsonPropertyName("runtime")]
    public ContainerRuntime Runtime { get; init; } = ContainerRuntime.Docker;

    /// <summary>
    /// Additional environment variables.
    /// </summary>
    [JsonPropertyName("environmentVariables")]
    public Dictionary<string, string> EnvironmentVariables { get; init; } = [];

    /// <summary>
    /// Resource limits.
    /// </summary>
    [JsonPropertyName("resourceLimits")]
    public ResourceLimits ResourceLimits { get; init; } = new();

    #region Predefined Profiles

    /// <summary>
    /// Standard profile: default Testcontainers, no network shaping.
    /// All other predefined profiles are derived from this baseline.
    /// </summary>
    public static EnvironmentProfile Standard => new()
    {
        Name = "Standard",
        Cpu = new CpuProfile { Architecture = CpuArchitecture.X64 },
        Network = new NetworkProfile(),
        Runtime = ContainerRuntime.Docker
    };

    /// <summary>
    /// High latency profile: 100ms added latency.
    /// </summary>
    public static EnvironmentProfile HighLatency => Standard with
    {
        Name = "HighLatency",
        Network = new NetworkProfile { Latency = TimeSpan.FromMilliseconds(100) }
    };

    /// <summary>
    /// Low bandwidth profile: 10 Mbps limit.
    /// </summary>
    public static EnvironmentProfile LowBandwidth => Standard with
    {
        Name = "LowBandwidth",
        Network = new NetworkProfile { BandwidthMbps = 10 }
    };

    /// <summary>
    /// Packet loss profile: 1% packet loss.
    /// </summary>
    public static EnvironmentProfile PacketLoss => Standard with
    {
        Name = "PacketLoss",
        Network = new NetworkProfile { PacketLossRate = 0.01 }
    };

    /// <summary>
    /// ARM64 CPU profile (if available).
    /// </summary>
    public static EnvironmentProfile ArmCpu => Standard with
    {
        Name = "ArmCpu",
        Cpu = new CpuProfile { Architecture = CpuArchitecture.Arm64 }
    };

    /// <summary>
    /// Resource-constrained profile: limited CPU and memory.
    /// </summary>
    public static EnvironmentProfile ResourceConstrained => Standard with
    {
        Name = "ResourceConstrained",
        Cpu = new CpuProfile { Architecture = CpuArchitecture.X64, CpuLimit = 0.5 },
        ResourceLimits = new ResourceLimits { MemoryMb = 256, CpuCores = 1 }
    };

    /// <summary>
    /// All predefined profiles for comprehensive testing.
    /// ArmCpu is deliberately excluded: ARM hosts are not universally available.
    /// </summary>
    public static IReadOnlyList<EnvironmentProfile> All =>
    [
        Standard,
        HighLatency,
        LowBandwidth,
        PacketLoss,
        ResourceConstrained
    ];

    /// <summary>
    /// Network-focused profiles.
    /// </summary>
    public static IReadOnlyList<EnvironmentProfile> NetworkProfiles =>
    [
        Standard,
        HighLatency,
        LowBandwidth,
        PacketLoss
    ];

    #endregion
}
/// <summary>
/// CPU architecture and limits.
/// </summary>
public sealed record CpuProfile
{
    /// <summary>
    /// Target CPU architecture. Defaults to x64.
    /// </summary>
    [JsonPropertyName("architecture")]
    public CpuArchitecture Architecture { get; init; } = CpuArchitecture.X64;
    /// <summary>
    /// CPU limit as a fraction (0.5 = 50% of one core). Defaults to one full core.
    /// </summary>
    [JsonPropertyName("cpuLimit")]
    public double CpuLimit { get; init; } = 1.0;
}
/// <summary>
/// Network conditions for testing. The default instance represents an
/// unshaped network (no latency, loss, jitter, or bandwidth cap).
/// </summary>
public sealed record NetworkProfile
{
    /// <summary>
    /// Added network latency.
    /// </summary>
    [JsonPropertyName("latency")]
    public TimeSpan Latency { get; init; } = TimeSpan.Zero;

    /// <summary>
    /// Packet loss rate (0.01 = 1%).
    /// </summary>
    [JsonPropertyName("packetLossRate")]
    public double PacketLossRate { get; init; } = 0;

    /// <summary>
    /// Bandwidth limit in Mbps (0 = unlimited).
    /// </summary>
    [JsonPropertyName("bandwidthMbps")]
    public int BandwidthMbps { get; init; } = 0;

    /// <summary>
    /// Jitter in milliseconds.
    /// </summary>
    [JsonPropertyName("jitterMs")]
    public int JitterMs { get; init; } = 0;

    /// <summary>
    /// Whether this profile requires network shaping (tc/netem): true when
    /// any of the four knobs deviates from its neutral default.
    /// </summary>
    [JsonIgnore]
    public bool RequiresNetworkShaping
    {
        get
        {
            if (Latency > TimeSpan.Zero)
            {
                return true;
            }
            if (PacketLossRate > 0)
            {
                return true;
            }
            return BandwidthMbps > 0 || JitterMs > 0;
        }
    }
}
/// <summary>
/// Resource limits for containers. Zero values mean "no limit".
/// </summary>
public sealed record ResourceLimits
{
    /// <summary>
    /// Memory limit in MB. 0 = unlimited.
    /// </summary>
    [JsonPropertyName("memoryMb")]
    public int MemoryMb { get; init; } = 0;
    /// <summary>
    /// CPU cores limit. 0 = unlimited.
    /// </summary>
    [JsonPropertyName("cpuCores")]
    public int CpuCores { get; init; } = 0;
}
/// <summary>
/// CPU architecture options for environment skew profiles.
/// </summary>
public enum CpuArchitecture
{
    /// <summary>
    /// x86-64 architecture (default for most CI hosts).
    /// </summary>
    X64,
    /// <summary>
    /// ARM 64-bit architecture.
    /// </summary>
    Arm64
}
/// <summary>
/// Container runtime options for environment skew profiles.
/// </summary>
public enum ContainerRuntime
{
    /// <summary>
    /// Docker runtime (default).
    /// </summary>
    Docker,
    /// <summary>
    /// containerd runtime.
    /// </summary>
    Containerd,
    /// <summary>
    /// Podman runtime.
    /// </summary>
    Podman
}

View File

@@ -0,0 +1,398 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Environment;
/// <summary>
/// Runs tests across different environment profiles and compares results.
/// </summary>
/// <remarks>
/// The skew test runner executes the same test across multiple environment
/// profiles and validates that results are equivalent within tolerance.
///
/// Usage:
/// <code>
/// var runner = new SkewTestRunner();
/// var report = await runner.RunAcrossProfiles(
/// test: async () =>
/// {
/// var result = await MyService.ProcessRequest();
/// return new TestResult { Value = result.Value, DurationMs = timer.ElapsedMilliseconds };
/// },
/// profiles: EnvironmentProfile.NetworkProfiles);
///
/// runner.AssertEquivalence(report, tolerance: 0.05);
/// </code>
/// </remarks>
public sealed class SkewTestRunner
{
    /// <summary>
    /// Runs a test across multiple environment profiles.
    /// </summary>
    /// <param name="test">The test to execute.</param>
    /// <param name="profiles">Environment profiles to test against.</param>
    /// <returns>Report comparing results across profiles.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="test"/> or <paramref name="profiles"/> is null.</exception>
    public async Task<SkewReport> RunAcrossProfiles(
        Func<Task<TestResult>> test,
        IEnumerable<EnvironmentProfile> profiles)
    {
        ArgumentNullException.ThrowIfNull(test);
        ArgumentNullException.ThrowIfNull(profiles);
        var profileList = profiles.ToList();
        var results = new List<ProfileTestResult>(profileList.Count);
        // Profiles run sequentially so one profile's environment settings
        // cannot bleed into another profile's measurements.
        foreach (var profile in profileList)
        {
            var profileResult = await RunWithProfile(test, profile);
            results.Add(profileResult);
        }
        return new SkewReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            ProfileCount = profileList.Count,
            Results = results,
            HasSkew = DetectSkew(results),
            Summary = GenerateSummary(results)
        };
    }
    /// <summary>
    /// Runs a test multiple times within a single profile for variance analysis.
    /// </summary>
    /// <param name="test">The test to execute.</param>
    /// <param name="profile">The environment profile to apply before each iteration.</param>
    /// <param name="iterations">Number of iterations (must be at least 1).</param>
    /// <returns>Aggregated per-profile results.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="test"/> or <paramref name="profile"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="iterations"/> is less than 1.</exception>
    public async Task<ProfileTestResult> RunWithProfile(
        Func<Task<TestResult>> test,
        EnvironmentProfile profile,
        int iterations = 3)
    {
        ArgumentNullException.ThrowIfNull(test);
        ArgumentNullException.ThrowIfNull(profile);
        if (iterations < 1)
        {
            // Guard: iterations == 0 previously produced a divide-by-zero success
            // rate and an InvalidOperationException from Average() on empty input.
            throw new ArgumentOutOfRangeException(nameof(iterations), "Must be at least 1");
        }
        var results = new List<TestResult>(iterations);
        var startTime = DateTimeOffset.UtcNow;
        for (int i = 0; i < iterations; i++)
        {
            try
            {
                // Apply profile settings (in a real implementation, this would configure containers)
                await ApplyProfile(profile);
                var result = await test();
                result.ProfileName = profile.Name;
                result.Iteration = i;
                results.Add(result);
            }
            catch (Exception ex)
            {
                // A failed iteration is recorded rather than aborting the run so
                // the success rate reflects partial failures.
                results.Add(new TestResult
                {
                    ProfileName = profile.Name,
                    Iteration = i,
                    Success = false,
                    ErrorMessage = ex.Message
                });
            }
        }
        // Fix: Average() throws InvalidOperationException on an empty sequence.
        // When every iteration failed, report 0 for the aggregate metrics instead.
        var successful = results.Where(r => r.Success).ToList();
        return new ProfileTestResult
        {
            Profile = profile,
            Results = results,
            AverageValue = successful.Count > 0 ? successful.Average(r => r.Value) : 0d,
            AverageDurationMs = successful.Count > 0 ? successful.Average(r => r.DurationMs) : 0d,
            SuccessRate = (double)successful.Count / results.Count,
            StartedAt = startTime,
            CompletedAt = DateTimeOffset.UtcNow
        };
    }
    /// <summary>
    /// Asserts that results are equivalent across profiles within tolerance.
    /// </summary>
    /// <param name="report">The skew report to validate.</param>
    /// <param name="tolerance">Maximum allowed variance as a fraction (0.05 = 5%).</param>
    /// <exception cref="SkewAssertException">Thrown when skew exceeds tolerance.</exception>
    public void AssertEquivalence(SkewReport report, double tolerance = 0.05)
    {
        ArgumentNullException.ThrowIfNull(report);
        if (report.Results.Count < 2)
        {
            return; // Nothing to compare
        }
        var successfulResults = report.Results
            .Where(r => r.SuccessRate > 0)
            .ToList();
        if (successfulResults.Count < 2)
        {
            return;
        }
        // Calculate variance in values. Guard against a zero mean, which would
        // otherwise produce NaN/Infinity deviations via division by zero; use
        // the absolute mean so a negative mean cannot flip the deviation sign.
        var avgValues = successfulResults.Select(r => r.AverageValue).ToList();
        var meanValue = avgValues.Average();
        if (meanValue != 0)
        {
            var maxDeviation = avgValues.Max(v => Math.Abs(v - meanValue) / Math.Abs(meanValue));
            if (maxDeviation > tolerance)
            {
                throw new SkewAssertException(
                    $"Value skew detected: maximum deviation {maxDeviation:P1} exceeds tolerance {tolerance:P1}. " +
                    $"Profile values: {string.Join(", ", successfulResults.Select(r => $"{r.Profile.Name}={r.AverageValue:F2}"))}");
            }
        }
        // Calculate variance in success rates
        var minSuccessRate = successfulResults.Min(r => r.SuccessRate);
        var maxSuccessRate = successfulResults.Max(r => r.SuccessRate);
        if (maxSuccessRate - minSuccessRate > tolerance)
        {
            throw new SkewAssertException(
                $"Success rate skew detected: range {minSuccessRate:P1} to {maxSuccessRate:P1} exceeds tolerance {tolerance:P1}. " +
                $"Profile rates: {string.Join(", ", successfulResults.Select(r => $"{r.Profile.Name}={r.SuccessRate:P1}"))}");
        }
    }
    /// <summary>
    /// Applies the environment profile before a test iteration.
    /// </summary>
    private static async Task ApplyProfile(EnvironmentProfile profile)
    {
        // In a real implementation, this would:
        // 1. Configure network shaping via tc/netem
        // 2. Set resource limits via cgroups
        // 3. Configure container runtime settings
        // Simulate profile application delay
        if (profile.Network.RequiresNetworkShaping)
        {
            await Task.Delay(1); // Placeholder
        }
    }
    /// <summary>
    /// Detects skew using the coefficient of variation of per-profile averages.
    /// </summary>
    private static bool DetectSkew(List<ProfileTestResult> results)
    {
        if (results.Count < 2) return false;
        var successfulResults = results.Where(r => r.SuccessRate > 0).ToList();
        if (successfulResults.Count < 2) return false;
        var avgValues = successfulResults.Select(r => r.AverageValue).ToList();
        var meanValue = avgValues.Average();
        var variance = avgValues.Sum(v => Math.Pow(v - meanValue, 2)) / avgValues.Count;
        var stdDev = Math.Sqrt(variance);
        var coefficientOfVariation = meanValue > 0 ? stdDev / meanValue : 0;
        // Skew detected if coefficient of variation > 10%
        return coefficientOfVariation > 0.1;
    }
    /// <summary>
    /// Builds a human-readable per-profile summary string.
    /// </summary>
    private static string GenerateSummary(List<ProfileTestResult> results)
    {
        if (results.Count == 0) return "No results";
        var sb = new StringBuilder();
        sb.AppendLine($"Tested {results.Count} profiles:");
        foreach (var result in results)
        {
            sb.AppendLine($"  - {result.Profile.Name}: " +
                $"avg={result.AverageValue:F2}, " +
                $"duration={result.AverageDurationMs:F0}ms, " +
                $"success={result.SuccessRate:P0}");
        }
        return sb.ToString();
    }
}
/// <summary>
/// Result of a single test execution.
/// </summary>
public sealed class TestResult
{
    /// <summary>
    /// Profile used for this test. Mutable so the runner can stamp it after
    /// the test delegate returns.
    /// </summary>
    [JsonPropertyName("profileName")]
    public string ProfileName { get; set; } = string.Empty;
    /// <summary>
    /// Iteration number within the profile (stamped by the runner).
    /// </summary>
    [JsonPropertyName("iteration")]
    public int Iteration { get; set; }
    /// <summary>
    /// Whether the test succeeded. Defaults to true.
    /// </summary>
    [JsonPropertyName("success")]
    public bool Success { get; init; } = true;
    /// <summary>
    /// Numeric result value used for cross-profile comparison.
    /// </summary>
    [JsonPropertyName("value")]
    public double Value { get; init; }
    /// <summary>
    /// Test duration in milliseconds.
    /// </summary>
    [JsonPropertyName("durationMs")]
    public long DurationMs { get; init; }
    /// <summary>
    /// Error message if the test failed; null on success.
    /// </summary>
    [JsonPropertyName("errorMessage")]
    public string? ErrorMessage { get; init; }
    /// <summary>
    /// Additional metadata supplied by the test.
    /// </summary>
    [JsonPropertyName("metadata")]
    public Dictionary<string, object> Metadata { get; init; } = new();
}
/// <summary>
/// Results for a specific profile.
/// </summary>
public sealed class ProfileTestResult
{
    /// <summary>
    /// The profile these results were collected under.
    /// </summary>
    [JsonPropertyName("profile")]
    public required EnvironmentProfile Profile { get; init; }
    /// <summary>
    /// Individual per-iteration test results.
    /// </summary>
    [JsonPropertyName("results")]
    public List<TestResult> Results { get; init; } = new();
    /// <summary>
    /// Average value across successful iterations.
    /// </summary>
    [JsonPropertyName("averageValue")]
    public double AverageValue { get; init; }
    /// <summary>
    /// Average duration across successful iterations, in milliseconds.
    /// </summary>
    [JsonPropertyName("averageDurationMs")]
    public double AverageDurationMs { get; init; }
    /// <summary>
    /// Fraction of iterations that succeeded (0 to 1).
    /// </summary>
    [JsonPropertyName("successRate")]
    public double SuccessRate { get; init; }
    /// <summary>
    /// When testing of this profile started.
    /// </summary>
    [JsonPropertyName("startedAt")]
    public DateTimeOffset StartedAt { get; init; }
    /// <summary>
    /// When testing of this profile completed.
    /// </summary>
    [JsonPropertyName("completedAt")]
    public DateTimeOffset CompletedAt { get; init; }
}
/// <summary>
/// Report comparing results across environment profiles.
/// </summary>
public sealed class SkewReport
{
    // Cached: allocating a fresh JsonSerializerOptions per serialization defeats
    // its internal metadata caching (analyzer rule CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };
    /// <summary>
    /// When the report was generated.
    /// </summary>
    [JsonPropertyName("generatedAt")]
    public DateTimeOffset GeneratedAt { get; init; }
    /// <summary>
    /// Number of profiles tested.
    /// </summary>
    [JsonPropertyName("profileCount")]
    public int ProfileCount { get; init; }
    /// <summary>
    /// Results for each profile.
    /// </summary>
    [JsonPropertyName("results")]
    public List<ProfileTestResult> Results { get; init; } = [];
    /// <summary>
    /// Whether significant skew was detected.
    /// </summary>
    [JsonPropertyName("hasSkew")]
    public bool HasSkew { get; init; }
    /// <summary>
    /// Human-readable summary.
    /// </summary>
    [JsonPropertyName("summary")]
    public string Summary { get; init; } = "";
    /// <summary>
    /// Serializes the report to indented camelCase JSON.
    /// </summary>
    /// <returns>JSON representation of the report.</returns>
    public string ToJson()
    {
        return JsonSerializer.Serialize(this, SerializerOptions);
    }
    /// <summary>
    /// Generates a Markdown summary with a per-profile results table.
    /// </summary>
    /// <returns>Markdown representation of the report.</returns>
    public string ToMarkdown()
    {
        var sb = new StringBuilder();
        sb.AppendLine("# Environment Skew Report");
        sb.AppendLine();
        sb.AppendLine($"**Generated:** {GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC");
        sb.AppendLine($"**Profiles Tested:** {ProfileCount}");
        sb.AppendLine($"**Skew Detected:** {(HasSkew ? "Yes" : "No")}");
        sb.AppendLine();
        sb.AppendLine("## Results by Profile");
        sb.AppendLine();
        sb.AppendLine("| Profile | Avg Value | Avg Duration | Success Rate |");
        sb.AppendLine("|---------|-----------|--------------|--------------|");
        foreach (var result in Results)
        {
            sb.AppendLine($"| {result.Profile.Name} | " +
                $"{result.AverageValue:F2} | " +
                $"{result.AverageDurationMs:F0}ms | " +
                $"{result.SuccessRate:P0} |");
        }
        sb.AppendLine();
        sb.AppendLine("## Summary");
        sb.AppendLine();
        sb.AppendLine(Summary);
        return sb.ToString();
    }
}
/// <summary>
/// Exception thrown when environment skew exceeds tolerance.
/// </summary>
public sealed class SkewAssertException : Exception
{
    /// <summary>
    /// Creates a new skew assertion exception with a default message.
    /// </summary>
    public SkewAssertException()
    {
    }
    /// <summary>
    /// Creates a new skew assertion exception.
    /// </summary>
    /// <param name="message">Description of the detected skew.</param>
    public SkewAssertException(string message) : base(message)
    {
    }
    /// <summary>
    /// Creates a new skew assertion exception with an inner exception.
    /// </summary>
    /// <param name="message">Description of the detected skew.</param>
    /// <param name="innerException">The underlying cause.</param>
    public SkewAssertException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}

View File

@@ -0,0 +1,228 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.TestKit.Evidence;
/// <summary>
/// Assertion helpers for evidence chain traceability testing.
/// </summary>
/// <remarks>
/// These assertions validate evidence chain properties:
/// - Artifact hash stability (same inputs produce same hashes)
/// - Artifact immutability (repeated generation produces identical outputs)
/// - Traceability completeness (requirement -> test -> artifact linkage)
///
/// Usage:
/// <code>
/// [Fact]
/// [Requirement("REQ-EVIDENCE-001")]
/// public void Test_ArtifactHashStability()
/// {
/// var artifact = GenerateEvidence(input);
/// EvidenceChainAssert.ArtifactHashStable(artifact, "abc123...expected-sha256...");
/// }
///
/// [Fact]
/// [Requirement("REQ-DETERMINISM-001")]
/// public void Test_EvidenceImmutability()
/// {
/// EvidenceChainAssert.ArtifactImmutable(() => GenerateEvidence(fixedInput), iterations: 100);
/// }
/// </code>
/// </remarks>
public static class EvidenceChainAssert
{
    /// <summary>
    /// Asserts that an artifact has the expected SHA-256 hash.
    /// </summary>
    /// <param name="artifact">The artifact bytes to hash.</param>
    /// <param name="expectedHashHex">Expected SHA-256 hash in lowercase hexadecimal.</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when hash doesn't match.</exception>
    public static void ArtifactHashStable(byte[] artifact, string expectedHashHex)
    {
        ArgumentNullException.ThrowIfNull(artifact);
        ArgumentNullException.ThrowIfNull(expectedHashHex);
        var computed = ComputeSha256(artifact);
        // Case-insensitive comparison so callers may supply upper- or lowercase hex.
        if (string.Equals(computed, expectedHashHex, StringComparison.OrdinalIgnoreCase))
        {
            return;
        }
        throw new EvidenceTraceabilityException(
            $"Artifact hash mismatch.\n" +
            $"Expected: {expectedHashHex}\n" +
            $"Actual: {computed}\n" +
            "This indicates the artifact is not deterministic or has changed.");
    }
    /// <summary>
    /// Asserts that an artifact has the expected SHA-256 hash (string content).
    /// </summary>
    /// <param name="content">The content string to hash (UTF-8 encoded).</param>
    /// <param name="expectedHashHex">Expected SHA-256 hash in lowercase hexadecimal.</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when hash doesn't match.</exception>
    public static void ArtifactHashStable(string content, string expectedHashHex)
    {
        ArgumentNullException.ThrowIfNull(content);
        ArtifactHashStable(Encoding.UTF8.GetBytes(content), expectedHashHex);
    }
    /// <summary>
    /// Asserts that an artifact generator produces identical output across multiple invocations.
    /// </summary>
    /// <param name="artifactGenerator">Function that generates the artifact.</param>
    /// <param name="iterations">Number of iterations to verify (default 10).</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when outputs differ.</exception>
    public static void ArtifactImmutable(Func<byte[]> artifactGenerator, int iterations = 10)
    {
        ArgumentNullException.ThrowIfNull(artifactGenerator);
        if (iterations < 2)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), "Must be at least 2");
        }
        // The first run establishes the baseline; every further run must match it.
        var baselineHash = ComputeSha256(artifactGenerator());
        for (var attempt = 1; attempt < iterations; attempt++)
        {
            var currentHash = ComputeSha256(artifactGenerator());
            if (currentHash == baselineHash)
            {
                continue;
            }
            throw new EvidenceTraceabilityException(
                $"Artifact not immutable: iteration {attempt + 1} produced different output.\n" +
                $"First hash: {baselineHash}\n" +
                $"Current hash: {currentHash}\n" +
                "This indicates non-deterministic behavior in artifact generation.");
        }
    }
    /// <summary>
    /// Asserts that an artifact generator produces identical string output across multiple invocations.
    /// </summary>
    /// <param name="artifactGenerator">Function that generates the artifact string.</param>
    /// <param name="iterations">Number of iterations to verify (default 10).</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when outputs differ.</exception>
    public static void ArtifactImmutable(Func<string> artifactGenerator, int iterations = 10)
    {
        ArgumentNullException.ThrowIfNull(artifactGenerator);
        // Delegate to the byte[] overload via UTF-8 encoding.
        ArtifactImmutable(() => Encoding.UTF8.GetBytes(artifactGenerator()), iterations);
    }
    /// <summary>
    /// Asserts that a test method has a RequirementAttribute linking it to a requirement.
    /// </summary>
    /// <param name="testMethod">The test method to check.</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when requirement link is missing.</exception>
    public static void RequirementLinked(System.Reflection.MethodInfo testMethod)
    {
        ArgumentNullException.ThrowIfNull(testMethod);
        // Inherited attributes count, so class-level [Requirement] also satisfies the link.
        var hasLink = testMethod
            .GetCustomAttributes(typeof(RequirementAttribute), true)
            .OfType<RequirementAttribute>()
            .Any();
        if (!hasLink)
        {
            throw new EvidenceTraceabilityException(
                $"Test method '{testMethod.DeclaringType?.Name}.{testMethod.Name}' " +
                "is missing [Requirement] attribute for evidence traceability.");
        }
    }
    /// <summary>
    /// Asserts that the current test has requirement traceability configured.
    /// </summary>
    /// <remarks>
    /// Call this at the start of regulatory/compliance tests to ensure traceability.
    /// </remarks>
    /// <param name="requirementId">Expected requirement ID.</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when requirement doesn't match.</exception>
    public static void RequirementLinked(string requirementId)
    {
        ArgumentNullException.ThrowIfNull(requirementId);
        // This is a marker assertion - actual verification happens in the report generator
        // when it scans test methods for RequirementAttribute matching this ID
        if (string.IsNullOrWhiteSpace(requirementId))
        {
            throw new EvidenceTraceabilityException(
                "Requirement ID cannot be empty for evidence traceability.");
        }
    }
    /// <summary>
    /// Asserts that all components of a traceability chain are present.
    /// </summary>
    /// <param name="requirementId">The requirement being validated.</param>
    /// <param name="testId">The test identifier (e.g., "MyTests.TestMethod").</param>
    /// <param name="artifactId">The artifact identifier (e.g., hash or content-address).</param>
    /// <exception cref="EvidenceTraceabilityException">Thrown when any component is missing.</exception>
    public static void TraceabilityComplete(string requirementId, string testId, string artifactId)
    {
        // Collect every missing link so the failure message reports them all at once.
        var missing = new List<string>();
        if (string.IsNullOrWhiteSpace(requirementId))
        {
            missing.Add("Requirement ID is missing");
        }
        if (string.IsNullOrWhiteSpace(testId))
        {
            missing.Add("Test ID is missing");
        }
        if (string.IsNullOrWhiteSpace(artifactId))
        {
            missing.Add("Artifact ID is missing");
        }
        if (missing.Count == 0)
        {
            return;
        }
        throw new EvidenceTraceabilityException(
            $"Traceability chain incomplete:\n- {string.Join("\n- ", missing)}\n" +
            $"Required: Requirement[{requirementId ?? "null"}] -> Test[{testId ?? "null"}] -> Artifact[{artifactId ?? "null"}]");
    }
    /// <summary>
    /// Computes the SHA-256 hash of a byte array and returns it as lowercase hex.
    /// </summary>
    public static string ComputeSha256(byte[] data) =>
        Convert.ToHexStringLower(SHA256.HashData(data));
    /// <summary>
    /// Computes the SHA-256 hash of a string (UTF-8 encoded) and returns it as lowercase hex.
    /// </summary>
    public static string ComputeSha256(string content) =>
        ComputeSha256(Encoding.UTF8.GetBytes(content));
}
/// <summary>
/// Exception thrown when evidence traceability assertions fail.
/// </summary>
public sealed class EvidenceTraceabilityException : Exception
{
    /// <summary>
    /// Creates a new evidence traceability exception with a default message.
    /// </summary>
    public EvidenceTraceabilityException()
    {
    }
    /// <summary>
    /// Creates a new evidence traceability exception.
    /// </summary>
    /// <param name="message">Description of the traceability failure.</param>
    public EvidenceTraceabilityException(string message) : base(message)
    {
    }
    /// <summary>
    /// Creates a new evidence traceability exception with inner exception.
    /// </summary>
    /// <param name="message">Description of the traceability failure.</param>
    /// <param name="innerException">The underlying cause.</param>
    public EvidenceTraceabilityException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}

View File

@@ -0,0 +1,339 @@
using System.Reflection;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Evidence;
/// <summary>
/// Generates evidence chain traceability reports from test assemblies.
/// </summary>
/// <remarks>
/// The reporter scans test assemblies for [Requirement] attributes and generates
/// a traceability matrix showing requirement -> test -> artifact linkage.
///
/// Usage:
/// <code>
/// var reporter = new EvidenceChainReporter();
/// reporter.AddAssembly(typeof(MyTests).Assembly);
/// var report = reporter.GenerateReport();
/// Console.WriteLine(report.ToMarkdown());
/// </code>
/// </remarks>
public sealed class EvidenceChainReporter
{
    // Assemblies queued for scanning; deduplicated on add.
    private readonly List<Assembly> _assemblies = [];
    // Requirement ID -> linked tests; case-insensitive so "REQ-1" and "req-1" merge.
    private readonly Dictionary<string, List<TestEvidence>> _requirementMap = new(StringComparer.OrdinalIgnoreCase);
    /// <summary>
    /// Adds an assembly to scan for requirement-linked tests.
    /// </summary>
    /// <param name="assembly">Assembly whose test methods should be inspected.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="assembly"/> is null.</exception>
    public void AddAssembly(Assembly assembly)
    {
        ArgumentNullException.ThrowIfNull(assembly);
        if (!_assemblies.Contains(assembly))
        {
            _assemblies.Add(assembly);
        }
    }
    /// <summary>
    /// Scans all added assemblies and generates a traceability report.
    /// </summary>
    /// <returns>Report with requirement-to-test linkage and coverage warnings.</returns>
    public EvidenceChainReport GenerateReport()
    {
        // Rebuild the map from scratch so repeated calls stay idempotent.
        _requirementMap.Clear();
        foreach (var assembly in _assemblies)
        {
            ScanAssembly(assembly);
        }
        var requirements = _requirementMap
            .Select(kvp => new RequirementTraceability
            {
                RequirementId = kvp.Key,
                Tests = kvp.Value.OrderBy(t => t.TestId).ToList(),
                TestCount = kvp.Value.Count
            })
            .OrderBy(r => r.RequirementId)
            .ToList();
        var totalTests = requirements.Sum(r => r.TestCount);
        var orphanedRequirements = requirements.Where(r => r.TestCount == 0).Select(r => r.RequirementId).ToList();
        return new EvidenceChainReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            AssembliesScanned = _assemblies.Select(a => a.GetName().Name ?? a.FullName ?? "Unknown").ToList(),
            Requirements = requirements,
            TotalRequirements = requirements.Count,
            TotalTests = totalTests,
            OrphanedRequirements = orphanedRequirements,
            Warnings = GenerateWarnings(requirements)
        };
    }
    /// <summary>
    /// Scans one assembly for test methods carrying [Requirement] links.
    /// </summary>
    private void ScanAssembly(Assembly assembly)
    {
        Type[] allTypes;
        try
        {
            allTypes = assembly.GetTypes();
        }
        catch (ReflectionTypeLoadException ex)
        {
            // Fix: partially loadable assemblies (e.g. missing optional dependencies)
            // throw here but still expose the types that did resolve; scan those
            // instead of failing the whole report.
            allTypes = ex.Types.Where(t => t is not null).Select(t => t!).ToArray();
        }
        var testTypes = allTypes.Where(t => t.IsClass && !t.IsAbstract);
        foreach (var type in testTypes)
        {
            // Check class-level requirement attributes
            var classRequirements = type.GetCustomAttributes<RequirementAttribute>(true).ToList();
            foreach (var method in type.GetMethods(BindingFlags.Public | BindingFlags.Instance))
            {
                // Check if this is a test method (has Fact, Theory, or Test attribute);
                // matching by name avoids a hard reference to any single test framework.
                var isTest = method.GetCustomAttributes(true)
                    .Any(a => a.GetType().Name is "FactAttribute" or "TheoryAttribute" or "TestAttribute");
                if (!isTest)
                {
                    continue;
                }
                var methodRequirements = method.GetCustomAttributes<RequirementAttribute>(true).ToList();
                var allRequirements = classRequirements.Concat(methodRequirements).ToList();
                foreach (var req in allRequirements)
                {
                    if (!_requirementMap.TryGetValue(req.RequirementId, out var tests))
                    {
                        tests = [];
                        _requirementMap[req.RequirementId] = tests;
                    }
                    var testId = $"{type.FullName}.{method.Name}";
                    // Avoid duplicates (a method may carry the same requirement at
                    // both class and method level)
                    if (!tests.Any(t => t.TestId == testId))
                    {
                        tests.Add(new TestEvidence
                        {
                            TestId = testId,
                            TestName = method.Name,
                            TestClass = type.FullName ?? type.Name,
                            SprintTaskId = req.SprintTaskId,
                            ComplianceControl = req.ComplianceControl,
                            SourceDocument = req.SourceDocument,
                            AssemblyName = assembly.GetName().Name ?? "Unknown"
                        });
                    }
                }
            }
        }
    }
    /// <summary>
    /// Produces coverage-gap warnings for the report.
    /// </summary>
    private static List<string> GenerateWarnings(List<RequirementTraceability> requirements)
    {
        var warnings = new List<string>();
        // Check for requirements with no tests
        var emptyRequirements = requirements.Where(r => r.TestCount == 0).ToList();
        if (emptyRequirements.Count > 0)
        {
            warnings.Add($"Requirements with no linked tests: {string.Join(", ", emptyRequirements.Select(r => r.RequirementId))}");
        }
        // Check for requirements with very few tests (potential coverage gaps)
        var lowCoverageRequirements = requirements.Where(r => r.TestCount == 1).ToList();
        if (lowCoverageRequirements.Count > 0)
        {
            warnings.Add($"Requirements with only 1 test (consider additional coverage): {string.Join(", ", lowCoverageRequirements.Select(r => r.RequirementId))}");
        }
        return warnings;
    }
}
/// <summary>
/// Evidence chain traceability report.
/// </summary>
public sealed class EvidenceChainReport
{
    // Cached: allocating a fresh JsonSerializerOptions per serialization defeats
    // its internal metadata caching (analyzer rule CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };
    /// <summary>
    /// When the report was generated.
    /// </summary>
    public DateTimeOffset GeneratedAt { get; init; }
    /// <summary>
    /// Names of assemblies that were scanned.
    /// </summary>
    public List<string> AssembliesScanned { get; init; } = [];
    /// <summary>
    /// Traceability data for each requirement.
    /// </summary>
    public List<RequirementTraceability> Requirements { get; init; } = [];
    /// <summary>
    /// Total number of requirements found.
    /// </summary>
    public int TotalRequirements { get; init; }
    /// <summary>
    /// Total number of tests linked to requirements.
    /// </summary>
    public int TotalTests { get; init; }
    /// <summary>
    /// Requirements that have no linked tests.
    /// </summary>
    public List<string> OrphanedRequirements { get; init; } = [];
    /// <summary>
    /// Warning messages about coverage gaps.
    /// </summary>
    public List<string> Warnings { get; init; } = [];
    /// <summary>
    /// Generates a JSON representation of the report.
    /// </summary>
    /// <returns>Indented camelCase JSON; null values omitted.</returns>
    public string ToJson()
    {
        return JsonSerializer.Serialize(this, SerializerOptions);
    }
    /// <summary>
    /// Generates a Markdown representation of the report.
    /// </summary>
    /// <returns>Markdown with a traceability matrix and per-requirement details.</returns>
    public string ToMarkdown()
    {
        var sb = new StringBuilder();
        sb.AppendLine("# Evidence Chain Traceability Report");
        sb.AppendLine();
        sb.AppendLine($"**Generated:** {GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC");
        sb.AppendLine($"**Assemblies Scanned:** {string.Join(", ", AssembliesScanned)}");
        sb.AppendLine($"**Total Requirements:** {TotalRequirements}");
        sb.AppendLine($"**Total Tests:** {TotalTests}");
        sb.AppendLine();
        if (Warnings.Count > 0)
        {
            sb.AppendLine("## Warnings");
            sb.AppendLine();
            foreach (var warning in Warnings)
            {
                sb.AppendLine($"- {warning}");
            }
            sb.AppendLine();
        }
        sb.AppendLine("## Traceability Matrix");
        sb.AppendLine();
        sb.AppendLine("| Requirement | Test Count | Tests |");
        sb.AppendLine("|-------------|------------|-------|");
        foreach (var req in Requirements)
        {
            // Keep matrix rows readable by truncating long test lists.
            var testLinks = req.Tests.Count > 3
                ? $"{string.Join(", ", req.Tests.Take(3).Select(t => t.TestName))} (+{req.Tests.Count - 3} more)"
                : string.Join(", ", req.Tests.Select(t => t.TestName));
            sb.AppendLine($"| {req.RequirementId} | {req.TestCount} | {testLinks} |");
        }
        sb.AppendLine();
        sb.AppendLine("## Detailed Test Mapping");
        sb.AppendLine();
        foreach (var req in Requirements)
        {
            sb.AppendLine($"### {req.RequirementId}");
            sb.AppendLine();
            if (req.Tests.Count == 0)
            {
                sb.AppendLine("*No tests linked to this requirement.*");
            }
            else
            {
                sb.AppendLine("| Test | Class | Sprint Task | Compliance |");
                sb.AppendLine("|------|-------|-------------|------------|");
                foreach (var test in req.Tests)
                {
                    var sprintTask = string.IsNullOrEmpty(test.SprintTaskId) ? "-" : test.SprintTaskId;
                    var compliance = string.IsNullOrEmpty(test.ComplianceControl) ? "-" : test.ComplianceControl;
                    sb.AppendLine($"| {test.TestName} | {test.TestClass} | {sprintTask} | {compliance} |");
                }
            }
            sb.AppendLine();
        }
        return sb.ToString();
    }
}
/// <summary>
/// Traceability data for a single requirement.
/// </summary>
public sealed class RequirementTraceability
{
    /// <summary>
    /// The requirement identifier.
    /// </summary>
    public string RequirementId { get; init; } = string.Empty;
    /// <summary>
    /// Tests linked to this requirement.
    /// </summary>
    public List<TestEvidence> Tests { get; init; } = new();
    /// <summary>
    /// Number of tests linked to this requirement.
    /// </summary>
    public int TestCount { get; init; }
}
/// <summary>
/// Evidence data for a single test.
/// </summary>
public sealed class TestEvidence
{
    /// <summary>
    /// Fully qualified test identifier (namespace.class.method).
    /// </summary>
    public string TestId { get; init; } = string.Empty;
    /// <summary>
    /// Test method name.
    /// </summary>
    public string TestName { get; init; } = string.Empty;
    /// <summary>
    /// Fully qualified test class name.
    /// </summary>
    public string TestClass { get; init; } = string.Empty;
    /// <summary>
    /// Sprint task ID if specified; empty otherwise.
    /// </summary>
    public string SprintTaskId { get; init; } = string.Empty;
    /// <summary>
    /// Compliance control reference if specified; empty otherwise.
    /// </summary>
    public string ComplianceControl { get; init; } = string.Empty;
    /// <summary>
    /// Source document reference if specified; empty otherwise.
    /// </summary>
    public string SourceDocument { get; init; } = string.Empty;
    /// <summary>
    /// Assembly containing the test.
    /// </summary>
    public string AssemblyName { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,102 @@
using Xunit.v3;
namespace StellaOps.TestKit.Evidence;
/// <summary>
/// Links a test method to a requirement identifier for evidence traceability.
/// </summary>
/// <remarks>
/// Evidence traceability ensures that every critical behavior links:
/// requirement -> test -> run -> artifact -> deployed version.
///
/// Usage:
/// <code>
/// [Fact]
/// [Requirement("REQ-AUTH-001", SprintTaskId = "AUTH-0127-001")]
/// public async Task TestUserAuthentication()
/// {
/// // Verify authentication works as required
/// }
///
/// [Fact]
/// [Requirement("REQ-AUDIT-002", SprintTaskId = "AUDIT-0127-003")]
/// [Intent(TestIntents.Regulatory, "Required for SOC2 AU-12")]
/// public void TestAuditLogImmutability()
/// {
/// // Verify audit logs cannot be modified
/// }
/// </code>
///
/// The attribute automatically adds xUnit Traits for filtering:
/// <code>
/// dotnet test --filter "Requirement=REQ-AUTH-001"
/// dotnet test --filter "SprintTask=AUTH-0127-001"
/// </code>
/// </remarks>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = true, Inherited = true)]
public sealed class RequirementAttribute : Attribute, ITraitAttribute
{
    /// <summary>
    /// Creates a requirement link for the test.
    /// </summary>
    /// <param name="requirementId">The requirement identifier.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="requirementId"/> is null.</exception>
    public RequirementAttribute(string requirementId)
    {
        RequirementId = requirementId ?? throw new ArgumentNullException(nameof(requirementId));
    }
    /// <summary>
    /// The requirement identifier this test validates.
    /// </summary>
    /// <remarks>
    /// Should match requirement IDs in your requirements management system
    /// (e.g., "REQ-AUTH-001", "SECURITY-003", "FR-SBOM-001").
    /// </remarks>
    public string RequirementId { get; }
    /// <summary>
    /// Optional sprint task ID that implemented this requirement.
    /// </summary>
    /// <remarks>
    /// Links to the sprint task in docs/implplan/SPRINT_*.md files.
    /// Format: "&lt;MODULE&gt;-&lt;DATE&gt;-&lt;TASK&gt;" (e.g., "AUTH-0127-001").
    /// </remarks>
    public string SprintTaskId { get; init; } = string.Empty;
    /// <summary>
    /// Optional compliance control reference.
    /// </summary>
    /// <remarks>
    /// Links to external compliance controls (e.g., "SOC2-CC6.1", "GDPR-Art.17").
    /// </remarks>
    public string ComplianceControl { get; init; } = string.Empty;
    /// <summary>
    /// Optional requirement source document.
    /// </summary>
    /// <remarks>
    /// Path or URL to the document defining this requirement.
    /// </remarks>
    public string SourceDocument { get; init; } = string.Empty;
    /// <inheritdoc />
    public IReadOnlyCollection<KeyValuePair<string, string>> GetTraits()
    {
        // The requirement trait is always present; the optional identifiers are
        // only surfaced as traits when they were actually supplied.
        var pairs = new List<KeyValuePair<string, string>>
        {
            new("Requirement", RequirementId)
        };
        if (!string.IsNullOrWhiteSpace(SprintTaskId))
        {
            pairs.Add(new("SprintTask", SprintTaskId));
        }
        if (!string.IsNullOrWhiteSpace(ComplianceControl))
        {
            pairs.Add(new("ComplianceControl", ComplianceControl));
        }
        return pairs;
    }
}

View File

@@ -37,8 +37,28 @@ public static class ContractTestHelper
var actualNormalized = NormalizeOpenApiSchema(actualSchema);
var expectedNormalized = NormalizeOpenApiSchema(expectedSchema);
actualNormalized.Should().Be(expectedNormalized,
"OpenAPI schema should match snapshot. Set STELLAOPS_UPDATE_FIXTURES=true to update.");
// Use Assert.Equal instead of FluentAssertions to avoid format string issues
// when comparing JSON with curly braces
if (!string.Equals(actualNormalized, expectedNormalized, StringComparison.Ordinal))
{
// Find first difference for helpful error message
var diffIndex = FindFirstDifference(actualNormalized, expectedNormalized);
var contextStart = Math.Max(0, diffIndex - 50);
var contextEnd = Math.Min(Math.Max(actualNormalized.Length, expectedNormalized.Length), diffIndex + 50);
var actualContext = diffIndex < actualNormalized.Length
? actualNormalized.Substring(contextStart, Math.Min(contextEnd - contextStart, actualNormalized.Length - contextStart))
: "(end of string)";
var expectedContext = diffIndex < expectedNormalized.Length
? expectedNormalized.Substring(contextStart, Math.Min(contextEnd - contextStart, expectedNormalized.Length - contextStart))
: "(end of string)";
throw new Xunit.Sdk.XunitException(
$"OpenAPI schema should match snapshot. Set STELLAOPS_UPDATE_FIXTURES=true to update.\n" +
$"First difference at position {diffIndex}:\n" +
$"Actual: ...{actualContext}...\n" +
$"Expected: ...{expectedContext}...");
}
}
/// <summary>
@@ -156,17 +176,39 @@ public static class ContractTestHelper
return new SchemaBreakingChanges(breakingChanges, nonBreakingChanges);
}
private static int FindFirstDifference(string a, string b)
{
    // Walk the shared prefix; the first index where the strings disagree is
    // the answer. If no mismatch is found, the shorter length marks the point
    // where one string ends and the other continues (prefix case).
    var limit = Math.Min(a.Length, b.Length);
    var index = 0;
    while (index < limit && a[index] == b[index])
    {
        index++;
    }
    return index;
}
private static string NormalizeOpenApiSchema(string schema)
{
try
{
var doc = JsonDocument.Parse(schema);
// Remove non-deterministic fields
return JsonSerializer.Serialize(doc, new JsonSerializerOptions
{
// Remove non-deterministic fields like version hash
var serialized = JsonSerializer.Serialize(doc, new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
});
// Strip version hash suffix (e.g., "1.0.0+abc123" -> "1.0.0")
// This prevents test failures due to git commit hash changes
// Note: The + character may be serialized as literal "+" or as "\u002B" unicode escape
return System.Text.RegularExpressions.Regex.Replace(
serialized,
@"(""version""\s*:\s*""[^""]*?)(\+|\\u002[Bb])[a-f0-9]+""",
@"$1""",
System.Text.RegularExpressions.RegexOptions.IgnoreCase);
}
catch
{
@@ -176,7 +218,7 @@ public static class ContractTestHelper
private static bool ShouldUpdateSnapshots()
{
    // Opt-in: snapshots are only rewritten when the developer explicitly sets
    // STELLAOPS_UPDATE_FIXTURES=true. (Merge residue previously left two
    // consecutive return statements here; only this one is intended.)
    return global::System.Environment.GetEnvironmentVariable("STELLAOPS_UPDATE_FIXTURES") == "true";
}
private static async Task UpdateSnapshotAsync(string path, string content)

View File

@@ -0,0 +1,124 @@
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Incident;
/// <summary>
/// Metadata describing a production incident for test generation.
/// </summary>
/// <remarks>
/// Every production incident should produce a permanent regression test.
/// This record captures the incident context needed to generate and maintain
/// that test over time.
///
/// Usage:
/// <code>
/// var metadata = new IncidentMetadata
/// {
/// IncidentId = "INC-2026-001",
/// OccurredAt = DateTimeOffset.Parse("2026-01-15T10:30:00Z"),
/// RootCause = "Race condition in concurrent bundle creation",
/// AffectedModules = ["EvidenceLocker", "Policy"],
/// Severity = IncidentSeverity.P1,
/// Title = "Evidence bundle duplication in high-concurrency scenario"
/// };
/// </code>
/// </remarks>
public sealed record IncidentMetadata
{
/// <summary>
/// Unique incident identifier from the incident management system.
/// </summary>
/// <example>INC-2026-001, PROD-0115-003</example>
[JsonPropertyName("incidentId")]
public required string IncidentId { get; init; }
/// <summary>
/// When the incident occurred (UTC).
/// </summary>
[JsonPropertyName("occurredAt")]
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Brief description of the root cause.
/// </summary>
/// <remarks>
/// Also used by the test generator to derive a test method name, so keep it
/// short and descriptive (e.g. "Race condition in concurrent bundle creation").
/// </remarks>
[JsonPropertyName("rootCause")]
public required string RootCause { get; init; }
/// <summary>
/// Modules affected by the incident.
/// </summary>
/// <remarks>
/// The first module listed determines the namespace of the generated test.
/// </remarks>
[JsonPropertyName("affectedModules")]
public required string[] AffectedModules { get; init; }
/// <summary>
/// Incident severity level.
/// </summary>
[JsonPropertyName("severity")]
public required IncidentSeverity Severity { get; init; }
/// <summary>
/// Short descriptive title for the incident.
/// </summary>
/// <remarks>Optional; defaults to an empty string.</remarks>
[JsonPropertyName("title")]
public string Title { get; init; } = "";
/// <summary>
/// Link to the incident report or postmortem.
/// </summary>
/// <remarks>Optional; defaults to an empty string.</remarks>
[JsonPropertyName("reportUrl")]
public string ReportUrl { get; init; } = "";
/// <summary>
/// When the incident was resolved.
/// </summary>
/// <remarks>Null when no resolution time has been recorded.</remarks>
[JsonPropertyName("resolvedAt")]
public DateTimeOffset? ResolvedAt { get; init; }
/// <summary>
/// Correlation IDs from the incident for replay matching.
/// </summary>
/// <remarks>Optional; defaults to an empty array.</remarks>
[JsonPropertyName("correlationIds")]
public string[] CorrelationIds { get; init; } = [];
/// <summary>
/// Sprint task ID that implemented the fix.
/// </summary>
/// <remarks>When non-empty, emitted as a "SprintTask" trait on the generated test.</remarks>
[JsonPropertyName("fixTaskId")]
public string FixTaskId { get; init; } = "";
/// <summary>
/// Tags for categorization (e.g., "race-condition", "timeout", "data-corruption").
/// </summary>
/// <remarks>Each tag becomes a "Tag:*" trait on the generated test.</remarks>
[JsonPropertyName("tags")]
public string[] Tags { get; init; } = [];
}
/// <summary>
/// Incident severity levels.
/// </summary>
public enum IncidentSeverity
{
// Lower numeric value = higher severity; P1 (= 1) is the most severe.
// Values are explicit so serialized payloads stay stable if members are reordered.
/// <summary>
/// Critical incident: service down, data loss, security breach.
/// Tests for P1 incidents block releases.
/// </summary>
P1 = 1,
/// <summary>
/// Major incident: significant degradation, partial outage.
/// Tests for P2 incidents block releases.
/// </summary>
P2 = 2,
/// <summary>
/// Minor incident: limited impact, workaround available.
/// Tests for P3 incidents are warning-only in CI.
/// </summary>
P3 = 3,
/// <summary>
/// Low-impact incident: cosmetic issues, minor bugs.
/// Tests for P4 incidents are informational.
/// </summary>
P4 = 4
}

View File

@@ -0,0 +1,358 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace StellaOps.TestKit.Incident;
/// <summary>
/// Generates test scaffolds from replay manifests and incident metadata.
/// </summary>
/// <remarks>
/// This generator creates regression test scaffolds from production incidents.
/// The workflow is:
/// 1. Incident triggers capture of event sequence (existing replay infrastructure).
/// 2. Replay manifest exported with correlation IDs and timestamps.
/// 3. This generator creates a test scaffold from the manifest.
/// 4. Human reviews and approves the test for permanent inclusion.
///
/// Usage:
/// <code>
/// var generator = new IncidentTestGenerator();
/// var manifest = LoadReplayManifest("path/to/manifest.json");
/// var metadata = new IncidentMetadata
/// {
/// IncidentId = "INC-2026-001",
/// OccurredAt = DateTimeOffset.UtcNow,
/// RootCause = "Race condition in concurrent writes",
/// AffectedModules = ["EvidenceLocker"],
/// Severity = IncidentSeverity.P1
/// };
///
/// var scaffold = generator.GenerateFromManifestJson(manifest, metadata);
/// var code = scaffold.GenerateTestCode();
/// </code>
/// </remarks>
public sealed class IncidentTestGenerator
{
    // Scaffolds registered for tracking, keyed by incident id. Case-insensitive
    // so "inc-2026-001" and "INC-2026-001" refer to the same test.
    private readonly Dictionary<string, TestScaffold> _registeredTests = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>
    /// Generates a test scaffold from a replay manifest JSON.
    /// </summary>
    /// <param name="manifestJson">The replay manifest as JSON string.</param>
    /// <param name="metadata">Incident metadata.</param>
    /// <returns>A test scaffold ready for code generation.</returns>
    /// <exception cref="ArgumentNullException">When either argument is null.</exception>
    /// <exception cref="JsonException">When <paramref name="manifestJson"/> is not valid JSON.</exception>
    public TestScaffold GenerateFromManifestJson(string manifestJson, IncidentMetadata metadata)
    {
        ArgumentNullException.ThrowIfNull(manifestJson);
        ArgumentNullException.ThrowIfNull(metadata);

        // Hash the raw manifest text first so the scaffold records exactly
        // which manifest produced it, independent of any later reformatting.
        var manifestHash = ComputeHash(manifestJson);

        // Parse manifest to extract relevant data.
        using var doc = JsonDocument.Parse(manifestJson);
        var root = doc.RootElement;

        var inputFixtures = ExtractInputFixtures(root);
        var expectedOutputs = ExtractExpectedOutputs(root);
        var implementationNotes = GenerateImplementationNotes(root, metadata);
        var testClassName = GenerateClassName(metadata);
        var testMethodName = GenerateMethodName(metadata);

        return new TestScaffold
        {
            Metadata = metadata,
            TestClassName = testClassName,
            TestMethodName = testMethodName,
            Namespace = DetermineNamespace(metadata),
            InputFixtures = inputFixtures,
            ExpectedOutputs = expectedOutputs,
            ReplayManifestHash = manifestHash,
            GeneratedAt = DateTimeOffset.UtcNow,
            Traits = GenerateTraits(metadata),
            ImplementationNotes = implementationNotes
        };
    }

    /// <summary>
    /// Registers an incident test for tracking. Re-registering the same
    /// incident id replaces the previous scaffold.
    /// </summary>
    /// <param name="incidentId">The incident identifier.</param>
    /// <param name="scaffold">The test scaffold.</param>
    /// <exception cref="ArgumentNullException">When either argument is null.</exception>
    public void RegisterIncidentTest(string incidentId, TestScaffold scaffold)
    {
        ArgumentNullException.ThrowIfNull(incidentId);
        ArgumentNullException.ThrowIfNull(scaffold);
        _registeredTests[incidentId] = scaffold;
    }

    /// <summary>
    /// Gets all registered incident tests, keyed by incident id.
    /// </summary>
    public IReadOnlyDictionary<string, TestScaffold> RegisteredTests => _registeredTests;

    /// <summary>
    /// Generates a summary report of registered incident tests.
    /// </summary>
    public IncidentTestReport GenerateReport()
    {
        var tests = _registeredTests.Values.ToList();
        return new IncidentTestReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            TotalTests = tests.Count,
            // NOTE: "BySeveority" is a historical typo in IncidentTestReport;
            // it is kept because renaming the property would break consumers
            // and serialized reports.
            BySeveority = tests
                .GroupBy(t => t.Metadata.Severity)
                .ToDictionary(g => g.Key, g => g.Count()),
            // A test affecting multiple modules is counted once per module.
            ByModule = tests
                .SelectMany(t => t.Metadata.AffectedModules.Select(m => (Module: m, Test: t)))
                .GroupBy(x => x.Module)
                .ToDictionary(g => g.Key, g => g.Count()),
            Tests = tests.Select(t => new IncidentTestSummary
            {
                IncidentId = t.Metadata.IncidentId,
                Title = t.Metadata.Title,
                Severity = t.Metadata.Severity,
                AffectedModules = t.Metadata.AffectedModules,
                TestClassName = t.TestClassName,
                GeneratedAt = t.GeneratedAt
            }).ToList()
        };
    }

    /// <summary>
    /// Extracts input-fixture JSON fragments (scan, reachability graphs,
    /// runtime traces, proof spines) from the manifest root, when present.
    /// </summary>
    private static Dictionary<string, string> ExtractInputFixtures(JsonElement root)
    {
        var fixtures = new Dictionary<string, string>();
        // Extract scan metadata as fixture.
        if (root.TryGetProperty("scan", out var scan))
        {
            fixtures["scan"] = scan.GetRawText();
        }
        // Extract reachability section as fixture.
        if (root.TryGetProperty("reachability", out var reachability))
        {
            if (reachability.TryGetProperty("graphs", out var graphs))
            {
                fixtures["reachabilityGraphs"] = graphs.GetRawText();
            }
            if (reachability.TryGetProperty("runtimeTraces", out var traces))
            {
                fixtures["runtimeTraces"] = traces.GetRawText();
            }
        }
        // Extract proof spines if present.
        if (root.TryGetProperty("proofSpines", out var spines))
        {
            fixtures["proofSpines"] = spines.GetRawText();
        }
        return fixtures;
    }

    /// <summary>
    /// Extracts expected-output values (policy digests, graph hashes) from
    /// the manifest root, when present.
    /// </summary>
    private static Dictionary<string, string> ExtractExpectedOutputs(JsonElement root)
    {
        var outputs = new Dictionary<string, string>();
        // Extract policy digests as expected outputs.
        if (root.TryGetProperty("scan", out var scan))
        {
            if (scan.TryGetProperty("policyDigest", out var policyDigest) &&
                policyDigest.ValueKind == JsonValueKind.String)
            {
                outputs["policyDigest"] = policyDigest.GetString()!;
            }
            if (scan.TryGetProperty("scorePolicyDigest", out var scoreDigest) &&
                scoreDigest.ValueKind == JsonValueKind.String)
            {
                outputs["scorePolicyDigest"] = scoreDigest.GetString()!;
            }
        }
        // Extract graph hashes as expected outputs (comma-joined, order preserved).
        if (root.TryGetProperty("reachability", out var reachability) &&
            reachability.TryGetProperty("graphs", out var graphs) &&
            graphs.ValueKind == JsonValueKind.Array)
        {
            var graphHashes = new List<string>();
            foreach (var graph in graphs.EnumerateArray())
            {
                if (graph.TryGetProperty("hash", out var hash) &&
                    hash.ValueKind == JsonValueKind.String)
                {
                    graphHashes.Add(hash.GetString()!);
                }
            }
            if (graphHashes.Count > 0)
            {
                outputs["graphHashes"] = string.Join(",", graphHashes);
            }
        }
        return outputs;
    }

    /// <summary>
    /// Builds human-readable notes for the engineer completing the test.
    /// </summary>
    private static List<string> GenerateImplementationNotes(JsonElement root, IncidentMetadata metadata)
    {
        var notes = new List<string>
        {
            $"This test validates the fix for incident {metadata.IncidentId}.",
            $"Root cause: {metadata.RootCause}",
            $"Affected modules: {string.Join(", ", metadata.AffectedModules)}"
        };
        // Add notes based on manifest content.
        if (root.TryGetProperty("schemaVersion", out var version))
        {
            notes.Add($"Replay manifest version: {version.GetString()}");
        }
        if (root.TryGetProperty("scan", out var scan) &&
            scan.TryGetProperty("time", out var time))
        {
            notes.Add($"Original scan time: {time.GetString()}");
        }
        notes.Add("Review the fixtures and expected outputs before finalizing the test.");
        notes.Add("Ensure deterministic fixtures are used for reproducibility.");
        if (metadata.Severity == IncidentSeverity.P1 || metadata.Severity == IncidentSeverity.P2)
        {
            notes.Add($"IMPORTANT: This is a {metadata.Severity} incident - test failures will block releases.");
        }
        return notes;
    }

    /// <summary>
    /// Converts the incident id into a valid C# class name.
    /// INC-2026-001 -> Incident_INC_2026_001_Tests.
    /// </summary>
    private static string GenerateClassName(IncidentMetadata metadata)
    {
        var sanitized = Regex.Replace(metadata.IncidentId, @"[^a-zA-Z0-9]", "_");
        return $"Incident_{sanitized}_Tests";
    }

    /// <summary>
    /// Derives a valid C# method name from the first words of the root cause.
    /// "Race condition in concurrent writes" -> Validates_RaceConditionIn_Fix.
    /// </summary>
    private static string GenerateMethodName(IncidentMetadata metadata)
    {
        // Strip non-alphanumeric characters from each word so inputs such as
        // "I/O timeout" or "(null) deref" cannot yield an invalid identifier.
        // Words that become empty after sanitization are skipped.
        var words = metadata.RootCause
            .Split(' ', StringSplitOptions.RemoveEmptyEntries)
            .Select(w => Regex.Replace(w, @"[^a-zA-Z0-9]", ""))
            .Where(w => w.Length > 0)
            .Take(3)
            .Select(w => char.ToUpperInvariant(w[0]) + w[1..].ToLowerInvariant());
        return $"Validates_{string.Join("", words)}_Fix";
    }

    /// <summary>
    /// Picks the test namespace from the first affected module ("Core" when none).
    /// </summary>
    private static string DetermineNamespace(IncidentMetadata metadata)
    {
        var module = metadata.AffectedModules.FirstOrDefault() ?? "Core";
        return $"StellaOps.{module}.Tests.PostIncident";
    }

    /// <summary>
    /// Builds xUnit traits from modules, tags, and the fix task id.
    /// </summary>
    private static Dictionary<string, string> GenerateTraits(IncidentMetadata metadata)
    {
        var traits = new Dictionary<string, string>();
        foreach (var module in metadata.AffectedModules)
        {
            traits[$"Module:{module}"] = "true";
        }
        foreach (var tag in metadata.Tags)
        {
            traits[$"Tag:{tag}"] = "true";
        }
        if (!string.IsNullOrEmpty(metadata.FixTaskId))
        {
            traits["SprintTask"] = metadata.FixTaskId;
        }
        return traits;
    }

    /// <summary>
    /// SHA-256 of the UTF-8 content, formatted as "sha256:&lt;lowercase hex&gt;".
    /// </summary>
    private static string ComputeHash(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
/// <summary>
/// Summary report of registered incident tests.
/// </summary>
public sealed class IncidentTestReport
{
/// <summary>
/// When the report was generated.
/// </summary>
public DateTimeOffset GeneratedAt { get; init; }
/// <summary>
/// Total number of incident tests.
/// </summary>
public int TotalTests { get; init; }
/// <summary>
/// Count of tests by severity.
/// </summary>
/// <remarks>
/// NOTE(review): the property name contains a typo ("BySeveority", should be
/// "BySeverity"). It is left unchanged here because renaming would break
/// existing callers and serialized report payloads; fix in a coordinated
/// change together with IncidentTestGenerator.GenerateReport.
/// </remarks>
public Dictionary<IncidentSeverity, int> BySeveority { get; init; } = [];
/// <summary>
/// Count of tests by affected module. A test that touches several modules
/// is counted once per module.
/// </summary>
public Dictionary<string, int> ByModule { get; init; } = [];
/// <summary>
/// Summary of each test.
/// </summary>
public List<IncidentTestSummary> Tests { get; init; } = [];
}
/// <summary>
/// Summary of a single incident test.
/// </summary>
public sealed class IncidentTestSummary
{
/// <summary>
/// The incident identifier.
/// </summary>
public string IncidentId { get; init; } = "";
/// <summary>
/// Incident title. May be empty when the incident had no title.
/// </summary>
public string Title { get; init; } = "";
/// <summary>
/// Incident severity.
/// </summary>
public IncidentSeverity Severity { get; init; }
/// <summary>
/// Affected modules.
/// </summary>
public string[] AffectedModules { get; init; } = [];
/// <summary>
/// Generated test class name.
/// </summary>
public string TestClassName { get; init; } = "";
/// <summary>
/// When the test was generated.
/// </summary>
public DateTimeOffset GeneratedAt { get; init; }
}

View File

@@ -0,0 +1,232 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Incident;
/// <summary>
/// Represents a generated test scaffold from an incident or replay manifest.
/// </summary>
/// <remarks>
/// The scaffold provides a starting point for creating a regression test.
/// It includes the incident context, input fixtures, and expected outcomes
/// derived from the replay manifest.
///
/// Usage:
/// <code>
/// var generator = new IncidentTestGenerator();
/// var scaffold = generator.GenerateFromReplayManifest(manifest, metadata);
///
/// // Generate test code
/// var code = scaffold.GenerateTestCode();
/// File.WriteAllText($"Tests/{scaffold.TestClassName}.cs", code);
/// </code>
/// </remarks>
public sealed class TestScaffold
{
    /// <summary>
    /// The incident this test validates.
    /// </summary>
    [JsonPropertyName("metadata")]
    public required IncidentMetadata Metadata { get; init; }
    /// <summary>
    /// Suggested test class name.
    /// </summary>
    [JsonPropertyName("testClassName")]
    public required string TestClassName { get; init; }
    /// <summary>
    /// Suggested test method name.
    /// </summary>
    [JsonPropertyName("testMethodName")]
    public required string TestMethodName { get; init; }
    /// <summary>
    /// Namespace for the generated test.
    /// </summary>
    [JsonPropertyName("namespace")]
    public string Namespace { get; init; } = "StellaOps.Tests.PostIncident";
    /// <summary>
    /// Input fixtures required for the test (serialized as JSON).
    /// </summary>
    [JsonPropertyName("inputFixtures")]
    public Dictionary<string, string> InputFixtures { get; init; } = [];
    /// <summary>
    /// Expected outputs to assert (serialized as JSON or hash).
    /// </summary>
    [JsonPropertyName("expectedOutputs")]
    public Dictionary<string, string> ExpectedOutputs { get; init; } = [];
    /// <summary>
    /// Hash of the replay manifest used to generate this scaffold.
    /// </summary>
    [JsonPropertyName("replayManifestHash")]
    public string ReplayManifestHash { get; init; } = "";
    /// <summary>
    /// When this scaffold was generated.
    /// </summary>
    [JsonPropertyName("generatedAt")]
    public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
    /// <summary>
    /// Test categories/traits to apply.
    /// </summary>
    [JsonPropertyName("traits")]
    public Dictionary<string, string> Traits { get; init; } = [];
    /// <summary>
    /// Comments or notes for the test implementer.
    /// </summary>
    [JsonPropertyName("implementationNotes")]
    public List<string> ImplementationNotes { get; init; } = [];

    /// <summary>
    /// Generates C# test code from this scaffold.
    /// </summary>
    /// <remarks>
    /// Metadata text is escaped before interpolation into string literals and
    /// stripped of line breaks before interpolation into comments, so
    /// arbitrary incident text (quotes, backslashes, newlines) cannot produce
    /// uncompilable output.
    /// </remarks>
    public string GenerateTestCode()
    {
        var sb = new StringBuilder();
        // File header.
        sb.AppendLine("// -----------------------------------------------------------------------------");
        sb.AppendLine($"// {TestClassName}.cs");
        sb.AppendLine($"// Post-Incident Regression Test: {ToCommentText(Metadata.IncidentId)}");
        sb.AppendLine($"// Generated: {GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC");
        sb.AppendLine($"// Root Cause: {ToCommentText(Metadata.RootCause)}");
        sb.AppendLine("// -----------------------------------------------------------------------------");
        sb.AppendLine();
        // Usings.
        sb.AppendLine("using FluentAssertions;");
        sb.AppendLine("using StellaOps.TestKit;");
        sb.AppendLine("using StellaOps.TestKit.Incident;");
        sb.AppendLine("using Xunit;");
        sb.AppendLine();
        // Namespace and class.
        sb.AppendLine($"namespace {Namespace};");
        sb.AppendLine();
        sb.AppendLine("/// <summary>");
        sb.AppendLine($"/// Regression test for incident {ToCommentText(Metadata.IncidentId)}: {ToCommentText(Metadata.Title)}");
        sb.AppendLine("/// </summary>");
        sb.AppendLine("/// <remarks>");
        sb.AppendLine($"/// Root cause: {ToCommentText(Metadata.RootCause)}");
        sb.AppendLine($"/// Affected modules: {ToCommentText(string.Join(", ", Metadata.AffectedModules))}");
        sb.AppendLine($"/// Severity: {Metadata.Severity}");
        if (!string.IsNullOrEmpty(Metadata.ReportUrl))
        {
            sb.AppendLine($"/// Report: {ToCommentText(Metadata.ReportUrl)}");
        }
        sb.AppendLine("/// </remarks>");
        sb.AppendLine("[Trait(\"Category\", TestCategories.PostIncident)]");
        // Escape values interpolated into attribute string literals: a quote
        // or backslash in a trait would otherwise break the generated file.
        sb.AppendLine($"[Trait(\"Incident\", \"{EscapeString(Metadata.IncidentId)}\")]");
        sb.AppendLine($"[Trait(\"Severity\", \"{Metadata.Severity}\")]");
        foreach (var trait in Traits)
        {
            sb.AppendLine($"[Trait(\"{EscapeString(trait.Key)}\", \"{EscapeString(trait.Value)}\")]");
        }
        sb.AppendLine($"public sealed class {TestClassName}");
        sb.AppendLine("{");
        // Metadata constant.
        sb.AppendLine("    private static readonly IncidentMetadata Incident = new()");
        sb.AppendLine("    {");
        sb.AppendLine($"        IncidentId = \"{EscapeString(Metadata.IncidentId)}\",");
        sb.AppendLine($"        OccurredAt = DateTimeOffset.Parse(\"{Metadata.OccurredAt:O}\"),");
        sb.AppendLine($"        RootCause = \"{EscapeString(Metadata.RootCause)}\",");
        sb.AppendLine($"        AffectedModules = [{string.Join(", ", Metadata.AffectedModules.Select(m => $"\"{EscapeString(m)}\""))}],");
        sb.AppendLine($"        Severity = IncidentSeverity.{Metadata.Severity},");
        sb.AppendLine($"        Title = \"{EscapeString(Metadata.Title)}\"");
        sb.AppendLine("    };");
        sb.AppendLine();
        // Test method.
        sb.AppendLine("    /// <summary>");
        sb.AppendLine($"    /// Validates that the fix for {ToCommentText(Metadata.IncidentId)} prevents recurrence.");
        sb.AppendLine("    /// </summary>");
        sb.AppendLine("    [Fact]");
        sb.AppendLine($"    public async Task {TestMethodName}()");
        sb.AppendLine("    {");
        sb.AppendLine("        // Arrange");
        sb.AppendLine("        // TODO: Load fixtures from replay manifest");
        foreach (var fixture in InputFixtures)
        {
            sb.AppendLine($"        // Fixture: {ToCommentText(fixture.Key)}");
        }
        sb.AppendLine();
        sb.AppendLine("        // Act");
        sb.AppendLine("        // TODO: Execute the scenario that triggered the incident");
        sb.AppendLine();
        sb.AppendLine("        // Assert");
        sb.AppendLine("        // TODO: Verify the fix prevents the incident condition");
        foreach (var expected in ExpectedOutputs)
        {
            sb.AppendLine($"        // Expected: {ToCommentText(expected.Key)}");
        }
        sb.AppendLine();
        sb.AppendLine("        // This test was auto-generated. Review and complete the implementation.");
        sb.AppendLine("        await Task.CompletedTask;");
        sb.AppendLine("    }");
        sb.AppendLine("}");
        // Implementation notes as a trailing block comment.
        if (ImplementationNotes.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("/*");
            sb.AppendLine("Implementation Notes:");
            foreach (var note in ImplementationNotes)
            {
                sb.AppendLine($"- {note}");
            }
            sb.AppendLine("*/");
        }
        return sb.ToString();
    }

    /// <summary>
    /// Serializes this scaffold to JSON for storage (indented, camelCase,
    /// enums as strings).
    /// </summary>
    public string ToJson()
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = true,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            Converters = { new JsonStringEnumConverter() }
        };
        return JsonSerializer.Serialize(this, options);
    }

    /// <summary>
    /// Deserializes a scaffold from JSON; returns null for the JSON literal "null".
    /// </summary>
    public static TestScaffold? FromJson(string json)
    {
        var options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            Converters = { new JsonStringEnumConverter() }
        };
        return JsonSerializer.Deserialize<TestScaffold>(json, options);
    }

    /// <summary>
    /// Escapes a value for safe embedding inside a generated C# string literal.
    /// </summary>
    private static string EscapeString(string value)
    {
        return value
            .Replace("\\", "\\\\")
            .Replace("\"", "\\\"")
            .Replace("\n", "\\n")
            .Replace("\r", "\\r")
            .Replace("\t", "\\t");
    }

    /// <summary>
    /// Collapses line breaks so a value can be embedded in a single-line
    /// (// or ///) comment without terminating it early.
    /// </summary>
    private static string ToCommentText(string value)
    {
        return value.Replace("\r", " ").Replace("\n", " ");
    }
}

View File

@@ -0,0 +1,352 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Interop;
/// <summary>
/// Tracks schema versions and analyzes compatibility between versions.
/// </summary>
/// <remarks>
/// The matrix helps verify N-1/N+1 version compatibility:
/// - Current code with N-1 schema (backward compatibility)
/// - N-1 code with current schema (forward compatibility)
///
/// Usage:
/// <code>
/// var matrix = new SchemaVersionMatrix();
/// matrix.AddVersion("1.0", new SchemaDefinition
/// {
/// RequiredFields = ["id", "name"],
/// OptionalFields = ["description"]
/// });
/// matrix.AddVersion("2.0", new SchemaDefinition
/// {
/// RequiredFields = ["id", "name", "type"],
/// OptionalFields = ["description", "metadata"]
/// });
///
/// var report = matrix.Analyze();
/// Assert.True(report.IsBackwardCompatible("2.0", "1.0"));
/// </code>
/// </remarks>
public sealed class SchemaVersionMatrix
{
    // Schema definitions keyed by version id; lookups ignore case.
    private readonly Dictionary<string, SchemaDefinition> _versions = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>
    /// Registers (or replaces) a schema version in the matrix.
    /// </summary>
    /// <param name="version">Version identifier (e.g., "1.0", "2.0").</param>
    /// <param name="schema">Schema definition for that version.</param>
    public void AddVersion(string version, SchemaDefinition schema)
    {
        ArgumentNullException.ThrowIfNull(version);
        ArgumentNullException.ThrowIfNull(schema);
        _versions[version] = schema;
    }

    /// <summary>
    /// All registered version identifiers (snapshot copy).
    /// </summary>
    public IReadOnlyCollection<string> Versions => _versions.Keys.ToList();

    /// <summary>
    /// Looks up a schema definition by version; null when unknown.
    /// </summary>
    public SchemaDefinition? GetVersion(string version)
        => _versions.TryGetValue(version, out var found) ? found : null;

    /// <summary>
    /// Analyzes compatibility for every ordered pair of registered versions.
    /// </summary>
    public CompatibilityReport Analyze()
    {
        var ordered = _versions.Keys.OrderBy(v => v).ToList();
        var pairs = new List<VersionCompatibilityPair>();

        for (var sourceIndex = 0; sourceIndex < ordered.Count; sourceIndex++)
        {
            for (var targetIndex = 0; targetIndex < ordered.Count; targetIndex++)
            {
                // A version is trivially compatible with itself; skip.
                if (sourceIndex == targetIndex)
                {
                    continue;
                }

                var source = ordered[sourceIndex];
                var target = ordered[targetIndex];
                var backwardCheck = CheckBackwardCompatibility(source, target);
                var forwardCheck = CheckForwardCompatibility(source, target);

                pairs.Add(new VersionCompatibilityPair
                {
                    FromVersion = source,
                    ToVersion = target,
                    IsBackwardCompatible = backwardCheck.IsCompatible,
                    IsForwardCompatible = forwardCheck.IsCompatible,
                    BackwardIssues = backwardCheck.Issues,
                    ForwardIssues = forwardCheck.Issues
                });
            }
        }

        return new CompatibilityReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            Versions = ordered,
            Pairs = pairs,
            OverallBackwardCompatible = pairs.All(p => p.IsBackwardCompatible),
            OverallForwardCompatible = pairs.All(p => p.IsForwardCompatible)
        };
    }

    /// <summary>
    /// True when old code can still read data written by the newer version
    /// (i.e., no required field was dropped and no shared field changed type).
    /// </summary>
    public bool IsBackwardCompatible(string fromVersion, string toVersion)
        => CheckBackwardCompatibility(fromVersion, toVersion).IsCompatible;

    /// <summary>
    /// True when new code can read data written by the older version
    /// (i.e., every newly-required field has a default).
    /// </summary>
    public bool IsForwardCompatible(string fromVersion, string toVersion)
        => CheckForwardCompatibility(fromVersion, toVersion).IsCompatible;

    private CompatibilityCheckResult CheckBackwardCompatibility(string fromVersion, string toVersion)
    {
        if (!_versions.TryGetValue(fromVersion, out var oldSchema) ||
            !_versions.TryGetValue(toVersion, out var newSchema))
        {
            return new CompatibilityCheckResult(false, [$"Version not found: {fromVersion} or {toVersion}"]);
        }

        var problems = new List<string>();

        // Old readers break when a field they required no longer exists at all.
        var droppedRequired = oldSchema.RequiredFields
            .Except(newSchema.RequiredFields)
            .Except(newSchema.OptionalFields)
            .ToList();
        if (droppedRequired.Count > 0)
        {
            problems.Add($"Required fields removed: {string.Join(", ", droppedRequired)}");
        }

        // Old readers also break when a shared field changed type.
        foreach (var (field, previousType) in oldSchema.FieldTypes)
        {
            if (newSchema.FieldTypes.TryGetValue(field, out var currentType) && previousType != currentType)
            {
                problems.Add($"Type changed for '{field}': {previousType} -> {currentType}");
            }
        }

        return new CompatibilityCheckResult(problems.Count == 0, problems);
    }

    private CompatibilityCheckResult CheckForwardCompatibility(string fromVersion, string toVersion)
    {
        if (!_versions.TryGetValue(fromVersion, out var oldSchema) ||
            !_versions.TryGetValue(toVersion, out var newSchema))
        {
            return new CompatibilityCheckResult(false, [$"Version not found: {fromVersion} or {toVersion}"]);
        }

        var problems = new List<string>();

        // Fields the target newly requires, absent from the source entirely.
        var introducedRequired = newSchema.RequiredFields
            .Except(oldSchema.RequiredFields)
            .Except(oldSchema.OptionalFields)
            .ToList();
        if (introducedRequired.Count > 0)
        {
            // Such fields only work with old data when a default exists.
            var missingDefaults = introducedRequired
                .Where(field => !newSchema.FieldDefaults.ContainsKey(field))
                .ToList();
            if (missingDefaults.Count > 0)
            {
                problems.Add($"New required fields without defaults: {string.Join(", ", missingDefaults)}");
            }
        }

        return new CompatibilityCheckResult(problems.Count == 0, problems);
    }

    // Outcome of a single directional compatibility check.
    private sealed record CompatibilityCheckResult(bool IsCompatible, List<string> Issues);
}
/// <summary>
/// Definition of a schema version.
/// </summary>
public sealed class SchemaDefinition
{
/// <summary>
/// Fields that must be present.
/// </summary>
public List<string> RequiredFields { get; init; } = [];
/// <summary>
/// Fields that may be present but are not required.
/// </summary>
public List<string> OptionalFields { get; init; } = [];
/// <summary>
/// Field types for type compatibility checking.
/// </summary>
/// <remarks>
/// Field names and type strings are compared with default (case-sensitive)
/// string equality by the compatibility checks in SchemaVersionMatrix.
/// </remarks>
public Dictionary<string, string> FieldTypes { get; init; } = [];
/// <summary>
/// Default values for fields (enables forward compatibility).
/// </summary>
/// <remarks>
/// Only the presence of a key matters to the forward-compatibility check;
/// the value itself is not inspected.
/// </remarks>
public Dictionary<string, object?> FieldDefaults { get; init; } = [];
/// <summary>
/// Version-specific validation rules.
/// </summary>
/// <remarks>
/// NOTE(review): not consulted by SchemaVersionMatrix's visible checks —
/// confirm intended consumer before relying on it.
/// </remarks>
public List<string> ValidationRules { get; init; } = [];
}
/// <summary>
/// Report on schema version compatibility.
/// </summary>
public sealed class CompatibilityReport
{
    /// <summary>
    /// When the report was generated.
    /// </summary>
    [JsonPropertyName("generatedAt")]
    public DateTimeOffset GeneratedAt { get; init; }
    /// <summary>
    /// All analyzed versions.
    /// </summary>
    [JsonPropertyName("versions")]
    public List<string> Versions { get; init; } = [];
    /// <summary>
    /// Compatibility analysis for each version pair.
    /// </summary>
    [JsonPropertyName("pairs")]
    public List<VersionCompatibilityPair> Pairs { get; init; } = [];
    /// <summary>
    /// True if all version transitions are backward compatible.
    /// </summary>
    [JsonPropertyName("overallBackwardCompatible")]
    public bool OverallBackwardCompatible { get; init; }
    /// <summary>
    /// True if all version transitions are forward compatible.
    /// </summary>
    [JsonPropertyName("overallForwardCompatible")]
    public bool OverallForwardCompatible { get; init; }

    /// <summary>
    /// Generates a Markdown summary of the report.
    /// </summary>
    /// <remarks>
    /// FIX: the Backward/Forward matrix cells previously rendered
    /// <c>condition ? "" : ""</c> — both branches empty (evidently glyphs lost
    /// to an encoding issue), so the columns were always blank. They now emit
    /// "Yes"/"No", matching the overall-compatibility lines above the table.
    /// </remarks>
    public string ToMarkdown()
    {
        var sb = new StringBuilder();
        sb.AppendLine("# Schema Compatibility Report");
        sb.AppendLine();
        sb.AppendLine($"**Generated:** {GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC");
        sb.AppendLine($"**Versions Analyzed:** {string.Join(", ", Versions)}");
        sb.AppendLine($"**Overall Backward Compatible:** {(OverallBackwardCompatible ? "Yes" : "No")}");
        sb.AppendLine($"**Overall Forward Compatible:** {(OverallForwardCompatible ? "Yes" : "No")}");
        sb.AppendLine();
        sb.AppendLine("## Compatibility Matrix");
        sb.AppendLine();
        sb.AppendLine("| From | To | Backward | Forward | Issues |");
        sb.AppendLine("|------|-----|----------|---------|--------|");
        foreach (var pair in Pairs)
        {
            var issues = pair.BackwardIssues.Concat(pair.ForwardIssues).ToList();
            var issueText = issues.Count > 0 ? string.Join("; ", issues.Take(2)) : "-";
            if (issues.Count > 2) issueText += $" (+{issues.Count - 2} more)";
            // A literal '|' inside an issue message would split the table cell.
            issueText = issueText.Replace("|", "\\|");
            sb.AppendLine($"| {pair.FromVersion} | {pair.ToVersion} | " +
                $"{(pair.IsBackwardCompatible ? "Yes" : "No")} | " +
                $"{(pair.IsForwardCompatible ? "Yes" : "No")} | " +
                $"{issueText} |");
        }
        return sb.ToString();
    }

    /// <summary>
    /// Serializes the report to indented camelCase JSON.
    /// </summary>
    public string ToJson()
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = true,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        };
        return JsonSerializer.Serialize(this, options);
    }
}
/// <summary>
/// Compatibility analysis between two versions.
/// </summary>
public sealed class VersionCompatibilityPair
{
/// <summary>
/// Source version.
/// </summary>
[JsonPropertyName("fromVersion")]
public string FromVersion { get; init; } = "";
/// <summary>
/// Target version.
/// </summary>
[JsonPropertyName("toVersion")]
public string ToVersion { get; init; } = "";
/// <summary>
/// True if old code can read new data.
/// </summary>
[JsonPropertyName("isBackwardCompatible")]
public bool IsBackwardCompatible { get; init; }
/// <summary>
/// True if new code can read old data.
/// </summary>
[JsonPropertyName("isForwardCompatible")]
public bool IsForwardCompatible { get; init; }
/// <summary>
/// Issues preventing backward compatibility. Empty when compatible.
/// </summary>
[JsonPropertyName("backwardIssues")]
public List<string> BackwardIssues { get; init; } = [];
/// <summary>
/// Issues preventing forward compatibility. Empty when compatible.
/// </summary>
[JsonPropertyName("forwardIssues")]
public List<string> ForwardIssues { get; init; } = [];
}

View File

@@ -0,0 +1,409 @@
using Xunit;
namespace StellaOps.TestKit.Interop;
/// <summary>
/// Fixture for testing compatibility across service versions.
/// </summary>
/// <remarks>
/// Enables N-1/N+1 version compatibility testing:
/// - Current client with N-1 server
/// - N-1 client with current server
///
/// Usage:
/// <code>
/// public class VersionCompatibilityTests : IClassFixture&lt;VersionCompatibilityFixture&gt;
/// {
/// private readonly VersionCompatibilityFixture _fixture;
///
/// [Fact]
/// [Trait("Category", TestCategories.Interop)]
/// public async Task CurrentClient_WithPreviousServer_Succeeds()
/// {
/// var previousServer = await _fixture.StartVersion("1.0", "EvidenceLocker");
/// var result = await _fixture.TestHandshake(
/// currentClient: _fixture.CurrentEndpoint,
/// targetServer: previousServer);
///
/// result.IsSuccess.Should().BeTrue();
/// }
/// }
/// </code>
/// </remarks>
public sealed class VersionCompatibilityFixture : IAsyncLifetime
{
// Running service endpoints keyed by "{serviceName}:{version}" (see StartVersion).
private readonly Dictionary<string, ServiceEndpoint> _runningServices = [];
// Resources to tear down when the fixture is disposed.
// NOTE(review): nothing in the methods visible here adds to this list —
// confirm it is populated by code outside this view (e.g. InitializeAsync).
private readonly List<IAsyncDisposable> _disposables = [];
/// <summary>
/// Configuration for the fixture.
/// </summary>
public VersionCompatibilityConfig Config { get; init; } = new();
/// <summary>
/// The current version endpoint (from the test assembly).
/// </summary>
/// <remarks>
/// May be null until the fixture has been initialized; the setter is not
/// exercised in the code visible here — presumably assigned during
/// IAsyncLifetime initialization (confirm).
/// </remarks>
public ServiceEndpoint? CurrentEndpoint { get; private set; }
/// <summary>
/// Starts a specific version of a service.
/// </summary>
/// <param name="version">Version identifier (e.g., "1.0", "2.0").</param>
/// <param name="serviceName">Name of the service to start.</param>
/// <returns>Endpoint for the running service.</returns>
/// <summary>
/// Starts (or returns the already-running) instance of a service at a
/// specific version. Endpoints are cached per "{serviceName}:{version}".
/// </summary>
/// <param name="version">Version identifier (e.g., "1.0", "2.0").</param>
/// <param name="serviceName">Name of the service to start.</param>
/// <returns>Endpoint for the running service.</returns>
public async Task<ServiceEndpoint> StartVersion(string version, string serviceName)
{
    ArgumentNullException.ThrowIfNull(version);
    ArgumentNullException.ThrowIfNull(serviceName);

    var cacheKey = $"{serviceName}:{version}";
    if (_runningServices.TryGetValue(cacheKey, out var alreadyRunning))
    {
        return alreadyRunning;
    }

    // In a real implementation, this would:
    // 1. Pull the Docker image for the specified version
    // 2. Start a Testcontainer with that version
    // 3. Wait for the service to be healthy
    // For now, we create a mock endpoint with a port offset by the number
    // of services already started.
    var port = 5000 + _runningServices.Count;
    var endpoint = new ServiceEndpoint
    {
        ServiceName = serviceName,
        Version = version,
        BaseUrl = $"http://localhost:{port}",
        IsHealthy = true,
        StartedAt = DateTimeOffset.UtcNow
    };
    _runningServices[cacheKey] = endpoint;
    return endpoint;
}
/// <summary>
/// Tests compatibility between two endpoints.
/// </summary>
/// <param name="currentClient">The client endpoint.</param>
/// <param name="targetServer">The server endpoint to connect to.</param>
/// <returns>Result of the compatibility test.</returns>
/// <summary>
/// Tests protocol compatibility between a client and a server endpoint.
/// </summary>
/// <param name="currentClient">The client endpoint.</param>
/// <param name="targetServer">The server endpoint to connect to.</param>
/// <returns>Result of the compatibility test.</returns>
public async Task<CompatibilityResult> TestHandshake(ServiceEndpoint currentClient, ServiceEndpoint targetServer)
{
    ArgumentNullException.ThrowIfNull(currentClient);
    ArgumentNullException.ThrowIfNull(targetServer);

    var outcome = new CompatibilityResult
    {
        ClientVersion = currentClient.Version,
        ServerVersion = targetServer.Version,
        TestedAt = DateTimeOffset.UtcNow
    };

    try
    {
        // In a real implementation, this would:
        // 1. Send test requests from client to server
        // 2. Verify responses are correctly parsed
        // 3. Check for deprecation warnings
        // 4. Measure any performance degradation
        // Simulate handshake delay.
        await Task.Delay(10);
        outcome.IsSuccess = true;
        outcome.Message = $"Handshake successful: {currentClient.Version} -> {targetServer.Version}";
    }
    catch (Exception ex)
    {
        outcome.IsSuccess = false;
        outcome.Message = $"Handshake failed: {ex.Message}";
        outcome.Errors.Add(ex.Message);
    }

    return outcome;
}
/// <summary>
/// Tests message format compatibility.
/// </summary>
/// <param name="producer">The message producer endpoint.</param>
/// <param name="consumer">The message consumer endpoint.</param>
/// <param name="messageType">Type of message to test.</param>
/// <returns>Result of the message compatibility test.</returns>
public async Task<CompatibilityResult> TestMessageFormat(
    ServiceEndpoint producer,
    ServiceEndpoint consumer,
    string messageType)
{
    ArgumentNullException.ThrowIfNull(producer);
    ArgumentNullException.ThrowIfNull(consumer);
    ArgumentNullException.ThrowIfNull(messageType);

    var outcome = new CompatibilityResult
    {
        ClientVersion = producer.Version,
        ServerVersion = consumer.Version,
        TestedAt = DateTimeOffset.UtcNow
    };

    try
    {
        // Placeholder for a real round-trip: a full implementation would
        // have the producer emit a test message, deliver it to the
        // consumer, confirm it parses, and check for data loss or
        // unintended transformation.
        await Task.Delay(10);

        outcome.IsSuccess = true;
        outcome.Message = $"Message format compatible: {messageType} from {producer.Version} to {consumer.Version}";
    }
    catch (Exception ex)
    {
        outcome.IsSuccess = false;
        outcome.Message = $"Message format incompatible: {ex.Message}";
        outcome.Errors.Add(ex.Message);
    }

    return outcome;
}
/// <summary>
/// Tests schema migration compatibility.
/// </summary>
/// <param name="fromVersion">Source schema version.</param>
/// <param name="toVersion">Target schema version.</param>
/// <param name="testData">Sample data to migrate.</param>
/// <returns>Result of the migration test.</returns>
public async Task<MigrationTestResult> TestSchemaMigration(
    string fromVersion,
    string toVersion,
    object testData)
{
    ArgumentNullException.ThrowIfNull(fromVersion);
    ArgumentNullException.ThrowIfNull(toVersion);
    ArgumentNullException.ThrowIfNull(testData);

    var outcome = new MigrationTestResult
    {
        FromVersion = fromVersion,
        ToVersion = toVersion,
        TestedAt = DateTimeOffset.UtcNow
    };

    try
    {
        // Placeholder for a real migration run: a full implementation
        // would apply the migration scripts, verify data integrity,
        // probe rollback capability, and measure migration performance.
        await Task.Delay(10);

        outcome.IsSuccess = true;
        outcome.Message = $"Migration successful: {fromVersion} -> {toVersion}";
        outcome.DataPreserved = true;
        outcome.RollbackSupported = true;
    }
    catch (Exception ex)
    {
        outcome.IsSuccess = false;
        outcome.Message = $"Migration failed: {ex.Message}";
        outcome.Errors.Add(ex.Message);
    }

    return outcome;
}
/// <summary>
/// Stops a running service version.
/// </summary>
public async Task StopVersion(string version, string serviceName)
{
    // Remove returns false when this service/version pair was never
    // registered, in which case there is nothing to stop.
    if (!_runningServices.Remove($"{serviceName}:{version}"))
    {
        return;
    }

    // A real implementation would tear down the container here.
    await Task.Delay(1);
}
/// <inheritdoc />
public async ValueTask InitializeAsync()
{
    // Register the endpoint representing the version currently under test.
    CurrentEndpoint = new ServiceEndpoint
    {
        ServiceName = "Current",
        Version = Config.CurrentVersion,
        BaseUrl = "http://localhost:5000",
        StartedAt = DateTimeOffset.UtcNow,
        IsHealthy = true
    };

    await Task.CompletedTask;
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
    // Drop endpoint bookkeeping first, then release owned resources.
    _runningServices.Clear();

    foreach (var resource in _disposables)
    {
        await resource.DisposeAsync();
    }

    _disposables.Clear();
}
}
/// <summary>
/// Configuration for version compatibility testing.
/// </summary>
public sealed class VersionCompatibilityConfig
{
    /// <summary>
    /// Version currently under test.
    /// </summary>
    public string CurrentVersion { get; init; } = "current";

    /// <summary>
    /// Earlier versions to test against (N-1, N-2, and so on).
    /// </summary>
    public List<string> PreviousVersions { get; init; } = new();

    /// <summary>
    /// Docker registry from which version images are pulled.
    /// </summary>
    public string ImageRegistry { get; init; } = "";

    /// <summary>
    /// Maximum time allowed for a service version to start.
    /// </summary>
    public TimeSpan StartupTimeout { get; init; } = TimeSpan.FromSeconds(60);

    /// <summary>
    /// Maximum time allowed for a handshake test.
    /// </summary>
    public TimeSpan HandshakeTimeout { get; init; } = TimeSpan.FromSeconds(10);
}
/// <summary>
/// Represents a running service endpoint.
/// </summary>
public sealed class ServiceEndpoint
{
    /// <summary>
    /// Service name.
    /// </summary>
    public string ServiceName { get; init; } = "";

    /// <summary>
    /// Service version.
    /// </summary>
    public string Version { get; init; } = "";

    /// <summary>
    /// Base URL at which the service is reachable.
    /// </summary>
    public string BaseUrl { get; init; } = "";

    /// <summary>
    /// True while the service reports healthy.
    /// </summary>
    public bool IsHealthy { get; init; }

    /// <summary>
    /// Timestamp at which the service was started.
    /// </summary>
    public DateTimeOffset StartedAt { get; init; }
}
/// <summary>
/// Result of a compatibility test.
/// </summary>
public sealed class CompatibilityResult
{
    /// <summary>
    /// Version of the client side under test.
    /// </summary>
    public string ClientVersion { get; init; } = "";

    /// <summary>
    /// Version of the server side under test.
    /// </summary>
    public string ServerVersion { get; init; } = "";

    /// <summary>
    /// True when the test passed.
    /// </summary>
    public bool IsSuccess { get; set; }

    /// <summary>
    /// Human-readable summary of the outcome.
    /// </summary>
    public string Message { get; set; } = "";

    /// <summary>
    /// Errors collected while testing.
    /// </summary>
    public List<string> Errors { get; init; } = new();

    /// <summary>
    /// Non-fatal findings such as deprecation notices.
    /// </summary>
    public List<string> Warnings { get; init; } = new();

    /// <summary>
    /// Timestamp of the test run.
    /// </summary>
    public DateTimeOffset TestedAt { get; init; }
}
/// <summary>
/// Result of a schema migration test.
/// </summary>
public sealed class MigrationTestResult
{
    /// <summary>
    /// Schema version the migration started from.
    /// </summary>
    public string FromVersion { get; init; } = "";

    /// <summary>
    /// Schema version the migration targeted.
    /// </summary>
    public string ToVersion { get; init; } = "";

    /// <summary>
    /// True when the migration completed without errors.
    /// </summary>
    public bool IsSuccess { get; set; }

    /// <summary>
    /// Human-readable summary of the outcome.
    /// </summary>
    public string Message { get; set; } = "";

    /// <summary>
    /// True when no data was lost or altered by the migration.
    /// </summary>
    public bool DataPreserved { get; set; }

    /// <summary>
    /// True when the migration can be rolled back.
    /// </summary>
    public bool RollbackSupported { get; set; }

    /// <summary>
    /// Errors collected while migrating.
    /// </summary>
    public List<string> Errors { get; init; } = new();

    /// <summary>
    /// Timestamp of the test run.
    /// </summary>
    public DateTimeOffset TestedAt { get; init; }
}

View File

@@ -0,0 +1,383 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.TestKit.Longevity;
/// <summary>
/// Captures stability metrics during long-running tests.
/// </summary>
/// <remarks>
/// Tracks memory usage, connection pools, and counters to detect:
/// - Memory leaks (growing memory over time)
/// - Connection pool exhaustion
/// - Counter drift (unbounded growth)
/// - Resource leaks
///
/// Usage:
/// <code>
/// var metrics = new StabilityMetrics();
/// metrics.CaptureBaseline();
///
/// // Run long-duration operations
/// for (int i = 0; i < 100000; i++)
/// {
///     await ProcessWorkItem();
///     if (i % 1000 == 0) metrics.CaptureSnapshot();
/// }
///
/// metrics.CaptureSnapshot();
/// Assert.False(metrics.HasMemoryLeak(tolerancePercent: 10));
/// </code>
/// </remarks>
public sealed class StabilityMetrics
{
    private readonly List<MetricsSnapshot> _snapshots = [];
    private MetricsSnapshot? _baseline;

    /// <summary>
    /// Memory usage baseline (bytes); 0 until <see cref="CaptureBaseline"/> is called.
    /// </summary>
    public long MemoryBaseline => _baseline?.MemoryUsed ?? 0;

    /// <summary>
    /// Memory usage from the most recent snapshot (bytes); 0 when no snapshots exist.
    /// </summary>
    public long MemoryCurrent => _snapshots.LastOrDefault()?.MemoryUsed ?? 0;

    /// <summary>
    /// Memory growth rate (bytes per snapshot), computed as the
    /// least-squares slope over (snapshot index, memory) pairs.
    /// </summary>
    public double MemoryGrowthRate
    {
        get
        {
            if (_snapshots.Count < 2) return 0;

            // Calculate linear regression slope.
            var n = _snapshots.Count;
            var sumX = 0.0;
            var sumY = 0.0;
            var sumXY = 0.0;
            var sumX2 = 0.0;
            for (int i = 0; i < n; i++)
            {
                sumX += i;
                sumY += _snapshots[i].MemoryUsed;
                sumXY += (double)i * _snapshots[i].MemoryUsed;
                // Accumulate in double: `i * i` as Int32 overflows once the
                // snapshot count exceeds ~46,340.
                sumX2 += (double)i * i;
            }
            var denominator = n * sumX2 - sumX * sumX;
            if (Math.Abs(denominator) < 0.0001) return 0;
            return (n * sumXY - sumX * sumY) / denominator;
        }
    }

    /// <summary>
    /// Active connections from the most recent snapshot.
    /// </summary>
    public int ConnectionPoolActive => _snapshots.LastOrDefault()?.ConnectionPoolActive ?? 0;

    /// <summary>
    /// Potentially leaked connections from the most recent snapshot.
    /// </summary>
    public int ConnectionPoolLeaked => _snapshots.LastOrDefault()?.ConnectionPoolLeaked ?? 0;

    /// <summary>
    /// Counter values from the most recent snapshot, keyed by counter name.
    /// NOTE: this is the snapshot's live dictionary, not a copy; callers
    /// should treat it as read-only.
    /// </summary>
    public Dictionary<string, long> CounterValues => _snapshots.LastOrDefault()?.Counters
        ?? new Dictionary<string, long>();

    /// <summary>
    /// All captured snapshots, oldest first (baseline included).
    /// </summary>
    public IReadOnlyList<MetricsSnapshot> Snapshots => _snapshots;

    /// <summary>
    /// Captures the initial baseline metrics, discarding any prior snapshots.
    /// </summary>
    public void CaptureBaseline()
    {
        // Force a full GC so the baseline reflects live objects only.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        _baseline = CaptureCurrentSnapshot();
        _snapshots.Clear();
        _snapshots.Add(_baseline);
    }

    /// <summary>
    /// Captures a metrics snapshot and appends it to <see cref="Snapshots"/>.
    /// </summary>
    public void CaptureSnapshot()
    {
        _snapshots.Add(CaptureCurrentSnapshot());
    }

    /// <summary>
    /// Records a counter value on the most recent snapshot (no-op if none exists).
    /// </summary>
    public void RecordCounter(string name, long value)
    {
        var current = _snapshots.LastOrDefault();
        if (current != null)
        {
            current.Counters[name] = value;
        }
    }

    /// <summary>
    /// Records connection pool metrics on the most recent snapshot (no-op if none exists).
    /// </summary>
    public void RecordConnectionPool(int active, int leaked)
    {
        var current = _snapshots.LastOrDefault();
        if (current != null)
        {
            current.ConnectionPoolActive = active;
            current.ConnectionPoolLeaked = leaked;
        }
    }

    /// <summary>
    /// Checks if there's a memory leak based on growth trend.
    /// </summary>
    /// <param name="tolerancePercent">Allowed growth percentage from baseline.</param>
    /// <returns>True when current memory exceeds the tolerance AND the trend is upward.</returns>
    public bool HasMemoryLeak(double tolerancePercent = 10)
    {
        if (_baseline == null || _snapshots.Count < 2) return false;

        var currentMemory = MemoryCurrent;
        var baselineMemory = MemoryBaseline;
        var allowedGrowth = baselineMemory * (tolerancePercent / 100);

        // Current memory must exceed baseline by more than the tolerance...
        var exceeds = currentMemory > baselineMemory + allowedGrowth;
        // ...and the fitted slope must be positive, so a one-off spike
        // right before the final snapshot is not flagged on its own.
        var hasUpwardTrend = MemoryGrowthRate > 0;
        return exceeds && hasUpwardTrend;
    }

    /// <summary>
    /// Checks if a counter is drifting (growing unbounded).
    /// </summary>
    /// <param name="counterName">The counter to inspect.</param>
    /// <param name="threshold">Net growth (last - first) beyond which drift is reported.</param>
    public bool HasDrift(string counterName, double threshold = 1000)
    {
        if (_snapshots.Count < 2) return false;

        // Single TryGetValue lookup per snapshot instead of the previous
        // ContainsKey + indexer double lookup.
        var values = new List<long>();
        foreach (var snapshot in _snapshots)
        {
            if (snapshot.Counters.TryGetValue(counterName, out var value))
            {
                values.Add(value);
            }
        }

        if (values.Count < 2) return false;

        // Drift = net growth between first and last observed value.
        return values[^1] - values[0] > threshold;
    }

    /// <summary>
    /// Checks if connection pool has leaks.
    /// </summary>
    /// <param name="maxLeaked">Maximum tolerated leaked connections.</param>
    public bool HasConnectionPoolLeak(int maxLeaked = 0)
    {
        return ConnectionPoolLeaked > maxLeaked;
    }

    /// <summary>
    /// Generates a stability report from the current snapshot history.
    /// </summary>
    public StabilityReport GenerateReport()
    {
        return new StabilityReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            SnapshotCount = _snapshots.Count,
            BaselineMemory = MemoryBaseline,
            CurrentMemory = MemoryCurrent,
            MemoryGrowthRate = MemoryGrowthRate,
            ConnectionPoolActive = ConnectionPoolActive,
            ConnectionPoolLeaked = ConnectionPoolLeaked,
            Counters = new Dictionary<string, long>(CounterValues),
            HasMemoryLeak = HasMemoryLeak(),
            HasConnectionPoolLeak = HasConnectionPoolLeak(),
            DriftingCounters = CounterValues.Keys.Where(k => HasDrift(k)).ToList()
        };
    }

    /// <summary>
    /// Captures process-wide metrics at this instant.
    /// </summary>
    private static MetricsSnapshot CaptureCurrentSnapshot()
    {
        // Process is IDisposable; scope it to this call.
        using var process = global::System.Diagnostics.Process.GetCurrentProcess();
        return new MetricsSnapshot
        {
            CapturedAt = DateTimeOffset.UtcNow,
            MemoryUsed = GC.GetTotalMemory(forceFullCollection: false),
            Gen0Collections = GC.CollectionCount(0),
            Gen1Collections = GC.CollectionCount(1),
            Gen2Collections = GC.CollectionCount(2),
            // Actual OS thread count of the current process. The previous
            // implementation reported Environment.ProcessorCount, which is
            // the logical CPU count, not a thread count.
            ThreadCount = process.Threads.Count,
            Counters = new Dictionary<string, long>()
        };
    }
}
/// <summary>
/// A snapshot of metrics at a point in time.
/// </summary>
public sealed class MetricsSnapshot
{
    /// <summary>
    /// Timestamp of the capture.
    /// </summary>
    [JsonPropertyName("capturedAt")]
    public DateTimeOffset CapturedAt { get; init; }

    /// <summary>
    /// Total managed memory in use (bytes).
    /// </summary>
    [JsonPropertyName("memoryUsed")]
    public long MemoryUsed { get; init; }

    /// <summary>
    /// Generation-0 GC collections since process start.
    /// </summary>
    [JsonPropertyName("gen0Collections")]
    public int Gen0Collections { get; init; }

    /// <summary>
    /// Generation-1 GC collections since process start.
    /// </summary>
    [JsonPropertyName("gen1Collections")]
    public int Gen1Collections { get; init; }

    /// <summary>
    /// Generation-2 GC collections since process start.
    /// </summary>
    [JsonPropertyName("gen2Collections")]
    public int Gen2Collections { get; init; }

    /// <summary>
    /// Number of threads at capture time.
    /// </summary>
    [JsonPropertyName("threadCount")]
    public int ThreadCount { get; init; }

    /// <summary>
    /// Active connections in the pool.
    /// </summary>
    [JsonPropertyName("connectionPoolActive")]
    public int ConnectionPoolActive { get; set; }

    /// <summary>
    /// Leaked connections in the pool.
    /// </summary>
    [JsonPropertyName("connectionPoolLeaked")]
    public int ConnectionPoolLeaked { get; set; }

    /// <summary>
    /// Named counter values recorded against this snapshot.
    /// </summary>
    [JsonPropertyName("counters")]
    public Dictionary<string, long> Counters { get; init; } = new();
}
/// <summary>
/// Stability analysis report.
/// </summary>
public sealed class StabilityReport
{
    /// <summary>
    /// Timestamp at which the report was generated.
    /// </summary>
    [JsonPropertyName("generatedAt")]
    public DateTimeOffset GeneratedAt { get; init; }

    /// <summary>
    /// Number of snapshots the report was computed from.
    /// </summary>
    [JsonPropertyName("snapshotCount")]
    public int SnapshotCount { get; init; }

    /// <summary>
    /// Baseline memory (bytes).
    /// </summary>
    [JsonPropertyName("baselineMemory")]
    public long BaselineMemory { get; init; }

    /// <summary>
    /// Memory at the final snapshot (bytes).
    /// </summary>
    [JsonPropertyName("currentMemory")]
    public long CurrentMemory { get; init; }

    /// <summary>
    /// Estimated memory growth (bytes per snapshot).
    /// </summary>
    [JsonPropertyName("memoryGrowthRate")]
    public double MemoryGrowthRate { get; init; }

    /// <summary>
    /// Active connections at the final snapshot.
    /// </summary>
    [JsonPropertyName("connectionPoolActive")]
    public int ConnectionPoolActive { get; init; }

    /// <summary>
    /// Leaked connections at the final snapshot.
    /// </summary>
    [JsonPropertyName("connectionPoolLeaked")]
    public int ConnectionPoolLeaked { get; init; }

    /// <summary>
    /// Final counter values by name.
    /// </summary>
    [JsonPropertyName("counters")]
    public Dictionary<string, long> Counters { get; init; } = new();

    /// <summary>
    /// True when a memory leak was detected.
    /// </summary>
    [JsonPropertyName("hasMemoryLeak")]
    public bool HasMemoryLeak { get; init; }

    /// <summary>
    /// True when a connection pool leak was detected.
    /// </summary>
    [JsonPropertyName("hasConnectionPoolLeak")]
    public bool HasConnectionPoolLeak { get; init; }

    /// <summary>
    /// Names of counters observed to drift.
    /// </summary>
    [JsonPropertyName("driftingCounters")]
    public List<string> DriftingCounters { get; init; } = new();

    /// <summary>
    /// Overall verdict: passes only when no leak or drift was detected.
    /// Derived, so excluded from serialization.
    /// </summary>
    [JsonIgnore]
    public bool Passed => !HasMemoryLeak && !HasConnectionPoolLeak && DriftingCounters.Count == 0;

    /// <summary>
    /// Serializes the report as indented camelCase JSON.
    /// </summary>
    public string ToJson()
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = true,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase
        };
        return JsonSerializer.Serialize(this, options);
    }
}

View File

@@ -0,0 +1,231 @@
namespace StellaOps.TestKit.Longevity;
/// <summary>
/// Runner for time-extended stability tests.
/// </summary>
/// <remarks>
/// Executes test scenarios over extended periods to detect:
/// - Memory leaks
/// - Connection pool exhaustion
/// - Counter drift
/// - Resource leaks
///
/// Usage:
/// <code>
/// var runner = new StabilityTestRunner();
/// var report = await runner.RunExtended(
///     scenario: async () => await ProcessWorkItem(),
///     duration: TimeSpan.FromHours(1),
///     cancellationToken: ct);
///
/// Assert.True(report.Passed, report.ToJson());
/// </code>
/// </remarks>
public sealed class StabilityTestRunner
{
    private readonly StabilityMetrics _metrics = new();
    private readonly List<string> _errors = [];
    private readonly List<string> _warnings = [];

    /// <summary>
    /// Configuration for the runner.
    /// </summary>
    public StabilityTestConfig Config { get; init; } = new();

    /// <summary>
    /// Runs a scenario for an extended duration, collecting stability metrics.
    /// </summary>
    /// <param name="scenario">The test scenario to execute repeatedly.</param>
    /// <param name="duration">How long to run the test.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Stability report with pass/fail status.</returns>
    public async Task<StabilityReport> RunExtended(
        Func<Task> scenario,
        TimeSpan duration,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(scenario);
        var startTime = DateTimeOffset.UtcNow;
        var endTime = startTime + duration;
        var iterationCount = 0;
        // Successful iterations since the last snapshot. The previous check
        // (iterationCount % SnapshotInterval == 0) fired on every loop pass
        // before the first success and whenever the count stalled on
        // repeated failures, flooding the snapshot list.
        var sinceSnapshot = 0;
        _errors.Clear();
        _warnings.Clear();

        // Capture baseline
        _metrics.CaptureBaseline();
        try
        {
            while (DateTimeOffset.UtcNow < endTime && !cancellationToken.IsCancellationRequested)
            {
                // Execute scenario
                try
                {
                    await scenario();
                    iterationCount++;
                    sinceSnapshot++;
                }
                catch (Exception ex)
                {
                    _errors.Add($"Iteration {iterationCount}: {ex.Message}");
                    if (Config.StopOnError)
                    {
                        break;
                    }
                }

                // Capture a snapshot every SnapshotInterval successful iterations.
                if (sinceSnapshot >= Config.SnapshotInterval)
                {
                    sinceSnapshot = 0;
                    _metrics.CaptureSnapshot();
                    // Check for early warnings
                    CheckEarlyWarnings(iterationCount);
                }

                // Optional delay between iterations
                if (Config.IterationDelay > TimeSpan.Zero)
                {
                    await Task.Delay(Config.IterationDelay, cancellationToken);
                }
            }
        }
        catch (OperationCanceledException)
        {
            _warnings.Add("Test was cancelled before completion");
        }

        // Final snapshot
        _metrics.CaptureSnapshot();
        return GenerateReport(startTime, iterationCount);
    }

    /// <summary>
    /// Runs a scenario for a specific number of iterations.
    /// </summary>
    /// <param name="scenario">The test scenario to execute repeatedly.</param>
    /// <param name="iterations">Number of iterations to attempt.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Stability report with pass/fail status.</returns>
    public async Task<StabilityReport> RunIterations(
        Func<Task> scenario,
        int iterations,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(scenario);
        var startTime = DateTimeOffset.UtcNow;
        var completedIterations = 0;
        // Same cadence fix as RunExtended: the original modulo check fired
        // on every pass while completedIterations was 0 or stalled on failures.
        var sinceSnapshot = 0;
        _errors.Clear();
        _warnings.Clear();
        _metrics.CaptureBaseline();

        for (int i = 0; i < iterations && !cancellationToken.IsCancellationRequested; i++)
        {
            try
            {
                await scenario();
                completedIterations++;
                sinceSnapshot++;
            }
            catch (Exception ex)
            {
                _errors.Add($"Iteration {i}: {ex.Message}");
                if (Config.StopOnError)
                {
                    break;
                }
            }

            if (sinceSnapshot >= Config.SnapshotInterval)
            {
                sinceSnapshot = 0;
                _metrics.CaptureSnapshot();
                CheckEarlyWarnings(completedIterations);
            }
        }

        _metrics.CaptureSnapshot();
        return GenerateReport(startTime, completedIterations);
    }

    /// <summary>
    /// Gets the underlying metrics collector for advanced use cases.
    /// </summary>
    public StabilityMetrics Metrics => _metrics;

    /// <summary>
    /// Appends warnings for leaks/drift detected mid-run.
    /// </summary>
    private void CheckEarlyWarnings(int iteration)
    {
        if (_metrics.HasMemoryLeak(Config.MemoryLeakThresholdPercent))
        {
            _warnings.Add($"Iteration {iteration}: Potential memory leak detected");
        }
        if (_metrics.HasConnectionPoolLeak(Config.MaxConnectionPoolLeaks))
        {
            _warnings.Add($"Iteration {iteration}: Connection pool leak detected");
        }
        foreach (var counter in _metrics.CounterValues.Keys)
        {
            if (_metrics.HasDrift(counter, Config.CounterDriftThreshold))
            {
                _warnings.Add($"Iteration {iteration}: Counter '{counter}' is drifting");
            }
        }
    }

    /// <summary>
    /// Builds the final report from the metrics collector.
    /// </summary>
    private StabilityReport GenerateReport(DateTimeOffset startTime, int iterations)
    {
        // NOTE(review): startTime, iterations, _errors and _warnings are
        // collected but not yet representable in the StabilityReport schema;
        // extend StabilityReport to surface them. Until then this is a
        // straight pass-through of the metrics-derived report.
        var baseReport = _metrics.GenerateReport();
        return new StabilityReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            SnapshotCount = baseReport.SnapshotCount,
            BaselineMemory = baseReport.BaselineMemory,
            CurrentMemory = baseReport.CurrentMemory,
            MemoryGrowthRate = baseReport.MemoryGrowthRate,
            ConnectionPoolActive = baseReport.ConnectionPoolActive,
            ConnectionPoolLeaked = baseReport.ConnectionPoolLeaked,
            Counters = baseReport.Counters,
            HasMemoryLeak = baseReport.HasMemoryLeak,
            HasConnectionPoolLeak = baseReport.HasConnectionPoolLeak,
            DriftingCounters = baseReport.DriftingCounters
        };
    }
}
/// <summary>
/// Configuration for stability test runs.
/// </summary>
public sealed class StabilityTestConfig
{
    /// <summary>
    /// Capture a metrics snapshot every N iterations.
    /// </summary>
    public int SnapshotInterval { get; init; } = 100;

    /// <summary>
    /// Memory growth (percent of baseline) beyond which a leak is reported.
    /// </summary>
    public double MemoryLeakThresholdPercent { get; init; } = 10;

    /// <summary>
    /// Maximum tolerated leaked connections.
    /// </summary>
    public int MaxConnectionPoolLeaks { get; init; } = 0;

    /// <summary>
    /// Counter growth beyond which drift is reported.
    /// </summary>
    public double CounterDriftThreshold { get; init; } = 1000;

    /// <summary>
    /// When true, abort the run on the first scenario error.
    /// </summary>
    public bool StopOnError { get; init; } = false;

    /// <summary>
    /// Pause inserted between iterations (zero = no pause).
    /// </summary>
    public TimeSpan IterationDelay { get; init; } = TimeSpan.Zero;
}

View File

@@ -0,0 +1,29 @@
namespace StellaOps.TestKit.Observability;
/// <summary>
/// Exception thrown when an observability contract assertion fails.
/// </summary>
/// <remarks>
/// A contract violation means telemetry output did not conform to the
/// expected schema, cardinality limits, or data quality requirements.
/// </remarks>
public sealed class ContractViolationException : Exception
{
    /// <summary>
    /// Creates a contract violation exception.
    /// </summary>
    /// <param name="message">Description of the contract violation.</param>
    public ContractViolationException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Creates a contract violation exception wrapping an underlying cause.
    /// </summary>
    /// <param name="message">Description of the contract violation.</param>
    /// <param name="innerException">The underlying exception.</param>
    public ContractViolationException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}

View File

@@ -0,0 +1,242 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.TestKit.Observability;
/// <summary>
/// Assertion helpers for structured logging contract testing.
/// </summary>
/// <remarks>
/// These assertions validate that log output conforms to expected contracts:
/// required fields, appropriate log levels, and no sensitive data leakage.
///
/// Usage:
/// <code>
/// var logCapture = new LogCapture();
/// await service.ProcessAsync();
///
/// LogContractAssert.HasRequiredFields(logCapture.Records[0], "CorrelationId", "TenantId");
/// LogContractAssert.NoSensitiveData(logCapture, piiPatterns);
/// LogContractAssert.LogLevelAppropriate(logCapture.Records[0], LogLevel.Information, LogLevel.Warning);
/// </code>
/// </remarks>
public static class LogContractAssert
{
    /// <summary>
    /// Asserts that a log record contains all required structured fields.
    /// </summary>
    /// <param name="record">The log record to check.</param>
    /// <param name="fieldNames">Required field names that must be present in scope or state.</param>
    /// <exception cref="ContractViolationException">Thrown when required fields are missing.</exception>
    public static void HasRequiredFields(CapturedLogRecord record, params string[] fieldNames)
    {
        ArgumentNullException.ThrowIfNull(record);
        ArgumentNullException.ThrowIfNull(fieldNames);
        // A field may arrive via the logging scope or the state (template args).
        var presentFields = record.ScopeValues.Keys
            .Concat(record.StateValues.Keys)
            .ToHashSet(StringComparer.Ordinal);
        var missing = fieldNames.Where(name => !presentFields.Contains(name)).ToList();
        if (missing.Count > 0)
        {
            throw new ContractViolationException(
                $"Log record missing required fields: [{string.Join(", ", missing)}]. " +
                $"Present fields: [{string.Join(", ", presentFields)}]");
        }
    }

    /// <summary>
    /// Asserts that log records don't contain sensitive data matching provided patterns.
    /// </summary>
    /// <param name="records">Log records to check.</param>
    /// <param name="piiPatterns">Regex patterns for sensitive data.</param>
    /// <exception cref="ContractViolationException">Thrown when sensitive data is detected.</exception>
    public static void NoSensitiveData(IEnumerable<CapturedLogRecord> records, IEnumerable<Regex> piiPatterns)
    {
        ArgumentNullException.ThrowIfNull(records);
        ArgumentNullException.ThrowIfNull(piiPatterns);
        var patternList = piiPatterns.ToList();
        foreach (var record in records)
        {
            // Check message. Report only WHICH pattern matched, never the
            // matched content: the previous message interpolated
            // record.Message, re-leaking the PII it had just detected into
            // exception text and test logs.
            foreach (var pattern in patternList)
            {
                if (record.Message != null && pattern.IsMatch(record.Message))
                {
                    throw new ContractViolationException(
                        $"Potential PII in log message: pattern '{pattern}' matched");
                }
            }
            // Check state values
            foreach (var (key, value) in record.StateValues)
            {
                if (value == null) continue;
                var valueStr = value.ToString() ?? "";
                foreach (var pattern in patternList)
                {
                    if (pattern.IsMatch(valueStr))
                    {
                        throw new ContractViolationException(
                            $"Potential PII in log field '{key}': pattern '{pattern}' matched");
                    }
                }
            }
            // Check exception message
            if (record.Exception != null)
            {
                foreach (var pattern in patternList)
                {
                    if (pattern.IsMatch(record.Exception.Message))
                    {
                        throw new ContractViolationException(
                            $"Potential PII in exception message: pattern '{pattern}' matched");
                    }
                }
            }
        }
    }

    /// <summary>
    /// Asserts that a log record's level is within the appropriate range.
    /// </summary>
    /// <param name="record">The log record to check.</param>
    /// <param name="minLevel">Minimum acceptable log level.</param>
    /// <param name="maxLevel">Maximum acceptable log level.</param>
    /// <exception cref="ContractViolationException">Thrown when log level is outside range.</exception>
    public static void LogLevelAppropriate(CapturedLogRecord record, LogLevel minLevel, LogLevel maxLevel)
    {
        ArgumentNullException.ThrowIfNull(record);
        if (record.LogLevel < minLevel || record.LogLevel > maxLevel)
        {
            throw new ContractViolationException(
                $"Log level {record.LogLevel} outside acceptable range [{minLevel}, {maxLevel}]. " +
                $"Message: {record.Message}");
        }
    }

    /// <summary>
    /// Asserts that error logs have correlation context for troubleshooting.
    /// </summary>
    /// <param name="records">Log records to check.</param>
    /// <param name="correlationFields">Fields that should be present on error logs (e.g., "CorrelationId", "RequestId").</param>
    /// <exception cref="ContractViolationException">Thrown when error logs lack correlation context.</exception>
    public static void ErrorLogsHaveCorrelation(IEnumerable<CapturedLogRecord> records, params string[] correlationFields)
    {
        ArgumentNullException.ThrowIfNull(records);
        // Previously missing: every sibling assertion null-checks all of its
        // arguments; a null array here would have thrown NullReferenceException
        // inside the loop instead.
        ArgumentNullException.ThrowIfNull(correlationFields);
        var errorRecords = records.Where(r => r.LogLevel >= LogLevel.Error).ToList();
        foreach (var record in errorRecords)
        {
            var presentFields = record.ScopeValues.Keys
                .Concat(record.StateValues.Keys)
                .ToHashSet(StringComparer.Ordinal);
            var missing = correlationFields.Where(f => !presentFields.Contains(f)).ToList();
            if (missing.Count > 0)
            {
                throw new ContractViolationException(
                    $"Error log missing correlation context: [{string.Join(", ", missing)}]. " +
                    $"Message: {record.Message}");
            }
        }
    }

    /// <summary>
    /// Asserts that log messages follow a consistent format pattern.
    /// </summary>
    /// <param name="records">Log records to check.</param>
    /// <param name="formatPattern">Regex pattern for acceptable message format.</param>
    /// <exception cref="ContractViolationException">Thrown when messages don't match pattern.</exception>
    public static void MessagesMatchPattern(IEnumerable<CapturedLogRecord> records, Regex formatPattern)
    {
        ArgumentNullException.ThrowIfNull(records);
        ArgumentNullException.ThrowIfNull(formatPattern);
        foreach (var record in records)
        {
            if (record.Message != null && !formatPattern.IsMatch(record.Message))
            {
                throw new ContractViolationException(
                    $"Log message doesn't match format pattern: '{record.Message}'");
            }
        }
    }

    /// <summary>
    /// Asserts that no logs at or above the specified level were emitted.
    /// </summary>
    /// <param name="records">Log records to check.</param>
    /// <param name="maxAllowedLevel">Maximum log level that should be present.</param>
    /// <exception cref="ContractViolationException">Thrown when logs exceed max level.</exception>
    public static void NoLogsAboveLevel(IEnumerable<CapturedLogRecord> records, LogLevel maxAllowedLevel)
    {
        ArgumentNullException.ThrowIfNull(records);
        var violating = records.Where(r => r.LogLevel > maxAllowedLevel).ToList();
        if (violating.Count > 0)
        {
            var messages = string.Join("; ", violating.Select(r => $"[{r.LogLevel}] {r.Message}"));
            throw new ContractViolationException(
                $"Found {violating.Count} logs above {maxAllowedLevel}: {messages}");
        }
    }
}
/// <summary>
/// Captured log record for contract testing.
/// </summary>
public sealed record CapturedLogRecord
{
    /// <summary>
    /// Severity of the record.
    /// </summary>
    public required LogLevel LogLevel { get; init; }

    /// <summary>
    /// Fully formatted message text (may be null).
    /// </summary>
    public required string? Message { get; init; }

    /// <summary>
    /// Event identifier attached to the record.
    /// </summary>
    public EventId EventId { get; init; }

    /// <summary>
    /// Exception attached to the record, if any.
    /// </summary>
    public Exception? Exception { get; init; }

    /// <summary>
    /// Key/value pairs contributed by the active logging scope.
    /// </summary>
    public IReadOnlyDictionary<string, object?> ScopeValues { get; init; } =
        new Dictionary<string, object?>();

    /// <summary>
    /// Key/value pairs from the log state (message template parameters).
    /// </summary>
    public IReadOnlyDictionary<string, object?> StateValues { get; init; } =
        new Dictionary<string, object?>();

    /// <summary>
    /// Time the record was captured.
    /// </summary>
    public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// Logger category name.
    /// </summary>
    public string? Category { get; init; }
}

View File

@@ -0,0 +1,360 @@
using System.Diagnostics.Metrics;
namespace StellaOps.TestKit.Observability;
/// <summary>
/// Assertion helpers for metrics contract testing.
/// </summary>
/// <remarks>
/// These assertions validate that metrics conform to expected contracts:
/// metric existence, label cardinality, monotonicity, and naming conventions.
///
/// Usage:
/// <code>
/// var capture = new MetricsCapture("MyService");
/// await service.ProcessAsync();
///
/// MetricsContractAssert.MetricExists(capture, "requests_total");
/// MetricsContractAssert.LabelCardinalityBounded(capture, "http_requests_total", maxLabels: 50);
/// MetricsContractAssert.CounterMonotonic(capture, "processed_items_total");
/// </code>
/// </remarks>
public static class MetricsContractAssert
{
/// <summary>
/// Asserts that a metric with the specified name exists.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="metricName">The expected metric name.</param>
/// <exception cref="ContractViolationException">Thrown when metric doesn't exist.</exception>
public static void MetricExists(MetricsCapture capture, string metricName)
{
    ArgumentNullException.ThrowIfNull(capture);

    if (capture.HasMetric(metricName))
    {
        return;
    }

    throw new ContractViolationException(
        $"Expected metric '{metricName}' not found. " +
        $"Available metrics: [{string.Join(", ", capture.MetricNames)}]");
}
/// <summary>
/// Asserts that a metric's label cardinality is within bounds.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="metricName">The metric to check.</param>
/// <param name="maxLabels">Maximum allowed unique label combinations.</param>
/// <exception cref="ContractViolationException">Thrown when cardinality exceeds threshold.</exception>
public static void LabelCardinalityBounded(MetricsCapture capture, string metricName, int maxLabels)
{
    ArgumentNullException.ThrowIfNull(capture);

    var cardinality = capture.GetLabelCardinality(metricName);
    if (cardinality <= maxLabels)
    {
        return;
    }

    throw new ContractViolationException(
        $"Metric '{metricName}' has cardinality {cardinality}, exceeds max {maxLabels}. " +
        "High cardinality metrics cause storage and performance issues.");
}
/// <summary>
/// Asserts that a counter metric is monotonically increasing.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="metricName">The counter metric to check.</param>
/// <exception cref="ContractViolationException">Thrown when counter decreases.</exception>
public static void CounterMonotonic(MetricsCapture capture, string metricName)
{
ArgumentNullException.ThrowIfNull(capture);
var values = capture.GetValues(metricName);
double? previous = null;
foreach (var value in values)
{
if (previous.HasValue && value < previous.Value)
{
throw new ContractViolationException(
$"Counter '{metricName}' is not monotonic: decreased from {previous} to {value}");
}
previous = value;
}
}
/// <summary>
/// Asserts that a gauge metric stays within expected bounds.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="metricName">The gauge metric to check.</param>
/// <param name="minValue">Minimum acceptable value.</param>
/// <param name="maxValue">Maximum acceptable value.</param>
/// <exception cref="ContractViolationException">Thrown when gauge exceeds bounds.</exception>
public static void GaugeInBounds(MetricsCapture capture, string metricName, double minValue, double maxValue)
{
ArgumentNullException.ThrowIfNull(capture);
var values = capture.GetValues(metricName);
foreach (var value in values)
{
if (value < minValue || value > maxValue)
{
throw new ContractViolationException(
$"Gauge '{metricName}' value {value} outside bounds [{minValue}, {maxValue}]");
}
}
}
/// <summary>
/// Asserts that metric names follow the expected naming convention.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="pattern">Regex pattern for metric names (e.g., "^[a-z_]+_total$" for counters).</param>
/// <exception cref="ContractViolationException">Thrown when metric names don't match pattern.</exception>
public static void MetricNamesMatchPattern(MetricsCapture capture, string pattern)
{
ArgumentNullException.ThrowIfNull(capture);
var regex = new System.Text.RegularExpressions.Regex(pattern);
var violating = capture.MetricNames.Where(name => !regex.IsMatch(name)).ToList();
if (violating.Count > 0)
{
throw new ContractViolationException(
$"Metric names violate naming convention '{pattern}': [{string.Join(", ", violating)}]");
}
}
/// <summary>
/// Asserts that required metrics are present.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="metricNames">Required metric names.</param>
/// <exception cref="ContractViolationException">Thrown when required metrics are missing.</exception>
public static void HasRequiredMetrics(MetricsCapture capture, params string[] metricNames)
{
ArgumentNullException.ThrowIfNull(capture);
var missing = metricNames.Where(name => !capture.HasMetric(name)).ToList();
if (missing.Count > 0)
{
throw new ContractViolationException(
$"Missing required metrics: [{string.Join(", ", missing)}]");
}
}
/// <summary>
/// Asserts that no metrics have unbounded label values.
/// </summary>
/// <param name="capture">The metrics capture.</param>
/// <param name="forbiddenLabelPatterns">Patterns indicating unbounded values (e.g., IDs, timestamps).</param>
/// <exception cref="ContractViolationException">Thrown when unbounded labels are detected.</exception>
public static void NoUnboundedLabels(MetricsCapture capture, params System.Text.RegularExpressions.Regex[] forbiddenLabelPatterns)
{
ArgumentNullException.ThrowIfNull(capture);
foreach (var metricName in capture.MetricNames)
{
var labels = capture.GetLabels(metricName);
foreach (var (labelName, labelValues) in labels)
{
foreach (var value in labelValues)
{
foreach (var pattern in forbiddenLabelPatterns)
{
if (pattern.IsMatch(value))
{
throw new ContractViolationException(
$"Metric '{metricName}' has potentially unbounded label '{labelName}': " +
$"value '{value}' matches pattern '{pattern}'");
}
}
}
}
}
}
}
/// <summary>
/// Captures metrics for contract testing.
/// </summary>
public sealed class MetricsCapture : IDisposable
{
    private readonly Dictionary<string, List<MetricMeasurement>> _measurements = new();
    private readonly MeterListener _listener;
    private bool _disposed;
    /// <summary>
    /// Creates a new metrics capture and immediately starts listening.
    /// </summary>
    /// <param name="meterName">Optional meter name filter; when null, instruments from all meters are captured.</param>
    public MetricsCapture(string? meterName = null)
    {
        _listener = new MeterListener
        {
            InstrumentPublished = (instrument, listener) =>
            {
                if (meterName == null || instrument.Meter.Name == meterName)
                {
                    listener.EnableMeasurementEvents(instrument);
                }
            }
        };
        // Register a callback for every numeric measurement type supported by
        // System.Diagnostics.Metrics (byte/short/int/long/float/double/decimal),
        // so instruments of any numeric type are captured — previously only
        // double, long, and int instruments were recorded.
        _listener.SetMeasurementEventCallback<double>(OnMeasurement);
        _listener.SetMeasurementEventCallback<long>(OnMeasurementLong);
        _listener.SetMeasurementEventCallback<int>(OnMeasurementInt);
        _listener.SetMeasurementEventCallback<float>(OnMeasurementFloat);
        _listener.SetMeasurementEventCallback<short>(OnMeasurementShort);
        _listener.SetMeasurementEventCallback<byte>(OnMeasurementByte);
        _listener.SetMeasurementEventCallback<decimal>(OnMeasurementDecimal);
        _listener.Start();
    }
    private void OnMeasurement(Instrument instrument, double measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementLong(Instrument instrument, long measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementInt(Instrument instrument, int measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementFloat(Instrument instrument, float measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementShort(Instrument instrument, short measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementByte(Instrument instrument, byte measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        RecordMeasurement(instrument.Name, measurement, tags);
    }
    private void OnMeasurementDecimal(Instrument instrument, decimal measurement,
        ReadOnlySpan<KeyValuePair<string, object?>> tags, object? state)
    {
        // decimal -> double may lose precision for extreme values; acceptable for test assertions.
        RecordMeasurement(instrument.Name, (double)measurement, tags);
    }
    // Normalizes all measurement types to double and appends to the per-metric list.
    // Lock: callbacks may arrive from any thread that records a measurement.
    private void RecordMeasurement(string name, double value, ReadOnlySpan<KeyValuePair<string, object?>> tags)
    {
        lock (_measurements)
        {
            if (!_measurements.TryGetValue(name, out var list))
            {
                list = new List<MetricMeasurement>();
                _measurements[name] = list;
            }
            list.Add(new MetricMeasurement
            {
                Value = value,
                Tags = tags.ToArray().ToDictionary(
                    t => t.Key,
                    t => t.Value?.ToString() ?? ""),
                Timestamp = DateTimeOffset.UtcNow
            });
        }
    }
    /// <summary>
    /// Gets all metric names that have been recorded.
    /// </summary>
    public IReadOnlyList<string> MetricNames
    {
        get
        {
            lock (_measurements)
            {
                return _measurements.Keys.ToList();
            }
        }
    }
    /// <summary>
    /// Checks if a metric has been recorded.
    /// </summary>
    public bool HasMetric(string name)
    {
        lock (_measurements)
        {
            return _measurements.ContainsKey(name);
        }
    }
    /// <summary>
    /// Gets all recorded values for a metric, in recording order.
    /// Returns an empty list for an unknown metric.
    /// </summary>
    public IReadOnlyList<double> GetValues(string metricName)
    {
        lock (_measurements)
        {
            if (_measurements.TryGetValue(metricName, out var list))
            {
                return list.Select(m => m.Value).ToList();
            }
            return Array.Empty<double>();
        }
    }
    /// <summary>
    /// Gets the cardinality (number of unique label combinations) for a metric.
    /// Returns 0 for an unknown metric.
    /// </summary>
    public int GetLabelCardinality(string metricName)
    {
        lock (_measurements)
        {
            if (_measurements.TryGetValue(metricName, out var list))
            {
                // Canonical key: labels sorted by name so combination identity
                // is independent of the order tags were supplied.
                return list
                    .Select(m => string.Join(",", m.Tags.OrderBy(t => t.Key).Select(t => $"{t.Key}={t.Value}")))
                    .Distinct()
                    .Count();
            }
            return 0;
        }
    }
    /// <summary>
    /// Gets all unique label values for a metric, keyed by label name.
    /// </summary>
    public IReadOnlyDictionary<string, IReadOnlyList<string>> GetLabels(string metricName)
    {
        lock (_measurements)
        {
            if (!_measurements.TryGetValue(metricName, out var list))
            {
                return new Dictionary<string, IReadOnlyList<string>>();
            }
            var result = new Dictionary<string, HashSet<string>>();
            foreach (var measurement in list)
            {
                foreach (var (key, value) in measurement.Tags)
                {
                    if (!result.TryGetValue(key, out var values))
                    {
                        values = new HashSet<string>();
                        result[key] = values;
                    }
                    values.Add(value);
                }
            }
            return result.ToDictionary(
                kvp => kvp.Key,
                kvp => (IReadOnlyList<string>)kvp.Value.ToList());
        }
    }
    /// <inheritdoc />
    public void Dispose()
    {
        if (_disposed) return;
        _listener.Dispose();
        _disposed = true;
    }
    // Single recorded data point: normalized value, stringified tags, capture time.
    private sealed record MetricMeasurement
    {
        public double Value { get; init; }
        public Dictionary<string, string> Tags { get; init; } = new();
        public DateTimeOffset Timestamp { get; init; }
    }
}

View File

@@ -0,0 +1,223 @@
using System.Diagnostics;
namespace StellaOps.TestKit.Observability;
/// <summary>
/// Assertion helpers for OpenTelemetry contract testing.
/// </summary>
/// <remarks>
/// These assertions validate that telemetry conforms to expected contracts:
/// required spans, attributes, cardinality limits, and schema compliance.
///
/// Usage:
/// <code>
/// using var capture = new OtelCapture("MyService");
/// await service.ProcessAsync();
///
/// OTelContractAssert.HasRequiredSpans(capture, "ProcessRequest", "ValidateInput", "SaveResult");
/// OTelContractAssert.SpanHasAttributes(capture.CapturedActivities[0], "user_id", "tenant_id");
/// OTelContractAssert.NoHighCardinalityAttributes(capture, threshold: 100);
/// </code>
/// </remarks>
public static class OTelContractAssert
{
    /// <summary>
    /// Asserts that all required span names are present in the capture.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="spanNames">Required span names that must all be present.</param>
    /// <exception cref="ContractViolationException">Thrown when required spans are missing.</exception>
    public static void HasRequiredSpans(OtelCapture capture, params string[] spanNames)
    {
        ArgumentNullException.ThrowIfNull(capture);
        ArgumentNullException.ThrowIfNull(spanNames);
        var capturedNames = capture.CapturedActivities
            .Select(a => a.DisplayName ?? a.OperationName)
            .ToHashSet(StringComparer.Ordinal);
        var missing = spanNames.Where(name => !capturedNames.Contains(name)).ToList();
        if (missing.Count > 0)
        {
            throw new ContractViolationException(
                $"Missing required spans: [{string.Join(", ", missing)}]. " +
                $"Captured spans: [{string.Join(", ", capturedNames)}]");
        }
    }
    /// <summary>
    /// Asserts that a span has all required attributes.
    /// </summary>
    /// <param name="span">The span (Activity) to check.</param>
    /// <param name="attributeNames">Required attribute names.</param>
    /// <exception cref="ContractViolationException">Thrown when required attributes are missing.</exception>
    public static void SpanHasAttributes(Activity span, params string[] attributeNames)
    {
        ArgumentNullException.ThrowIfNull(span);
        ArgumentNullException.ThrowIfNull(attributeNames);
        var spanAttributes = span.Tags.Select(t => t.Key).ToHashSet(StringComparer.Ordinal);
        var missing = attributeNames.Where(name => !spanAttributes.Contains(name)).ToList();
        if (missing.Count > 0)
        {
            throw new ContractViolationException(
                $"Span '{span.DisplayName}' missing required attributes: [{string.Join(", ", missing)}]. " +
                $"Present attributes: [{string.Join(", ", spanAttributes)}]");
        }
    }
    /// <summary>
    /// Asserts that an attribute's cardinality (number of unique values) is within bounds.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="attributeName">The attribute to check.</param>
    /// <param name="maxCardinality">Maximum allowed unique values.</param>
    /// <exception cref="ContractViolationException">Thrown when cardinality exceeds threshold.</exception>
    public static void AttributeCardinality(OtelCapture capture, string attributeName, int maxCardinality)
    {
        ArgumentNullException.ThrowIfNull(capture);
        ArgumentNullException.ThrowIfNull(attributeName);
        var uniqueValues = capture.CapturedActivities
            .SelectMany(a => a.Tags)
            .Where(t => t.Key == attributeName)
            .Select(t => t.Value)
            .Distinct()
            .Count();
        if (uniqueValues > maxCardinality)
        {
            throw new ContractViolationException(
                $"Attribute '{attributeName}' has cardinality {uniqueValues}, exceeds max {maxCardinality}. " +
                "High cardinality attributes can cause metric explosion and storage issues.");
        }
    }
    /// <summary>
    /// Asserts that no attribute exceeds the cardinality threshold across all spans.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="threshold">Maximum cardinality threshold (default 100).</param>
    /// <exception cref="ContractViolationException">Thrown when any attribute exceeds threshold.</exception>
    public static void NoHighCardinalityAttributes(OtelCapture capture, int threshold = 100)
    {
        ArgumentNullException.ThrowIfNull(capture);
        var cardinalityByAttribute = capture.CapturedActivities
            .SelectMany(a => a.Tags)
            .GroupBy(t => t.Key)
            .Select(g => new { Attribute = g.Key, Cardinality = g.Select(t => t.Value).Distinct().Count() })
            .Where(x => x.Cardinality > threshold)
            .ToList();
        if (cardinalityByAttribute.Count > 0)
        {
            var violations = string.Join(", ",
                cardinalityByAttribute.Select(x => $"{x.Attribute}={x.Cardinality}"));
            throw new ContractViolationException(
                $"High cardinality attributes detected (threshold={threshold}): {violations}");
        }
    }
    /// <summary>
    /// Asserts that span names follow the expected naming convention.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="pattern">Regex pattern that span names should match (e.g., "^[A-Z][a-z]+\\.[A-Z][a-z]+$").</param>
    /// <exception cref="ContractViolationException">Thrown when span names don't match pattern.</exception>
    public static void SpanNamesMatchPattern(OtelCapture capture, string pattern)
    {
        ArgumentNullException.ThrowIfNull(capture);
        ArgumentNullException.ThrowIfNull(pattern);
        var regex = new System.Text.RegularExpressions.Regex(pattern);
        var violating = capture.CapturedActivities
            .Select(a => a.DisplayName ?? a.OperationName)
            .Where(name => !regex.IsMatch(name))
            .ToList();
        if (violating.Count > 0)
        {
            throw new ContractViolationException(
                $"Span names violate naming convention '{pattern}': [{string.Join(", ", violating)}]");
        }
    }
    /// <summary>
    /// Asserts that all spans have a status code set (not Unset).
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <exception cref="ContractViolationException">Thrown when spans have Unset status.</exception>
    public static void AllSpansHaveStatus(OtelCapture capture)
    {
        ArgumentNullException.ThrowIfNull(capture);
        var unsetSpans = capture.CapturedActivities
            .Where(a => a.Status == ActivityStatusCode.Unset)
            .Select(a => a.DisplayName ?? a.OperationName)
            .ToList();
        if (unsetSpans.Count > 0)
        {
            throw new ContractViolationException(
                $"Spans with unset status (should be Ok or Error): [{string.Join(", ", unsetSpans)}]");
        }
    }
    /// <summary>
    /// Asserts that error spans have the expected error attributes.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="requiredErrorAttributes">Attributes required on error spans (e.g., "exception.type", "exception.message").</param>
    /// <exception cref="ContractViolationException">Thrown when error spans are missing required attributes.</exception>
    public static void ErrorSpansHaveAttributes(OtelCapture capture, params string[] requiredErrorAttributes)
    {
        ArgumentNullException.ThrowIfNull(capture);
        ArgumentNullException.ThrowIfNull(requiredErrorAttributes);
        var errorSpans = capture.CapturedActivities
            .Where(a => a.Status == ActivityStatusCode.Error)
            .ToList();
        foreach (var span in errorSpans)
        {
            var spanAttributes = span.Tags.Select(t => t.Key).ToHashSet(StringComparer.Ordinal);
            var missing = requiredErrorAttributes.Where(attr => !spanAttributes.Contains(attr)).ToList();
            if (missing.Count > 0)
            {
                throw new ContractViolationException(
                    $"Error span '{span.DisplayName}' missing required error attributes: [{string.Join(", ", missing)}]");
            }
        }
    }
    /// <summary>
    /// Asserts that spans don't contain sensitive data patterns in their attributes.
    /// </summary>
    /// <param name="capture">The OTel capture containing recorded spans.</param>
    /// <param name="sensitivePatterns">Regex patterns for sensitive data (e.g., email, SSN, credit card).</param>
    /// <exception cref="ContractViolationException">Thrown when sensitive data is detected.</exception>
    public static void NoSensitiveDataInSpans(OtelCapture capture, params System.Text.RegularExpressions.Regex[] sensitivePatterns)
    {
        ArgumentNullException.ThrowIfNull(capture);
        ArgumentNullException.ThrowIfNull(sensitivePatterns);
        foreach (var span in capture.CapturedActivities)
        {
            foreach (var tag in span.Tags)
            {
                if (tag.Value == null) continue;
                foreach (var pattern in sensitivePatterns)
                {
                    if (pattern.IsMatch(tag.Value))
                    {
                        throw new ContractViolationException(
                            $"Potential sensitive data in span '{span.DisplayName}', attribute '{tag.Key}': " +
                            $"value matches pattern '{pattern}'");
                    }
                }
            }
        }
    }
}

View File

@@ -11,8 +11,9 @@
<Description>Testing infrastructure and utilities for StellaOps</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
<PackageReference Include="xunit.v3.assert" />
<PackageReference Include="xunit.v3.core" />
<PackageReference Include="xunit.v3.extensibility.core" />
<PackageReference Include="FluentAssertions" />
<PackageReference Include="FsCheck" />
<PackageReference Include="FsCheck.Xunit.v3" PrivateAssets="all" />

View File

@@ -247,4 +247,36 @@ public static class TestCategories
/// Parity tests: Competitor comparison, benchmark parity validation.
/// </summary>
public const string Parity = "Parity";
// =========================================================================
// Turn #6 testing enhancements categories
// =========================================================================
/// <summary>
/// Post-incident regression tests: Tests derived from production incidents.
/// P1/P2 incident tests block releases.
/// </summary>
public const string PostIncident = "PostIncident";
/// <summary>
/// Evidence chain tests: Requirement traceability, artifact hash verification.
/// </summary>
public const string EvidenceChain = "EvidenceChain";
/// <summary>
/// Longevity tests: Time-extended stability tests for memory leaks, counter drift.
/// Run nightly, not PR-gating.
/// </summary>
public const string Longevity = "Longevity";
/// <summary>
/// Interop tests: Cross-version compatibility, N-1/N+1 service interoperability.
/// Release-gating tests.
/// </summary>
public const string Interop = "Interop";
/// <summary>
/// Environment skew tests: Testing across varied infrastructure profiles.
/// </summary>
public const string EnvironmentSkew = "EnvironmentSkew";
}

View File

@@ -0,0 +1,77 @@
using Xunit.v3;
namespace StellaOps.TestKit.Traits;
/// <summary>
/// Declares the business intent of a test with optional rationale.
/// </summary>
/// <remarks>
/// Intent attributes provide richer metadata than trait strings alone.
/// The attribute supports capturing rationale for audit trails and documentation.
///
/// Usage:
/// <code>
/// [Fact]
/// [Intent(TestIntents.Regulatory, "Required for SOC2 AU-12 control")]
/// public async Task TestAuditLogImmutability()
/// {
/// // Verify audit logs cannot be modified after creation
/// }
///
/// [Fact]
/// [Intent(TestIntents.Safety, "Prevents SQL injection per OWASP A03:2021")]
/// public void TestInputSanitization()
/// {
/// // Verify SQL injection prevention
/// }
/// </code>
///
/// The attribute automatically adds the xUnit Trait for filtering:
/// <code>
/// dotnet test --filter "Intent=Regulatory"
/// </code>
/// </remarks>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = true, Inherited = true)]
public sealed class IntentAttribute : Attribute, ITraitAttribute
{
    /// <summary>
    /// Creates an intent declaration with optional rationale.
    /// </summary>
    /// <param name="intent">The intent category (use <see cref="TestIntents"/> constants).</param>
    /// <param name="rationale">Optional rationale explaining the intent assignment.</param>
    public IntentAttribute(string intent, string rationale = "")
    {
        Intent = intent;
        Rationale = rationale;
    }
    /// <summary>
    /// The intent category (should be one of <see cref="TestIntents"/> constants).
    /// </summary>
    public string Intent { get; }
    /// <summary>
    /// Optional rationale explaining why this test has this intent.
    /// </summary>
    /// <remarks>
    /// Rationale should reference requirement documents, compliance controls,
    /// security advisories, or other authoritative sources.
    /// </remarks>
    public string Rationale { get; }
    /// <inheritdoc />
    public IReadOnlyCollection<KeyValuePair<string, string>> GetTraits()
    {
        // Always emit the Intent trait; only emit the rationale trait when
        // a meaningful (non-blank) rationale was supplied.
        var pairs = new List<KeyValuePair<string, string>>(capacity: 2)
        {
            new("Intent", Intent)
        };
        if (!string.IsNullOrWhiteSpace(Rationale))
        {
            pairs.Add(new("IntentRationale", Rationale));
        }
        return pairs;
    }
}

View File

@@ -0,0 +1,94 @@
namespace StellaOps.TestKit.Traits;
/// <summary>
/// Test intent categories for classifying the purpose and business value of tests.
/// </summary>
/// <remarks>
/// Intent tagging helps CI detect behavior changes that violate declared intent,
/// even when tests pass. Use alongside Category traits to provide complete classification.
///
/// Usage with xUnit:
/// <code>
/// [Fact]
/// [Trait("Category", TestCategories.Integration)]
/// [Trait("Intent", TestIntents.Regulatory)]
/// [Intent(TestIntents.Regulatory, "Required for SOC2 compliance audit")]
/// public async Task TestAuditTrailImmutability() { }
/// </code>
///
/// Filter by intent during test runs:
/// <code>
/// dotnet test --filter "Intent=Regulatory"
/// dotnet test --filter "Intent=Safety|Intent=Regulatory"
/// </code>
/// </remarks>
public static class TestIntents
{
    /// <summary>
    /// Regulatory tests: Compliance, audit requirements, legal obligations.
    /// </summary>
    /// <remarks>
    /// Validates behavior required for regulatory compliance (SOC2, GDPR,
    /// FedRAMP, etc.). Failures may have legal or certification impact, so
    /// tests in this category must link to specific requirement documents or controls.
    /// </remarks>
    public const string Regulatory = "Regulatory";
    /// <summary>
    /// Safety tests: Security, fail-secure behavior, cryptographic correctness.
    /// </summary>
    /// <remarks>
    /// Validates security-critical behavior: authentication, authorization,
    /// cryptographic operations, input validation, injection prevention.
    /// Failures may result in security vulnerabilities or data breaches.
    /// </remarks>
    public const string Safety = "Safety";
    /// <summary>
    /// Performance tests: Latency, throughput, resource usage guarantees.
    /// </summary>
    /// <remarks>
    /// Validates performance characteristics that are part of the product
    /// promise: SLA latency bounds, throughput targets, memory limits.
    /// Failures may result in degraded user experience or SLA violations.
    /// </remarks>
    public const string Performance = "Performance";
    /// <summary>
    /// Competitive tests: Feature parity with competitor tools, market requirements.
    /// </summary>
    /// <remarks>
    /// Validates features that provide competitive parity or differentiation.
    /// Failures may result in customer churn or lost deals. Link to product
    /// requirements or competitor feature matrices.
    /// </remarks>
    public const string Competitive = "Competitive";
    /// <summary>
    /// Operational tests: Observability, diagnosability, operational workflows.
    /// </summary>
    /// <remarks>
    /// Validates operational characteristics: logging, metrics, tracing,
    /// health checks, graceful degradation, recovery procedures.
    /// Failures may result in increased MTTR or operational incidents.
    /// </remarks>
    public const string Operational = "Operational";
    /// <summary>
    /// Get all defined intent categories.
    /// </summary>
    public static IReadOnlyList<string> All { get; } =
        [Regulatory, Safety, Performance, Competitive, Operational];
    /// <summary>
    /// Validate that a string is a valid intent category (case-insensitive).
    /// </summary>
    public static bool IsValid(string intent) =>
        All.Any(candidate => string.Equals(candidate, intent, StringComparison.OrdinalIgnoreCase));
}

View File

@@ -0,0 +1,326 @@
using FluentAssertions;
using StellaOps.TestKit.Environment;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for environment skew testing infrastructure.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class EnvironmentSkewTests
{
    #region EnvironmentProfile Tests
    [Fact]
    public void EnvironmentProfile_Standard_HasCorrectDefaults()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.Standard;
        // Assert
        profile.Name.Should().Be("Standard");
        profile.Cpu.Architecture.Should().Be(CpuArchitecture.X64);
        profile.Network.Latency.Should().Be(TimeSpan.Zero);
        profile.Runtime.Should().Be(ContainerRuntime.Docker);
    }
    [Fact]
    public void EnvironmentProfile_HighLatency_Has100msLatency()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.HighLatency;
        // Assert
        profile.Name.Should().Be("HighLatency");
        profile.Network.Latency.Should().Be(TimeSpan.FromMilliseconds(100));
    }
    [Fact]
    public void EnvironmentProfile_LowBandwidth_Has10MbpsLimit()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.LowBandwidth;
        // Assert
        profile.Name.Should().Be("LowBandwidth");
        profile.Network.BandwidthMbps.Should().Be(10);
    }
    [Fact]
    public void EnvironmentProfile_PacketLoss_Has1PercentLoss()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.PacketLoss;
        // Assert
        profile.Name.Should().Be("PacketLoss");
        profile.Network.PacketLossRate.Should().Be(0.01);
    }
    [Fact]
    public void EnvironmentProfile_ArmCpu_HasArm64Architecture()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.ArmCpu;
        // Assert
        profile.Name.Should().Be("ArmCpu");
        profile.Cpu.Architecture.Should().Be(CpuArchitecture.Arm64);
    }
    [Fact]
    public void EnvironmentProfile_ResourceConstrained_HasLimits()
    {
        // Arrange & Act
        var profile = EnvironmentProfile.ResourceConstrained;
        // Assert
        profile.Name.Should().Be("ResourceConstrained");
        profile.ResourceLimits.MemoryMb.Should().Be(256);
        profile.ResourceLimits.CpuCores.Should().Be(1);
    }
    [Fact]
    public void EnvironmentProfile_All_ContainsExpectedProfiles()
    {
        // Arrange & Act
        var profiles = EnvironmentProfile.All;
        // Assert
        profiles.Should().HaveCount(5);
        profiles.Should().Contain(p => p.Name == "Standard");
        profiles.Should().Contain(p => p.Name == "HighLatency");
        profiles.Should().Contain(p => p.Name == "LowBandwidth");
        profiles.Should().Contain(p => p.Name == "PacketLoss");
        profiles.Should().Contain(p => p.Name == "ResourceConstrained");
    }
    [Fact]
    public void NetworkProfile_RequiresNetworkShaping_ReturnsTrueWhenConfigured()
    {
        // Arrange & Act & Assert
        new NetworkProfile { Latency = TimeSpan.FromMilliseconds(50) }
            .RequiresNetworkShaping.Should().BeTrue();
        new NetworkProfile { PacketLossRate = 0.01 }
            .RequiresNetworkShaping.Should().BeTrue();
        new NetworkProfile { BandwidthMbps = 10 }
            .RequiresNetworkShaping.Should().BeTrue();
        new NetworkProfile()
            .RequiresNetworkShaping.Should().BeFalse();
    }
    #endregion
    #region SkewTestRunner Tests
    [Fact]
    public async Task SkewTestRunner_RunAcrossProfiles_ExecutesTestForEachProfile()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var executedProfiles = new List<string>();
        // Act
        var report = await runner.RunAcrossProfiles(
            test: () =>
            {
                executedProfiles.Add("executed");
                return Task.FromResult(new TestResult { Value = 1.0, DurationMs = 10 });
            },
            profiles: [EnvironmentProfile.Standard, EnvironmentProfile.HighLatency]);
        // Assert
        report.ProfileCount.Should().Be(2);
        report.Results.Should().HaveCount(2);
        // The delegate must actually have run for each profile (at least once per
        // profile; the runner may perform multiple iterations). Previously this
        // list was collected but never asserted, so the behavior in the test name
        // went unverified.
        executedProfiles.Count.Should().BeGreaterThanOrEqualTo(2);
    }
    [Fact]
    public async Task SkewTestRunner_RunWithProfile_ExecutesMultipleIterations()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var executionCount = 0;
        // Act
        var result = await runner.RunWithProfile(
            test: () =>
            {
                executionCount++;
                return Task.FromResult(new TestResult { Value = executionCount, DurationMs = 10 });
            },
            profile: EnvironmentProfile.Standard,
            iterations: 5);
        // Assert
        executionCount.Should().Be(5);
        result.Results.Should().HaveCount(5);
    }
    [Fact]
    public async Task SkewTestRunner_RunWithProfile_CalculatesAverages()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var values = new[] { 10.0, 20.0, 30.0 };
        var index = 0;
        // Act
        var result = await runner.RunWithProfile(
            test: () => Task.FromResult(new TestResult
            {
                Value = values[index++],
                DurationMs = 100
            }),
            profile: EnvironmentProfile.Standard,
            iterations: 3);
        // Assert
        result.AverageValue.Should().Be(20.0); // (10 + 20 + 30) / 3
    }
    [Fact]
    public async Task SkewTestRunner_RunWithProfile_HandlesErrors()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var iteration = 0;
        // Act
        var result = await runner.RunWithProfile(
            test: () =>
            {
                iteration++;
                if (iteration == 2)
                {
                    throw new InvalidOperationException("Test error");
                }
                return Task.FromResult(new TestResult { Value = 1.0, Success = true });
            },
            profile: EnvironmentProfile.Standard,
            iterations: 3);
        // Assert
        result.Results.Should().HaveCount(3);
        result.SuccessRate.Should().BeApproximately(2.0 / 3.0, 0.01);
    }
    [Fact]
    public async Task SkewTestRunner_AssertEquivalence_PassesWhenResultsAreEquivalent()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var report = await runner.RunAcrossProfiles(
            test: () => Task.FromResult(new TestResult { Value = 100.0, DurationMs = 10 }),
            profiles: [EnvironmentProfile.Standard, EnvironmentProfile.HighLatency]);
        // Act & Assert
        var act = () => runner.AssertEquivalence(report, tolerance: 0.05);
        act.Should().NotThrow();
    }
    [Fact]
    public async Task SkewTestRunner_AssertEquivalence_FailsWhenSkewExceedsTolerance()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var values = new Queue<double>([100.0, 100.0, 100.0, 200.0, 200.0, 200.0]); // 100% difference
        var report = await runner.RunAcrossProfiles(
            test: () => Task.FromResult(new TestResult { Value = values.Dequeue(), DurationMs = 10 }),
            profiles: [EnvironmentProfile.Standard, EnvironmentProfile.HighLatency]);
        // Act & Assert
        var act = () => runner.AssertEquivalence(report, tolerance: 0.05);
        act.Should().Throw<SkewAssertException>();
    }
    [Fact]
    public async Task SkewTestRunner_AssertEquivalence_IgnoresSingleProfile()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var report = await runner.RunAcrossProfiles(
            test: () => Task.FromResult(new TestResult { Value = 100.0, DurationMs = 10 }),
            profiles: [EnvironmentProfile.Standard]);
        // Act & Assert - should not throw for single profile
        var act = () => runner.AssertEquivalence(report, tolerance: 0.05);
        act.Should().NotThrow();
    }
    #endregion
    #region SkewReport Tests
    [Fact]
    public async Task SkewReport_ToJson_ProducesValidJson()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var report = await runner.RunAcrossProfiles(
            test: () => Task.FromResult(new TestResult { Value = 1.0 }),
            profiles: [EnvironmentProfile.Standard]);
        // Act
        var json = report.ToJson();
        // Assert
        json.Should().Contain("\"generatedAt\"");
        json.Should().Contain("\"profileCount\"");
        json.Should().Contain("\"hasSkew\"");
    }
    [Fact]
    public async Task SkewReport_ToMarkdown_ProducesValidMarkdown()
    {
        // Arrange
        var runner = new SkewTestRunner();
        var report = await runner.RunAcrossProfiles(
            test: () => Task.FromResult(new TestResult { Value = 1.0 }),
            profiles: [EnvironmentProfile.Standard]);
        // Act
        var markdown = report.ToMarkdown();
        // Assert
        markdown.Should().Contain("# Environment Skew Report");
        markdown.Should().Contain("| Profile |");
        markdown.Should().Contain("Standard");
    }
    #endregion
    #region TestResult Tests
    [Fact]
    public void TestResult_Defaults_AreCorrect()
    {
        // Arrange & Act
        var result = new TestResult();
        // Assert
        result.Success.Should().BeTrue();
        result.ProfileName.Should().BeEmpty();
        result.Metadata.Should().BeEmpty();
    }
    #endregion
    #region SkewAssertException Tests
    [Fact]
    public void SkewAssertException_SetsMessage()
    {
        // Arrange & Act
        var ex = new SkewAssertException("Test message");
        // Assert
        ex.Message.Should().Be("Test message");
    }
    #endregion
}

View File

@@ -0,0 +1,400 @@
using System.Reflection;
using FluentAssertions;
using StellaOps.TestKit.Evidence;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for evidence chain traceability infrastructure.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class EvidenceChainTests
{
#region RequirementAttribute Tests
[Fact]
public void RequirementAttribute_Constructor_SetsRequirementId()
{
    // Arrange + Act: construct with an explicit requirement id.
    var attribute = new RequirementAttribute("REQ-TEST-001");

    // Assert: the id is surfaced unchanged.
    attribute.RequirementId.Should().Be("REQ-TEST-001");
}
[Fact]
public void RequirementAttribute_Constructor_ThrowsOnNullRequirementId()
{
    // Arrange & Act: defer construction so the assertion can observe the throw.
    Func<RequirementAttribute> act = () => new RequirementAttribute(null!);

    // Assert: null requirement ids are rejected eagerly.
    act.Should().Throw<ArgumentNullException>();
}
[Fact]
public void RequirementAttribute_OptionalProperties_DefaultToEmpty()
{
    // Arrange + Act
    var attribute = new RequirementAttribute("REQ-TEST-001");

    // Assert: every optional linkage defaults to an empty string, not null.
    attribute.SprintTaskId.Should().BeEmpty();
    attribute.ComplianceControl.Should().BeEmpty();
    attribute.SourceDocument.Should().BeEmpty();
}
[Fact]
public void RequirementAttribute_GetTraits_ReturnsRequirementTrait()
{
// Arrange
var attr = new RequirementAttribute("REQ-TEST-001");
// Act
var traits = attr.GetTraits();
// Assert
traits.Should().ContainSingle(t => t.Key == "Requirement" && t.Value == "REQ-TEST-001");
}
[Fact]
public void RequirementAttribute_GetTraits_IncludesSprintTaskWhenSet()
{
// Arrange
var attr = new RequirementAttribute("REQ-TEST-001") { SprintTaskId = "SPRINT-001" };
// Act
var traits = attr.GetTraits();
// Assert
traits.Should().Contain(t => t.Key == "Requirement" && t.Value == "REQ-TEST-001");
traits.Should().Contain(t => t.Key == "SprintTask" && t.Value == "SPRINT-001");
}
[Fact]
public void RequirementAttribute_GetTraits_IncludesComplianceControlWhenSet()
{
// Arrange
var attr = new RequirementAttribute("REQ-TEST-001") { ComplianceControl = "SOC2-CC6.1" };
// Act
var traits = attr.GetTraits();
// Assert
traits.Should().Contain(t => t.Key == "ComplianceControl" && t.Value == "SOC2-CC6.1");
}
#endregion
#region EvidenceChainAssert Tests
[Fact]
public void ComputeSha256_ReturnsLowercaseHex()
{
// Arrange
var content = "test content";
// Act
var hash = EvidenceChainAssert.ComputeSha256(content);
// Assert
hash.Should().NotBeNullOrEmpty();
hash.Should().MatchRegex("^[0-9a-f]{64}$");
}
[Fact]
public void ComputeSha256_IsDeterministic()
{
// Arrange
var content = "deterministic test";
// Act
var hash1 = EvidenceChainAssert.ComputeSha256(content);
var hash2 = EvidenceChainAssert.ComputeSha256(content);
// Assert
hash1.Should().Be(hash2);
}
[Fact]
public void ComputeSha256_Bytes_MatchesStringVersion()
{
// Arrange
var content = "test content";
var bytes = System.Text.Encoding.UTF8.GetBytes(content);
// Act
var hashFromString = EvidenceChainAssert.ComputeSha256(content);
var hashFromBytes = EvidenceChainAssert.ComputeSha256(bytes);
// Assert
hashFromString.Should().Be(hashFromBytes);
}
[Fact]
public void ArtifactHashStable_PassesWithCorrectHash()
{
// Arrange
var content = "test artifact";
var expectedHash = EvidenceChainAssert.ComputeSha256(content);
// Act & Assert
var act = () => EvidenceChainAssert.ArtifactHashStable(content, expectedHash);
act.Should().NotThrow();
}
[Fact]
public void ArtifactHashStable_ThrowsWithIncorrectHash()
{
// Arrange
var content = "test artifact";
var wrongHash = new string('0', 64);
// Act & Assert
var act = () => EvidenceChainAssert.ArtifactHashStable(content, wrongHash);
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Artifact hash mismatch*");
}
[Fact]
public void ArtifactHashStable_ThrowsOnNullArtifact()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.ArtifactHashStable((byte[])null!, "hash");
act.Should().Throw<ArgumentNullException>();
}
[Fact]
public void ArtifactImmutable_PassesWithDeterministicGenerator()
{
// Arrange
var counter = 0;
Func<string> generator = () =>
{
counter++;
return "immutable content";
};
// Act & Assert
var act = () => EvidenceChainAssert.ArtifactImmutable(generator, iterations: 5);
act.Should().NotThrow();
counter.Should().Be(5);
}
[Fact]
public void ArtifactImmutable_ThrowsWithNonDeterministicGenerator()
{
// Arrange
var counter = 0;
Func<string> generator = () =>
{
counter++;
return $"non-deterministic content {counter}";
};
// Act & Assert
var act = () => EvidenceChainAssert.ArtifactImmutable(generator, iterations: 5);
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Artifact not immutable*");
}
[Fact]
public void ArtifactImmutable_ThrowsWithLessThanTwoIterations()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.ArtifactImmutable(() => "content", iterations: 1);
act.Should().Throw<ArgumentOutOfRangeException>();
}
[Fact]
public void RequirementLinked_PassesWithValidRequirementId()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.RequirementLinked("REQ-TEST-001");
act.Should().NotThrow();
}
[Fact]
public void RequirementLinked_ThrowsWithEmptyRequirementId()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.RequirementLinked("");
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*cannot be empty*");
}
[Fact]
public void RequirementLinked_ThrowsWithWhitespaceRequirementId()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.RequirementLinked(" ");
act.Should().Throw<EvidenceTraceabilityException>();
}
[Fact]
public void TraceabilityComplete_PassesWithAllComponents()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.TraceabilityComplete(
"REQ-001",
"MyTests.TestMethod",
"sha256:abc123");
act.Should().NotThrow();
}
[Fact]
public void TraceabilityComplete_ThrowsWithMissingRequirement()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.TraceabilityComplete(
"",
"MyTests.TestMethod",
"sha256:abc123");
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Requirement ID is missing*");
}
[Fact]
public void TraceabilityComplete_ThrowsWithMissingTestId()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.TraceabilityComplete(
"REQ-001",
null!,
"sha256:abc123");
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Test ID is missing*");
}
[Fact]
public void TraceabilityComplete_ThrowsWithMissingArtifactId()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.TraceabilityComplete(
"REQ-001",
"MyTests.TestMethod",
" ");
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Artifact ID is missing*");
}
[Fact]
public void TraceabilityComplete_ReportsAllMissingComponents()
{
// Arrange & Act & Assert
var act = () => EvidenceChainAssert.TraceabilityComplete("", "", "");
act.Should().Throw<EvidenceTraceabilityException>()
.WithMessage("*Requirement ID is missing*")
.WithMessage("*Test ID is missing*")
.WithMessage("*Artifact ID is missing*");
}
#endregion
#region EvidenceChainReporter Tests
[Fact]
public void EvidenceChainReporter_GenerateReport_ReturnsEmptyReportForNoAssemblies()
{
// Arrange
var reporter = new EvidenceChainReporter();
// Act
var report = reporter.GenerateReport();
// Assert
report.TotalRequirements.Should().Be(0);
report.TotalTests.Should().Be(0);
report.AssembliesScanned.Should().BeEmpty();
}
[Fact]
public void EvidenceChainReporter_GenerateReport_ScansAssemblyForRequirements()
{
// Arrange
var reporter = new EvidenceChainReporter();
reporter.AddAssembly(typeof(EvidenceChainTests).Assembly);
// Act
var report = reporter.GenerateReport();
// Assert
report.AssembliesScanned.Should().Contain("StellaOps.TestKit.Tests");
}
[Fact]
public void EvidenceChainReport_ToJson_ProducesValidJson()
{
// Arrange
var reporter = new EvidenceChainReporter();
var report = reporter.GenerateReport();
// Act
var json = report.ToJson();
// Assert
json.Should().NotBeNullOrEmpty();
json.Should().Contain("\"totalRequirements\"");
json.Should().Contain("\"totalTests\"");
}
[Fact]
public void EvidenceChainReport_ToMarkdown_ProducesValidMarkdown()
{
// Arrange
var reporter = new EvidenceChainReporter();
var report = reporter.GenerateReport();
// Act
var markdown = report.ToMarkdown();
// Assert
markdown.Should().Contain("# Evidence Chain Traceability Report");
markdown.Should().Contain("## Traceability Matrix");
}
#endregion
#region EvidenceTraceabilityException Tests
[Fact]
public void EvidenceTraceabilityException_ConstructorWithMessage_SetsMessage()
{
// Arrange & Act
var ex = new EvidenceTraceabilityException("Test error");
// Assert
ex.Message.Should().Be("Test error");
}
[Fact]
public void EvidenceTraceabilityException_ConstructorWithInnerException_SetsInnerException()
{
// Arrange
var inner = new InvalidOperationException("Inner");
// Act
var ex = new EvidenceTraceabilityException("Outer", inner);
// Assert
ex.Message.Should().Be("Outer");
ex.InnerException.Should().Be(inner);
}
#endregion
}
/// <summary>
/// Fixture class decorated with <c>[Requirement]</c> (at both class and method level)
/// so that <c>EvidenceChainReporter</c> scanning tests have real attributes to discover.
/// </summary>
[Requirement("REQ-REPORTER-TEST-001")]
public sealed class RequirementTestFixture
{
    [Fact]
    [Requirement("REQ-REPORTER-TEST-002", SprintTaskId = "TEST-001")]
    public void SampleTestWithRequirement()
    {
        // Intentionally empty: this test exists solely so the reporter can scan
        // the assembly and find [Requirement] attributes on a passing test.
    }
}

View File

@@ -0,0 +1,360 @@
using FluentAssertions;
using StellaOps.TestKit.Incident;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for post-incident test generation infrastructure:
/// <c>IncidentMetadata</c> defaults, scaffold generation from a replay manifest,
/// incident-test registration, report aggregation, and scaffold serialization.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class IncidentTestGeneratorTests
{
    // Minimal replay manifest covering the fields the generator extracts
    // (scan identity/digests and reachability graphs).
    private static readonly string SampleManifestJson = """
        {
        "schemaVersion": "2.0",
        "scan": {
        "id": "scan-001",
        "time": "2026-01-15T10:30:00Z",
        "policyDigest": "sha256:abc123",
        "scorePolicyDigest": "sha256:def456"
        },
        "reachability": {
        "analysisId": "analysis-001",
        "graphs": [
        {
        "kind": "static",
        "hash": "sha256:graph123",
        "analyzer": "java-callgraph"
        }
        ],
        "runtimeTraces": []
        }
        }
        """;

    // Canonical P1 incident used by most tests; individual tests derive
    // variants via `with` expressions.
    // NOTE(review): DateTimeOffset.Parse without an explicit culture is
    // culture-sensitive in general; the ISO-8601 literal used here presumably
    // parses identically everywhere — confirm, or pass CultureInfo.InvariantCulture.
    private static readonly IncidentMetadata SampleMetadata = new()
    {
        IncidentId = "INC-2026-001",
        OccurredAt = DateTimeOffset.Parse("2026-01-15T10:30:00Z"),
        RootCause = "Race condition in concurrent writes",
        AffectedModules = ["EvidenceLocker", "Policy"],
        Severity = IncidentSeverity.P1,
        Title = "Evidence bundle duplication"
    };

    #region IncidentMetadata Tests

    [Fact]
    public void IncidentMetadata_RequiredProperties_AreSet()
    {
        // Arrange & Act
        var metadata = new IncidentMetadata
        {
            IncidentId = "INC-001",
            OccurredAt = DateTimeOffset.UtcNow,
            RootCause = "Test cause",
            AffectedModules = ["Module1"],
            Severity = IncidentSeverity.P2
        };
        // Assert
        metadata.IncidentId.Should().Be("INC-001");
        metadata.RootCause.Should().Be("Test cause");
        metadata.Severity.Should().Be(IncidentSeverity.P2);
    }

    [Fact]
    public void IncidentMetadata_OptionalProperties_HaveDefaults()
    {
        // Arrange & Act
        var metadata = new IncidentMetadata
        {
            IncidentId = "INC-001",
            OccurredAt = DateTimeOffset.UtcNow,
            RootCause = "Test",
            AffectedModules = ["Module1"],
            Severity = IncidentSeverity.P3
        };
        // Assert: optional fields default to empty (strings/collections) or null (ResolvedAt).
        metadata.Title.Should().BeEmpty();
        metadata.ReportUrl.Should().BeEmpty();
        metadata.ResolvedAt.Should().BeNull();
        metadata.CorrelationIds.Should().BeEmpty();
        metadata.FixTaskId.Should().BeEmpty();
        metadata.Tags.Should().BeEmpty();
    }

    [Fact]
    public void IncidentSeverity_P1_HasCorrectValue()
    {
        // Assert: numeric values are part of the serialization contract (P1 = 1 … P4 = 4).
        ((int)IncidentSeverity.P1).Should().Be(1);
        ((int)IncidentSeverity.P2).Should().Be(2);
        ((int)IncidentSeverity.P3).Should().Be(3);
        ((int)IncidentSeverity.P4).Should().Be(4);
    }

    #endregion

    #region IncidentTestGenerator Tests

    [Fact]
    public void GenerateFromManifestJson_CreatesValidScaffold()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Assert: incident ID is sanitized into the class name (dashes → underscores).
        scaffold.Should().NotBeNull();
        scaffold.Metadata.Should().Be(SampleMetadata);
        scaffold.TestClassName.Should().Contain("INC_2026_001");
        scaffold.TestMethodName.Should().Contain("Validates");
        scaffold.ReplayManifestHash.Should().StartWith("sha256:");
    }

    [Fact]
    public void GenerateFromManifestJson_ExtractsInputFixtures()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Assert
        scaffold.InputFixtures.Should().ContainKey("scan");
        scaffold.InputFixtures.Should().ContainKey("reachabilityGraphs");
    }

    [Fact]
    public void GenerateFromManifestJson_ExtractsExpectedOutputs()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Assert: the policy digest comes straight from the manifest's scan section.
        scaffold.ExpectedOutputs.Should().ContainKey("policyDigest");
        scaffold.ExpectedOutputs["policyDigest"].Should().Be("sha256:abc123");
    }

    [Fact]
    public void GenerateFromManifestJson_GeneratesImplementationNotes()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Assert: notes mention the incident ID and root cause for the implementer.
        scaffold.ImplementationNotes.Should().NotBeEmpty();
        scaffold.ImplementationNotes.Should().Contain(n => n.Contains("INC-2026-001"));
        scaffold.ImplementationNotes.Should().Contain(n => n.Contains("Race condition"));
    }

    [Fact]
    public void GenerateFromManifestJson_SetsNamespaceFromModule()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Assert: namespace is derived from the first affected module.
        scaffold.Namespace.Should().Contain("EvidenceLocker");
    }

    [Fact]
    public void GenerateFromManifestJson_ThrowsOnNullManifest()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act & Assert
        var act = () => generator.GenerateFromManifestJson(null!, SampleMetadata);
        act.Should().Throw<ArgumentNullException>();
    }

    [Fact]
    public void GenerateFromManifestJson_ThrowsOnNullMetadata()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act & Assert
        var act = () => generator.GenerateFromManifestJson(SampleManifestJson, null!);
        act.Should().Throw<ArgumentNullException>();
    }

    #endregion

    #region RegisterIncidentTest Tests

    [Fact]
    public void RegisterIncidentTest_AddsToRegistry()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Act
        generator.RegisterIncidentTest("INC-2026-001", scaffold);
        // Assert
        generator.RegisteredTests.Should().ContainKey("INC-2026-001");
        generator.RegisteredTests["INC-2026-001"].Should().Be(scaffold);
    }

    [Fact]
    public void RegisterIncidentTest_OverwritesExisting()
    {
        // Arrange: two scaffolds for the same incident ID; last-write wins.
        var generator = new IncidentTestGenerator();
        var scaffold1 = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        var scaffold2 = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata with { Title = "Updated" });
        // Act
        generator.RegisterIncidentTest("INC-2026-001", scaffold1);
        generator.RegisterIncidentTest("INC-2026-001", scaffold2);
        // Assert
        generator.RegisteredTests["INC-2026-001"].Metadata.Title.Should().Be("Updated");
    }

    #endregion

    #region GenerateReport Tests

    [Fact]
    public void GenerateReport_ReturnsEmptyForNoTests()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        // Act
        var report = generator.GenerateReport();
        // Assert
        report.TotalTests.Should().Be(0);
        report.Tests.Should().BeEmpty();
    }

    [Fact]
    public void GenerateReport_CountsBySeverity()
    {
        // Arrange: one P1 and one P2 incident.
        var generator = new IncidentTestGenerator();
        var p1Metadata = SampleMetadata with { IncidentId = "INC-001", Severity = IncidentSeverity.P1 };
        var p2Metadata = SampleMetadata with { IncidentId = "INC-002", Severity = IncidentSeverity.P2 };
        generator.RegisterIncidentTest("INC-001", generator.GenerateFromManifestJson(SampleManifestJson, p1Metadata));
        generator.RegisterIncidentTest("INC-002", generator.GenerateFromManifestJson(SampleManifestJson, p2Metadata));
        // Act
        var report = generator.GenerateReport();
        // Assert
        // NOTE(review): "BySeveority" looks like a misspelling of "BySeverity" on the
        // production report type; this test can only follow the declared name —
        // confirm and rename in the production type (and here) together.
        report.TotalTests.Should().Be(2);
        report.BySeveority.Should().ContainKey(IncidentSeverity.P1);
        report.BySeveority.Should().ContainKey(IncidentSeverity.P2);
        report.BySeveority[IncidentSeverity.P1].Should().Be(1);
        report.BySeveority[IncidentSeverity.P2].Should().Be(1);
    }

    [Fact]
    public void GenerateReport_CountsByModule()
    {
        // Arrange: SampleMetadata lists two affected modules.
        var generator = new IncidentTestGenerator();
        generator.RegisterIncidentTest("INC-001", generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata));
        // Act
        var report = generator.GenerateReport();
        // Assert
        report.ByModule.Should().ContainKey("EvidenceLocker");
        report.ByModule.Should().ContainKey("Policy");
    }

    #endregion

    #region TestScaffold Tests

    [Fact]
    public void TestScaffold_GenerateTestCode_ProducesValidCSharp()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Act
        var code = scaffold.GenerateTestCode();
        // Assert: generated code carries the namespace, class, and incident traits.
        code.Should().Contain("namespace StellaOps.EvidenceLocker.Tests.PostIncident");
        code.Should().Contain($"public sealed class {scaffold.TestClassName}");
        code.Should().Contain("[Fact]");
        code.Should().Contain("[Trait(\"Category\", TestCategories.PostIncident)]");
        code.Should().Contain($"[Trait(\"Incident\", \"{SampleMetadata.IncidentId}\")]");
    }

    [Fact]
    public void TestScaffold_GenerateTestCode_IncludesIncidentMetadata()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Act
        var code = scaffold.GenerateTestCode();
        // Assert
        code.Should().Contain("INC-2026-001");
        code.Should().Contain("Race condition in concurrent writes");
        code.Should().Contain("IncidentSeverity.P1");
    }

    [Fact]
    public void TestScaffold_ToJson_ProducesValidJson()
    {
        // Arrange
        var generator = new IncidentTestGenerator();
        var scaffold = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        // Act
        var json = scaffold.ToJson();
        // Assert: camelCase keys are part of the serialization contract.
        json.Should().Contain("\"incidentId\"");
        json.Should().Contain("\"testClassName\"");
        json.Should().Contain("\"inputFixtures\"");
    }

    [Fact]
    public void TestScaffold_FromJson_DeserializesCorrectly()
    {
        // Arrange: round-trip through JSON.
        var generator = new IncidentTestGenerator();
        var original = generator.GenerateFromManifestJson(SampleManifestJson, SampleMetadata);
        var json = original.ToJson();
        // Act
        var deserialized = TestScaffold.FromJson(json);
        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Metadata.IncidentId.Should().Be(original.Metadata.IncidentId);
        deserialized.TestClassName.Should().Be(original.TestClassName);
    }

    #endregion
}

View File

@@ -0,0 +1,159 @@
using FluentAssertions;
using StellaOps.TestKit.Analysis;
using StellaOps.TestKit.Traits;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests covering the <see cref="TestIntents"/> catalogue, <see cref="IntentAttribute"/>
/// trait emission, and <see cref="IntentCoverageReportGenerator"/>/<see cref="IntentCoverageReport"/>.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class IntentCoverageReportTests
{
    [Fact]
    public void TestIntents_All_ContainsAllCategories()
    {
        // Every known intent category must be listed, order irrelevant.
        string[] expectedIntents =
        [
            TestIntents.Regulatory,
            TestIntents.Safety,
            TestIntents.Performance,
            TestIntents.Competitive,
            TestIntents.Operational
        ];

        TestIntents.All.Should().BeEquivalentTo(expectedIntents);
    }

    [Fact]
    public void TestIntents_IsValid_ValidatesKnownIntents()
    {
        // Each known category validates.
        foreach (var knownIntent in new[] { "Regulatory", "Safety", "Performance", "Competitive", "Operational" })
        {
            TestIntents.IsValid(knownIntent).Should().BeTrue();
        }

        // Validation ignores casing.
        TestIntents.IsValid("regulatory").Should().BeTrue();
        TestIntents.IsValid("SAFETY").Should().BeTrue();

        // Unknown or empty values are rejected.
        TestIntents.IsValid("Unknown").Should().BeFalse();
        TestIntents.IsValid("").Should().BeFalse();
    }

    [Fact]
    public void IntentAttribute_CreatesTraits()
    {
        // With a rationale supplied, both Intent and IntentRationale traits are emitted.
        var intentAttribute = new IntentAttribute(TestIntents.Safety, "Security requirement");

        intentAttribute.Intent.Should().Be(TestIntents.Safety);
        intentAttribute.Rationale.Should().Be("Security requirement");

        var emittedTraits = intentAttribute.GetTraits();
        emittedTraits.Should().Contain(new KeyValuePair<string, string>("Intent", "Safety"));
        emittedTraits.Should().Contain(new KeyValuePair<string, string>("IntentRationale", "Security requirement"));
    }

    [Fact]
    public void IntentAttribute_WithoutRationale_OnlyIntentTrait()
    {
        // Without a rationale, only the Intent trait is emitted.
        var intentAttribute = new IntentAttribute(TestIntents.Operational);

        intentAttribute.Intent.Should().Be(TestIntents.Operational);
        intentAttribute.Rationale.Should().BeEmpty();

        var emittedTraits = intentAttribute.GetTraits();
        emittedTraits.Should().ContainSingle();
        emittedTraits.Should().Contain(new KeyValuePair<string, string>("Intent", "Operational"));
    }

    [Fact]
    public void IntentCoverageReportGenerator_EmptyAssemblies_ReturnsEmptyReport()
    {
        // No assemblies registered → every counter is zero and no module stats exist.
        var reportGenerator = new IntentCoverageReportGenerator();

        var emptyReport = reportGenerator.Generate();

        emptyReport.TotalTests.Should().Be(0);
        emptyReport.TaggedTests.Should().Be(0);
        emptyReport.UntaggedTests.Should().Be(0);
        emptyReport.TagCoveragePercent.Should().Be(0);
        emptyReport.ModuleStats.Should().BeEmpty();
    }

    [Fact]
    public void IntentCoverageReportGenerator_ScansSelfAssembly()
    {
        // Registering this test assembly must surface at least its own tests.
        var reportGenerator = new IntentCoverageReportGenerator();
        reportGenerator.AddAssembly(typeof(IntentCoverageReportTests).Assembly);

        var selfReport = reportGenerator.Generate();

        selfReport.TotalTests.Should().BeGreaterThan(0);
    }

    [Fact]
    public void IntentCoverageReport_ToMarkdown_GeneratesValidOutput()
    {
        // Build a fully-populated report by hand and check every Markdown section renders.
        var coverageReport = new IntentCoverageReport
        {
            GeneratedAt = new DateTimeOffset(2026, 1, 27, 12, 0, 0, TimeSpan.Zero),
            TotalTests = 100,
            TaggedTests = 60,
            UntaggedTests = 40,
            TagCoveragePercent = 60.0,
            IntentDistribution = new Dictionary<string, int>
            {
                [TestIntents.Safety] = 20,
                [TestIntents.Regulatory] = 15,
                [TestIntents.Operational] = 25,
                [TestIntents.Performance] = 0,
                [TestIntents.Competitive] = 0
            },
            ModuleStats = new Dictionary<string, ModuleIntentStatsReadOnly>
            {
                ["Policy"] = new ModuleIntentStatsReadOnly
                {
                    ModuleName = "Policy",
                    TotalTests = 50,
                    TaggedTests = 30,
                    TestsWithRationale = 10,
                    TagCoveragePercent = 60.0,
                    IntentCounts = new Dictionary<string, int>
                    {
                        [TestIntents.Safety] = 15,
                        [TestIntents.Regulatory] = 15
                    }
                }
            },
            Warnings = new List<string>
            {
                "Low intent coverage: only 60.0% of tests have intent tags"
            }
        };

        var rendered = coverageReport.ToMarkdown();

        rendered.Should().Contain("# Intent Coverage Report");
        rendered.Should().Contain("Total tests: 100");
        rendered.Should().Contain("Tagged: 60 (60.0%)");
        rendered.Should().Contain("## Intent Distribution");
        rendered.Should().Contain("| Safety |");
        rendered.Should().Contain("## Per-Module Coverage");
        rendered.Should().Contain("| Policy |");
        rendered.Should().Contain("## Warnings");
        rendered.Should().Contain("Low intent coverage");
    }

    [Fact]
    public void IntentCoverageReport_GeneratesWarning_WhenSafetyMissing()
    {
        // With no assemblies there are no Safety-tagged tests, which must warn.
        var reportGenerator = new IntentCoverageReportGenerator();

        var emptyReport = reportGenerator.Generate();

        emptyReport.Warnings.Should().Contain("No tests tagged with Safety intent");
    }
}

View File

@@ -0,0 +1,360 @@
using FluentAssertions;
using StellaOps.TestKit.Interop;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for cross-version interoperability testing infrastructure:
/// <c>SchemaVersionMatrix</c> compatibility analysis and the
/// <c>VersionCompatibilityFixture</c> multi-version endpoint harness.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class InteropTests
{
    #region SchemaVersionMatrix Tests

    [Fact]
    public void SchemaVersionMatrix_AddVersion_StoresSchema()
    {
        // Arrange
        var matrix = new SchemaVersionMatrix();
        var schema = new SchemaDefinition
        {
            RequiredFields = ["id", "name"]
        };
        // Act
        matrix.AddVersion("1.0", schema);
        // Assert
        matrix.Versions.Should().Contain("1.0");
        matrix.GetVersion("1.0").Should().Be(schema);
    }

    [Fact]
    public void SchemaVersionMatrix_IsBackwardCompatible_ReturnsTrueWhenNoFieldsRemoved()
    {
        // Arrange: v2 only adds fields on top of v1, removing none.
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name"]
        });
        matrix.AddVersion("2.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name", "type"], // Added field, none removed
            OptionalFields = ["description"]
        });
        // Act & Assert
        matrix.IsBackwardCompatible("1.0", "2.0").Should().BeTrue();
    }

    [Fact]
    public void SchemaVersionMatrix_IsBackwardCompatible_ReturnsFalseWhenFieldsRemoved()
    {
        // Arrange: v2 drops a field v1 required → not backward compatible.
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name", "oldField"]
        });
        matrix.AddVersion("2.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name"] // oldField removed
        });
        // Act & Assert
        matrix.IsBackwardCompatible("1.0", "2.0").Should().BeFalse();
    }

    [Fact]
    public void SchemaVersionMatrix_IsForwardCompatible_ReturnsTrueWhenNewFieldsHaveDefaults()
    {
        // Arrange: the new required field ships a default, so old writers remain valid.
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name"]
        });
        matrix.AddVersion("2.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name", "type"],
            FieldDefaults = new() { ["type"] = "default" }
        });
        // Act & Assert
        matrix.IsForwardCompatible("1.0", "2.0").Should().BeTrue();
    }

    [Fact]
    public void SchemaVersionMatrix_IsForwardCompatible_ReturnsFalseWhenNewRequiredFieldsHaveNoDefaults()
    {
        // Arrange
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name"]
        });
        matrix.AddVersion("2.0", new SchemaDefinition
        {
            RequiredFields = ["id", "name", "type"] // No default for "type"
        });
        // Act & Assert
        matrix.IsForwardCompatible("1.0", "2.0").Should().BeFalse();
    }

    [Fact]
    public void SchemaVersionMatrix_Analyze_GeneratesReport()
    {
        // Arrange
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition { RequiredFields = ["id"] });
        matrix.AddVersion("2.0", new SchemaDefinition { RequiredFields = ["id", "name"] });
        // Act
        var report = matrix.Analyze();
        // Assert: both directed pairs are analyzed.
        report.Versions.Should().Contain(["1.0", "2.0"]);
        report.Pairs.Should().HaveCount(2); // 1.0->2.0 and 2.0->1.0
        report.GeneratedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
    }

    [Fact]
    public void SchemaVersionMatrix_Analyze_DetectsTypeChanges()
    {
        // Arrange: same field, different declared type across versions.
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition
        {
            RequiredFields = ["id"],
            FieldTypes = new() { ["id"] = "int" }
        });
        matrix.AddVersion("2.0", new SchemaDefinition
        {
            RequiredFields = ["id"],
            FieldTypes = new() { ["id"] = "string" } // Type changed
        });
        // Act
        var report = matrix.Analyze();
        // Assert
        var pair = report.Pairs.First(p => p.FromVersion == "1.0" && p.ToVersion == "2.0");
        pair.IsBackwardCompatible.Should().BeFalse();
        pair.BackwardIssues.Should().Contain(i => i.Contains("Type changed"));
    }

    [Fact]
    public void CompatibilityReport_ToMarkdown_ProducesValidMarkdown()
    {
        // Arrange
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition { RequiredFields = ["id"] });
        matrix.AddVersion("2.0", new SchemaDefinition { RequiredFields = ["id"] });
        var report = matrix.Analyze();
        // Act
        var markdown = report.ToMarkdown();
        // Assert
        markdown.Should().Contain("# Schema Compatibility Report");
        markdown.Should().Contain("| From | To |");
        markdown.Should().Contain("1.0");
        markdown.Should().Contain("2.0");
    }

    [Fact]
    public void CompatibilityReport_ToJson_ProducesValidJson()
    {
        // Arrange
        var matrix = new SchemaVersionMatrix();
        matrix.AddVersion("1.0", new SchemaDefinition { RequiredFields = ["id"] });
        var report = matrix.Analyze();
        // Act
        var json = report.ToJson();
        // Assert: camelCase keys are part of the serialization contract.
        json.Should().Contain("\"generatedAt\"");
        json.Should().Contain("\"versions\"");
    }

    #endregion

    #region VersionCompatibilityFixture Tests

    [Fact]
    public async Task VersionCompatibilityFixture_Initialize_CreatesCurrentEndpoint()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture
        {
            Config = new VersionCompatibilityConfig { CurrentVersion = "3.0" }
        };
        // Act
        await fixture.InitializeAsync();
        // Assert: initialization stands up a healthy endpoint at the configured version.
        fixture.CurrentEndpoint.Should().NotBeNull();
        fixture.CurrentEndpoint!.Version.Should().Be("3.0");
        fixture.CurrentEndpoint.IsHealthy.Should().BeTrue();
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_StartVersion_CreatesEndpoint()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        // Act
        var endpoint = await fixture.StartVersion("1.0", "EvidenceLocker");
        // Assert
        endpoint.Should().NotBeNull();
        endpoint.Version.Should().Be("1.0");
        endpoint.ServiceName.Should().Be("EvidenceLocker");
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_StartVersion_ReturnsSameEndpointForSameVersion()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        // Act: second start of the same version/service must reuse the endpoint.
        var endpoint1 = await fixture.StartVersion("1.0", "Service");
        var endpoint2 = await fixture.StartVersion("1.0", "Service");
        // Assert
        endpoint1.Should().BeSameAs(endpoint2);
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_TestHandshake_ReturnsSuccess()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        var server = await fixture.StartVersion("1.0", "Service");
        // Act: handshake between the current-version client and the 1.0 server.
        var result = await fixture.TestHandshake(fixture.CurrentEndpoint!, server);
        // Assert
        result.IsSuccess.Should().BeTrue();
        result.ClientVersion.Should().Be(fixture.CurrentEndpoint!.Version);
        result.ServerVersion.Should().Be("1.0");
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_TestMessageFormat_ReturnsSuccess()
    {
        // Arrange: a 1.0 producer and a 2.0 consumer exchanging one message type.
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        var producer = await fixture.StartVersion("1.0", "Producer");
        var consumer = await fixture.StartVersion("2.0", "Consumer");
        // Act
        var result = await fixture.TestMessageFormat(producer, consumer, "EvidenceBundle");
        // Assert
        result.IsSuccess.Should().BeTrue();
        result.Message.Should().Contain("EvidenceBundle");
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_TestSchemaMigration_ReturnsSuccess()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        // Act
        var result = await fixture.TestSchemaMigration("1.0", "2.0", new { id = 1 });
        // Assert
        result.IsSuccess.Should().BeTrue();
        result.FromVersion.Should().Be("1.0");
        result.ToVersion.Should().Be("2.0");
        result.DataPreserved.Should().BeTrue();
        result.RollbackSupported.Should().BeTrue();
        // Cleanup
        await fixture.DisposeAsync();
    }

    [Fact]
    public async Task VersionCompatibilityFixture_StopVersion_RemovesEndpoint()
    {
        // Arrange
        var fixture = new VersionCompatibilityFixture();
        await fixture.InitializeAsync();
        await fixture.StartVersion("1.0", "Service");
        // Act: stop, then start again — the fixture must build a fresh endpoint.
        await fixture.StopVersion("1.0", "Service");
        var newEndpoint = await fixture.StartVersion("1.0", "Service");
        // Assert - new endpoint should be created (different base URL due to increment)
        newEndpoint.Should().NotBeNull();
        // Cleanup
        await fixture.DisposeAsync();
    }

    #endregion

    #region ServiceEndpoint Tests

    [Fact]
    public void ServiceEndpoint_DefaultValues_AreSet()
    {
        // Arrange & Act
        var endpoint = new ServiceEndpoint();
        // Assert: string members default to empty (not null); health defaults to false.
        endpoint.ServiceName.Should().BeEmpty();
        endpoint.Version.Should().BeEmpty();
        endpoint.BaseUrl.Should().BeEmpty();
        endpoint.IsHealthy.Should().BeFalse();
    }

    #endregion

    #region CompatibilityResult Tests

    [Fact]
    public void CompatibilityResult_DefaultValues_AreSet()
    {
        // Arrange & Act
        var result = new CompatibilityResult();
        // Assert
        result.IsSuccess.Should().BeFalse();
        result.Errors.Should().BeEmpty();
        result.Warnings.Should().BeEmpty();
    }

    #endregion
}

View File

@@ -0,0 +1,387 @@
using FluentAssertions;
using StellaOps.TestKit.Longevity;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for time-extended stability testing infrastructure.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class LongevityTests
{
#region StabilityMetrics Tests
[Fact]
public void StabilityMetrics_CaptureBaseline_SetsBaseline()
{
    // Arrange
    var stabilityMetrics = new StabilityMetrics();

    // Act: record the initial measurement.
    stabilityMetrics.CaptureBaseline();

    // Assert: baseline memory is recorded and counts as the first snapshot.
    stabilityMetrics.MemoryBaseline.Should().BeGreaterThan(0);
    stabilityMetrics.Snapshots.Should().HaveCount(1);
}
[Fact]
public void StabilityMetrics_CaptureSnapshot_AddsSnapshot()
{
    // Arrange
    var stabilityMetrics = new StabilityMetrics();
    stabilityMetrics.CaptureBaseline();

    // Act: take two additional snapshots after the baseline.
    for (var i = 0; i < 2; i++)
    {
        stabilityMetrics.CaptureSnapshot();
    }

    // Assert: baseline + 2 snapshots.
    stabilityMetrics.Snapshots.Should().HaveCount(3);
}
[Fact]
public void StabilityMetrics_RecordCounter_StoresValue()
{
    // Arrange
    var stabilityMetrics = new StabilityMetrics();
    stabilityMetrics.CaptureBaseline();

    // Act
    stabilityMetrics.RecordCounter("requests_total", 100);

    // Assert: the counter is stored under its name with the recorded value.
    stabilityMetrics.CounterValues.Should().ContainKey("requests_total");
    stabilityMetrics.CounterValues["requests_total"].Should().Be(100);
}
[Fact]
public void StabilityMetrics_RecordConnectionPool_StoresValues()
{
    // Arrange
    var stabilityMetrics = new StabilityMetrics();
    stabilityMetrics.CaptureBaseline();

    // Act
    stabilityMetrics.RecordConnectionPool(active: 5, leaked: 1);

    // Assert: active and leaked counts are captured independently.
    stabilityMetrics.ConnectionPoolActive.Should().Be(5);
    stabilityMetrics.ConnectionPoolLeaked.Should().Be(1);
}
[Fact]
public void StabilityMetrics_HasMemoryLeak_ReturnsFalseInitially()
{
    // Arrange: nothing beyond the baseline has been captured.
    var stabilityMetrics = new StabilityMetrics();
    stabilityMetrics.CaptureBaseline();

    // Act & Assert: no growth data yet, so no leak can be reported.
    stabilityMetrics.HasMemoryLeak().Should().BeFalse();
}
[Fact]
public void StabilityMetrics_HasConnectionPoolLeak_DetectsLeaks()
{
// Arrange
var metrics = new StabilityMetrics();
metrics.CaptureBaseline();
metrics.RecordConnectionPool(active: 10, leaked: 2);
// Act & Assert
metrics.HasConnectionPoolLeak(maxLeaked: 0).Should().BeTrue();
metrics.HasConnectionPoolLeak(maxLeaked: 2).Should().BeFalse();
}
[Fact]
public void StabilityMetrics_HasDrift_DetectsDriftingCounters()
{
// Arrange
var metrics = new StabilityMetrics();
metrics.CaptureBaseline();
metrics.RecordCounter("counter", 100);
metrics.CaptureSnapshot();
metrics.RecordCounter("counter", 2000);
// Act & Assert
metrics.HasDrift("counter", threshold: 1000).Should().BeTrue();
metrics.HasDrift("counter", threshold: 5000).Should().BeFalse();
}
[Fact]
public void StabilityMetrics_MemoryGrowthRate_CalculatesSlope()
{
// Arrange
var metrics = new StabilityMetrics();
metrics.CaptureBaseline();
// Capture multiple snapshots (growth rate requires at least 2)
for (int i = 0; i < 5; i++)
{
metrics.CaptureSnapshot();
}
// Act
var growthRate = metrics.MemoryGrowthRate;
// Assert - just verify it's calculated
growthRate.Should().BeOfType<double>();
}
[Fact]
public void StabilityMetrics_GenerateReport_CreatesValidReport()
{
// Arrange
var metrics = new StabilityMetrics();
metrics.CaptureBaseline();
metrics.CaptureSnapshot();
metrics.RecordCounter("test", 42);
// Act
var report = metrics.GenerateReport();
// Assert
report.Should().NotBeNull();
report.SnapshotCount.Should().Be(2);
report.BaselineMemory.Should().BeGreaterThan(0);
report.GeneratedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
}
[Fact]
public void StabilityReport_ToJson_ProducesValidJson()
{
// Arrange
var metrics = new StabilityMetrics();
metrics.CaptureBaseline();
var report = metrics.GenerateReport();
// Act
var json = report.ToJson();
// Assert
json.Should().Contain("\"snapshotCount\"");
json.Should().Contain("\"baselineMemory\"");
json.Should().Contain("\"hasMemoryLeak\"");
}
[Fact]
public void StabilityReport_Passed_ReturnsTrueWhenNoIssues()
{
// Arrange
var report = new StabilityReport
{
HasMemoryLeak = false,
HasConnectionPoolLeak = false,
DriftingCounters = []
};
// Act & Assert
report.Passed.Should().BeTrue();
}
[Fact]
public void StabilityReport_Passed_ReturnsFalseWhenMemoryLeak()
{
// Arrange
var report = new StabilityReport
{
HasMemoryLeak = true,
HasConnectionPoolLeak = false,
DriftingCounters = []
};
// Act & Assert
report.Passed.Should().BeFalse();
}
#endregion
#region StabilityTestRunner Tests
[Fact]
public async Task StabilityTestRunner_RunIterations_ExecutesScenario()
{
// Arrange
var runner = new StabilityTestRunner
{
Config = new StabilityTestConfig { SnapshotInterval = 5 }
};
var executionCount = 0;
// Act
var report = await runner.RunIterations(
scenario: () =>
{
executionCount++;
return Task.CompletedTask;
},
iterations: 10);
// Assert
executionCount.Should().Be(10);
report.Should().NotBeNull();
}
[Fact]
public async Task StabilityTestRunner_RunIterations_CapturesSnapshots()
{
// Arrange
var runner = new StabilityTestRunner
{
Config = new StabilityTestConfig { SnapshotInterval = 2 }
};
// Act
var report = await runner.RunIterations(
scenario: () => Task.CompletedTask,
iterations: 10);
// Assert
report.SnapshotCount.Should().BeGreaterThan(1);
}
[Fact]
public async Task StabilityTestRunner_RunIterations_StopsOnErrorIfConfigured()
{
// Arrange
var runner = new StabilityTestRunner
{
Config = new StabilityTestConfig
{
StopOnError = true,
SnapshotInterval = 1
}
};
var executionCount = 0;
// Act
var report = await runner.RunIterations(
scenario: () =>
{
executionCount++;
if (executionCount == 5)
{
throw new InvalidOperationException("Test error");
}
return Task.CompletedTask;
},
iterations: 100);
// Assert
executionCount.Should().Be(5);
}
[Fact]
public async Task StabilityTestRunner_RunIterations_ContinuesOnErrorIfNotConfigured()
{
// Arrange
var runner = new StabilityTestRunner
{
Config = new StabilityTestConfig
{
StopOnError = false,
SnapshotInterval = 10
}
};
var executionCount = 0;
var errorCount = 0;
// Act
var report = await runner.RunIterations(
scenario: () =>
{
executionCount++;
if (executionCount % 3 == 0)
{
errorCount++;
throw new InvalidOperationException("Test error");
}
return Task.CompletedTask;
},
iterations: 10);
// Assert
executionCount.Should().Be(10);
errorCount.Should().BeGreaterThan(0);
}
[Fact]
public async Task StabilityTestRunner_RunExtended_RunsForDuration()
{
// Arrange
var runner = new StabilityTestRunner
{
Config = new StabilityTestConfig { SnapshotInterval = 100 }
};
var executionCount = 0;
// Act
var report = await runner.RunExtended(
scenario: () =>
{
executionCount++;
return Task.CompletedTask;
},
duration: TimeSpan.FromMilliseconds(100));
// Assert
executionCount.Should().BeGreaterThan(0);
report.Should().NotBeNull();
}
[Fact]
public async Task StabilityTestRunner_RunExtended_RespectsCancellation()
{
// Arrange
var runner = new StabilityTestRunner();
using var cts = new CancellationTokenSource();
var executionCount = 0;
// Act
cts.CancelAfter(50);
var report = await runner.RunExtended(
scenario: async () =>
{
executionCount++;
await Task.Delay(10);
},
duration: TimeSpan.FromHours(1),
cancellationToken: cts.Token);
// Assert
executionCount.Should().BeLessThan(100);
}
[Fact]
public void StabilityTestRunner_Metrics_ExposesUnderlyingMetrics()
{
// Arrange
var runner = new StabilityTestRunner();
// Act & Assert
runner.Metrics.Should().NotBeNull();
runner.Metrics.Should().BeOfType<StabilityMetrics>();
}
#endregion
#region StabilityTestConfig Tests
[Fact]
public void StabilityTestConfig_Defaults_AreReasonable()
{
// Arrange & Act
var config = new StabilityTestConfig();
// Assert
config.SnapshotInterval.Should().Be(100);
config.MemoryLeakThresholdPercent.Should().Be(10);
config.MaxConnectionPoolLeaks.Should().Be(0);
config.StopOnError.Should().BeFalse();
config.IterationDelay.Should().Be(TimeSpan.Zero);
}
#endregion
}

View File

@@ -0,0 +1,360 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Text.RegularExpressions;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using StellaOps.TestKit.Observability;
using Xunit;
namespace StellaOps.TestKit.Tests;
/// <summary>
/// Unit tests for observability contract assertions
/// (<see cref="OTelContractAssert"/>, <see cref="LogContractAssert"/>,
/// <see cref="MetricsContractAssert"/>, <see cref="ContractViolationException"/>).
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class ObservabilityContractTests
{
    // Shared email-detection pattern used by the PII tests. Note the TLD class is
    // [A-Za-z], not [A-Z|a-z]: inside a character class '|' is a literal pipe,
    // so the original pattern would also match stray '|' characters.
    private static readonly Regex EmailPattern =
        new(@"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b");

    #region OTelContractAssert Tests
    [Fact]
    public void HasRequiredSpans_AllPresent_NoException()
    {
        using var source = new ActivitySource("TestSource");
        using var capture = new OtelCapture("TestSource");
        using (source.StartActivity("Span1")) { }
        using (source.StartActivity("Span2")) { }
        var act = () => OTelContractAssert.HasRequiredSpans(capture, "Span1", "Span2");
        act.Should().NotThrow();
    }
    [Fact]
    public void HasRequiredSpans_Missing_ThrowsContractViolation()
    {
        using var source = new ActivitySource("TestSource2");
        using var capture = new OtelCapture("TestSource2");
        using (source.StartActivity("Span1")) { }
        var act = () => OTelContractAssert.HasRequiredSpans(capture, "Span1", "MissingSpan");
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*MissingSpan*");
    }
    [Fact]
    public void SpanHasAttributes_AllPresent_NoException()
    {
        using var source = new ActivitySource("TestSource3");
        using var capture = new OtelCapture("TestSource3");
        using (var activity = source.StartActivity("TestSpan"))
        {
            // StartActivity returns null when no listener samples it, hence '?.'.
            activity?.SetTag("user_id", "123");
            activity?.SetTag("tenant_id", "acme");
        }
        var span = capture.CapturedActivities.First();
        var act = () => OTelContractAssert.SpanHasAttributes(span, "user_id", "tenant_id");
        act.Should().NotThrow();
    }
    [Fact]
    public void SpanHasAttributes_Missing_ThrowsContractViolation()
    {
        using var source = new ActivitySource("TestSource4");
        using var capture = new OtelCapture("TestSource4");
        using (var activity = source.StartActivity("TestSpan"))
        {
            activity?.SetTag("user_id", "123");
        }
        var span = capture.CapturedActivities.First();
        var act = () => OTelContractAssert.SpanHasAttributes(span, "user_id", "missing_attr");
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*missing_attr*");
    }
    [Fact]
    public void AttributeCardinality_WithinThreshold_NoException()
    {
        using var source = new ActivitySource("TestSource5");
        using var capture = new OtelCapture("TestSource5");
        for (int i = 0; i < 5; i++)
        {
            using (var activity = source.StartActivity($"Span{i}"))
            {
                activity?.SetTag("status", i % 3 == 0 ? "ok" : "error"); // 2 unique values
            }
        }
        var act = () => OTelContractAssert.AttributeCardinality(capture, "status", maxCardinality: 10);
        act.Should().NotThrow();
    }
    [Fact]
    public void AttributeCardinality_ExceedsThreshold_ThrowsContractViolation()
    {
        using var source = new ActivitySource("TestSource6");
        using var capture = new OtelCapture("TestSource6");
        for (int i = 0; i < 10; i++)
        {
            using (var activity = source.StartActivity($"Span{i}"))
            {
                activity?.SetTag("request_id", $"id-{i}"); // 10 unique values
            }
        }
        var act = () => OTelContractAssert.AttributeCardinality(capture, "request_id", maxCardinality: 5);
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*cardinality*exceeds*");
    }
    #endregion
    #region LogContractAssert Tests
    [Fact]
    public void HasRequiredFields_AllPresent_NoException()
    {
        var record = new CapturedLogRecord
        {
            LogLevel = LogLevel.Information,
            Message = "Test message",
            StateValues = new Dictionary<string, object?>
            {
                ["CorrelationId"] = "abc-123",
                ["TenantId"] = "acme"
            }
        };
        var act = () => LogContractAssert.HasRequiredFields(record, "CorrelationId", "TenantId");
        act.Should().NotThrow();
    }
    [Fact]
    public void HasRequiredFields_Missing_ThrowsContractViolation()
    {
        var record = new CapturedLogRecord
        {
            LogLevel = LogLevel.Information,
            Message = "Test message",
            StateValues = new Dictionary<string, object?>
            {
                ["CorrelationId"] = "abc-123"
            }
        };
        var act = () => LogContractAssert.HasRequiredFields(record, "CorrelationId", "MissingField");
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*MissingField*");
    }
    [Fact]
    public void NoSensitiveData_Clean_NoException()
    {
        var records = new[]
        {
            new CapturedLogRecord
            {
                LogLevel = LogLevel.Information,
                Message = "User logged in successfully",
                StateValues = new Dictionary<string, object?>
                {
                    ["UserId"] = "user-123"
                }
            }
        };
        var piiPatterns = new[] { EmailPattern };
        var act = () => LogContractAssert.NoSensitiveData(records, piiPatterns);
        act.Should().NotThrow();
    }
    [Fact]
    public void NoSensitiveData_ContainsEmail_ThrowsContractViolation()
    {
        var records = new[]
        {
            new CapturedLogRecord
            {
                LogLevel = LogLevel.Information,
                Message = "User test@example.com logged in",
                StateValues = new Dictionary<string, object?>()
            }
        };
        var piiPatterns = new[] { EmailPattern };
        var act = () => LogContractAssert.NoSensitiveData(records, piiPatterns);
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*PII*");
    }
    [Fact]
    public void LogLevelAppropriate_WithinRange_NoException()
    {
        var record = new CapturedLogRecord
        {
            LogLevel = LogLevel.Warning,
            Message = "Test warning"
        };
        var act = () => LogContractAssert.LogLevelAppropriate(record, LogLevel.Information, LogLevel.Error);
        act.Should().NotThrow();
    }
    [Fact]
    public void LogLevelAppropriate_OutsideRange_ThrowsContractViolation()
    {
        var record = new CapturedLogRecord
        {
            LogLevel = LogLevel.Critical,
            Message = "Critical error"
        };
        var act = () => LogContractAssert.LogLevelAppropriate(record, LogLevel.Information, LogLevel.Warning);
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*Critical*outside*range*");
    }
    #endregion
    #region MetricsContractAssert Tests
    [Fact]
    public void MetricExists_Present_NoException()
    {
        using var meter = new Meter("TestMeter1");
        using var capture = new MetricsCapture("TestMeter1");
        var counter = meter.CreateCounter<long>("test_requests_total");
        counter.Add(1);
        var act = () => MetricsContractAssert.MetricExists(capture, "test_requests_total");
        act.Should().NotThrow();
    }
    [Fact]
    public void MetricExists_Missing_ThrowsContractViolation()
    {
        using var meter = new Meter("TestMeter2");
        using var capture = new MetricsCapture("TestMeter2");
        var counter = meter.CreateCounter<long>("some_other_metric");
        counter.Add(1);
        var act = () => MetricsContractAssert.MetricExists(capture, "missing_metric");
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*missing_metric*not found*");
    }
    [Fact]
    public void LabelCardinalityBounded_WithinThreshold_NoException()
    {
        using var meter = new Meter("TestMeter3");
        using var capture = new MetricsCapture("TestMeter3");
        var counter = meter.CreateCounter<long>("http_requests_total");
        counter.Add(1, new KeyValuePair<string, object?>("method", "GET"));
        counter.Add(1, new KeyValuePair<string, object?>("method", "POST"));
        var act = () => MetricsContractAssert.LabelCardinalityBounded(capture, "http_requests_total", maxLabels: 10);
        act.Should().NotThrow();
    }
    [Fact]
    public void LabelCardinalityBounded_ExceedsThreshold_ThrowsContractViolation()
    {
        using var meter = new Meter("TestMeter4");
        using var capture = new MetricsCapture("TestMeter4");
        var counter = meter.CreateCounter<long>("requests_by_user");
        for (int i = 0; i < 10; i++)
        {
            counter.Add(1, new KeyValuePair<string, object?>("user_id", $"user-{i}"));
        }
        var act = () => MetricsContractAssert.LabelCardinalityBounded(capture, "requests_by_user", maxLabels: 5);
        act.Should().Throw<ContractViolationException>()
            .WithMessage("*cardinality*exceeds*");
    }
    [Fact]
    public void CounterMonotonic_AlwaysIncreasing_NoException()
    {
        using var meter = new Meter("TestMeter5");
        using var capture = new MetricsCapture("TestMeter5");
        var counter = meter.CreateCounter<long>("monotonic_counter");
        counter.Add(1);
        counter.Add(2);
        counter.Add(3);
        var act = () => MetricsContractAssert.CounterMonotonic(capture, "monotonic_counter");
        act.Should().NotThrow();
    }
    [Fact]
    public void GaugeInBounds_WithinRange_NoException()
    {
        using var meter = new Meter("TestMeter6");
        using var capture = new MetricsCapture("TestMeter6");
        var gauge = meter.CreateObservableGauge("memory_usage_bytes", () => 500);
        // Force a measurement
        capture.GetValues("memory_usage_bytes");
        // This test validates the API structure - actual observable gauge testing
        // requires meter listener callbacks which are triggered asynchronously
        var act = () => MetricsContractAssert.GaugeInBounds(capture, "memory_usage_bytes", 0, 1000);
        act.Should().NotThrow();
    }
    #endregion
    #region ContractViolationException Tests
    [Fact]
    public void ContractViolationException_ContainsMessage()
    {
        var ex = new ContractViolationException("Test violation");
        ex.Message.Should().Be("Test violation");
    }
    [Fact]
    public void ContractViolationException_WithInnerException()
    {
        var inner = new InvalidOperationException("Inner error");
        var ex = new ContractViolationException("Outer error", inner);
        ex.Message.Should().Be("Outer error");
        ex.InnerException.Should().Be(inner);
    }
    #endregion
}