sln build fix (again), test fixes, audit work and doctors work

This commit is contained in:
master
2026-01-12 22:15:51 +02:00
parent 9873f80830
commit 9330c64349
812 changed files with 48051 additions and 3891 deletions

View File

@@ -0,0 +1,236 @@
// -----------------------------------------------------------------------------
// ContractSpecDiffTests.cs
// Tests that verify OpenAPI specifications match code implementations
// Sprint: Testing Enhancement Advisory - Phase 1.1
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Architecture.Contracts.Tests.Infrastructure;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Architecture.Contracts.Tests;
/// <summary>
/// Contract specification diff tests.
/// Verifies that OpenAPI specifications match actual code implementations.
/// </summary>
[Trait("Category", TestCategories.Architecture)]
[Trait("Category", TestCategories.Contract)]
public class ContractSpecDiffTests
{
    private static readonly string RepoRoot = FindRepoRoot();
    private static readonly string DocsApiPath = Path.Combine(RepoRoot, "docs", "api");
    private static readonly string SrcPath = Path.Combine(RepoRoot, "src");

    /// <summary>
    /// Verifies that all OpenAPI spec files can be parsed without errors.
    /// </summary>
    [Fact]
    public void AllOpenApiSpecs_CanBeParsed()
    {
        // Arrange
        var specFiles = GetOpenApiSpecFiles();

        // Act & Assert
        foreach (var specFile in specFiles)
        {
            var action = () => OpenApiParser.Parse(specFile);
            action.Should().NotThrow($"Spec file {Path.GetFileName(specFile)} should parse successfully");
        }
    }

    /// <summary>
    /// Verifies that OpenAPI specs contain required metadata (info.title and info.version).
    /// </summary>
    [Fact]
    public void AllOpenApiSpecs_HaveRequiredMetadata()
    {
        // Arrange
        var specFiles = GetOpenApiSpecFiles();

        // Act & Assert
        foreach (var specFile in specFiles)
        {
            var spec = OpenApiParser.Parse(specFile);
            spec.Title.Should().NotBeNullOrWhiteSpace(
                $"Spec {Path.GetFileName(specFile)} should have a title");
            spec.Version.Should().NotBeNullOrWhiteSpace(
                $"Spec {Path.GetFileName(specFile)} should have a version");
        }
    }

    /// <summary>
    /// Verifies that discovered endpoints have proper response codes defined.
    /// </summary>
    [Fact]
    public void DiscoveredEndpoints_HaveResponseCodes()
    {
        // Arrange
        var endpoints = DiscoverAllEndpoints();

        // Act & Assert
        foreach (var endpoint in endpoints)
        {
            // Skip if no Produces() annotations found (may be using different pattern)
            if (endpoint.ResponseCodes.IsEmpty)
            {
                continue;
            }
            endpoint.ResponseCodes.Should().Contain(
                c => c >= 200 && c < 300,
                $"Endpoint {endpoint.Method} {endpoint.Path} should have a success response code");
        }
    }

    /// <summary>
    /// Verifies endpoint discovery works correctly on known endpoint files.
    /// </summary>
    [Fact]
    public void EndpointDiscovery_FindsKnownEndpoints()
    {
        // Arrange
        var scannerEndpointsFile = Path.Combine(
            SrcPath, "Scanner", "StellaOps.Scanner.WebService", "Endpoints", "ScanEndpoints.cs");
        if (!File.Exists(scannerEndpointsFile))
        {
            // Skip if file doesn't exist (may be different structure)
            return;
        }

        // Act
        var endpoints = EndpointDiscoverer.DiscoverFromFile(scannerEndpointsFile);

        // Assert
        endpoints.Should().NotBeEmpty("Scanner endpoints file should contain discoverable endpoints");
        endpoints.Should().Contain(e => e.Method == "POST", "Should find POST endpoint for scan submission");
        endpoints.Should().Contain(e => e.Method == "GET", "Should find GET endpoints for status");
    }

    /// <summary>
    /// Verifies that OpenAPI specs have unique operation IDs.
    /// </summary>
    [Fact]
    public void OpenApiSpecs_HaveUniqueOperationIds()
    {
        // Arrange
        var specs = LoadAllSpecs();
        var allOperationIds = new Dictionary<string, List<string>>();

        // Act
        foreach (var spec in specs)
        {
            foreach (var endpoint in spec.Endpoints.Where(e => !string.IsNullOrEmpty(e.OperationId)))
            {
                var opId = endpoint.OperationId!;
                // Single TryGetValue lookup instead of ContainsKey + indexer (double hash lookup).
                if (!allOperationIds.TryGetValue(opId, out var usages))
                {
                    usages = [];
                    allOperationIds[opId] = usages;
                }
                usages.Add($"{spec.SourcePath}: {endpoint.Method} {endpoint.Path}");
            }
        }

        // Assert
        var duplicates = allOperationIds.Where(kv => kv.Value.Count > 1).ToList();
        duplicates.Should().BeEmpty(
            $"Operation IDs should be unique. Duplicates found: {string.Join(", ", duplicates.Select(d => d.Key))}");
    }

    /// <summary>
    /// Generates a spec-diff report (informational, does not fail).
    /// This test produces a report showing differences between specs and code.
    /// </summary>
    [Fact]
    public void SpecDiff_GeneratesReport()
    {
        // Arrange
        var specs = LoadAllSpecs();
        var endpoints = DiscoverAllEndpoints();

        // Act
        var result = SpecDiffComparer.Compare(specs, endpoints);
        var report = SpecDiffComparer.GenerateReport(result);

        // Assert - just verify report was generated
        report.Should().NotBeNullOrEmpty("Spec diff report should be generated");

        // Output for visibility. NOTE(review): xUnit does not capture Console output by
        // default; consider injecting ITestOutputHelper if the report must appear in test logs.
        Console.WriteLine(report);
    }

    #region Helper Methods

    /// <summary>
    /// Walks upward from the current directory looking for a repo marker
    /// (a .git directory or a CLAUDE.md file).
    /// </summary>
    private static string FindRepoRoot()
    {
        var current = Directory.GetCurrentDirectory();
        while (current is not null)
        {
            if (Directory.Exists(Path.Combine(current, ".git")) ||
                File.Exists(Path.Combine(current, "CLAUDE.md")))
            {
                return current;
            }
            current = Directory.GetParent(current)?.FullName;
        }
        // Fallback: assume we're in a test output directory (bin/<config>/<tfm> under the project)
        return Path.GetFullPath(Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "..", ".."));
    }

    /// <summary>
    /// Finds candidate OpenAPI YAML files under docs/api, sorted for deterministic runs
    /// (Directory.GetFiles ordering is unspecified).
    /// </summary>
    private static ImmutableArray<string> GetOpenApiSpecFiles()
    {
        if (!Directory.Exists(DocsApiPath))
        {
            return [];
        }
        return Directory.GetFiles(DocsApiPath, "*.yaml", SearchOption.AllDirectories)
            .Where(f => f.Contains("openapi", StringComparison.OrdinalIgnoreCase) ||
                        Path.GetFileName(f).EndsWith("-api.yaml", StringComparison.OrdinalIgnoreCase))
            .OrderBy(f => f, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>
    /// Parses every spec file, skipping those that fail to parse
    /// (parse failures are asserted separately in AllOpenApiSpecs_CanBeParsed).
    /// </summary>
    private static ImmutableArray<OpenApiSpec> LoadAllSpecs()
    {
        var specFiles = GetOpenApiSpecFiles();
        var specs = new List<OpenApiSpec>();
        foreach (var file in specFiles)
        {
            try
            {
                specs.Add(OpenApiParser.Parse(file));
            }
            catch
            {
                // Deliberate best-effort: unparseable specs are reported by the parse test.
            }
        }
        return [.. specs];
    }

    /// <summary>
    /// Runs static endpoint discovery over every *WebService* directory under src.
    /// </summary>
    private static ImmutableArray<DiscoveredEndpoint> DiscoverAllEndpoints()
    {
        // Guard: Directory.GetDirectories throws DirectoryNotFoundException if src is absent
        // (e.g. when the repo-root fallback path is wrong).
        if (!Directory.Exists(SrcPath))
        {
            return [];
        }
        var allEndpoints = new List<DiscoveredEndpoint>();
        var webServiceDirs = Directory.GetDirectories(SrcPath, "*WebService*", SearchOption.AllDirectories);
        foreach (var dir in webServiceDirs)
        {
            allEndpoints.AddRange(EndpointDiscoverer.DiscoverFromDirectory(dir));
        }
        return [.. allEndpoints];
    }

    #endregion
}

View File

@@ -0,0 +1,183 @@
// -----------------------------------------------------------------------------
// EndpointDiscoverer.cs
// Discovers API endpoints from source code using static analysis
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Globalization;
using System.Text.RegularExpressions;
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
/// <summary>
/// Discovers API endpoints from C# source files using regex-based static analysis.
/// </summary>
public static partial class EndpointDiscoverer
{
    // Regex patterns for endpoint discovery.
    // Matches ".MapGet("/path"" etc.; group 1 = HTTP verb, group 2 = route template.
    [GeneratedRegex(@"\.Map(Get|Post|Put|Delete|Patch)\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex MapMethodRegex();

    // Matches ".WithName("EndpointName""; group 1 = endpoint name.
    [GeneratedRegex(@"\.WithName\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex WithNameRegex();

    // Matches ".Produces(StatusCodes.Status200..." or ".Produces<T>(Status404...";
    // group 1 = numeric status code. Numeric-literal overloads like ".Produces(200)"
    // do not match this pattern and are therefore not discovered.
    [GeneratedRegex(@"\.Produces(?:<[^>]+>)?\s*\(\s*(?:StatusCodes\.)?Status(\d+)", RegexOptions.Compiled)]
    private static partial Regex ProducesRegex();

    // Matches a ".RequireAuthorization(" call anywhere in the chained statement.
    [GeneratedRegex(@"\.RequireAuthorization\s*\(", RegexOptions.Compiled)]
    private static partial Regex RequireAuthRegex();

    // Matches ".MapGroup("/base"" used to prefix routes; group 1 = group path.
    [GeneratedRegex(@"\.MapGroup\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex MapGroupRegex();

    /// <summary>
    /// Discovers endpoints from all C# files in a directory.
    /// Only files whose path contains "Endpoints" or "Controllers" are scanned.
    /// </summary>
    /// <param name="directory">Directory to scan recursively; missing directories yield an empty result.</param>
    public static ImmutableArray<DiscoveredEndpoint> DiscoverFromDirectory(string directory)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(directory);
        if (!Directory.Exists(directory))
        {
            return [];
        }
        var endpoints = new List<DiscoveredEndpoint>();
        var csFiles = Directory.GetFiles(directory, "*.cs", SearchOption.AllDirectories)
            .Where(f => f.Contains("Endpoints", StringComparison.OrdinalIgnoreCase) ||
                        f.Contains("Controllers", StringComparison.OrdinalIgnoreCase));
        foreach (var file in csFiles)
        {
            endpoints.AddRange(DiscoverFromFile(file));
        }
        return [.. endpoints];
    }

    /// <summary>
    /// Discovers endpoints from a single C# source file via line-by-line regex scanning.
    /// This is a heuristic, not a full parser: each .MapX("...") call starts an endpoint,
    /// and chained calls on the following lines supply name, response codes, and auth info.
    /// </summary>
    /// <param name="filePath">Path to the .cs file; a missing file yields an empty result.</param>
    public static ImmutableArray<DiscoveredEndpoint> DiscoverFromFile(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
        if (!File.Exists(filePath))
        {
            return [];
        }
        var content = File.ReadAllText(filePath);
        var lines = content.Split('\n');
        var endpoints = new List<DiscoveredEndpoint>();
        // Try to find the base group path. Only the FIRST MapGroup match in the file is
        // used as the prefix for every endpoint; files with multiple groups are not
        // fully modeled by this heuristic.
        var baseGroupMatch = MapGroupRegex().Match(content);
        var baseGroup = baseGroupMatch.Success ? baseGroupMatch.Groups[1].Value : "";
        for (var lineNum = 0; lineNum < lines.Length; lineNum++)
        {
            var line = lines[lineNum];
            var mapMatch = MapMethodRegex().Match(line);
            if (!mapMatch.Success)
            {
                continue;
            }
            var method = mapMatch.Groups[1].Value.ToUpperInvariant();
            var path = mapMatch.Groups[2].Value;
            // Look ahead for chained methods (Produces, WithName, RequireAuthorization)
            var chainedContent = GetChainedContent(lines, lineNum);
            var endpointName = ExtractWithName(chainedContent);
            var responseCodes = ExtractProducesCodes(chainedContent);
            var requiresAuth = RequireAuthRegex().IsMatch(chainedContent);
            // Combine base group with path
            var fullPath = CombinePaths(baseGroup, path);
            endpoints.Add(new DiscoveredEndpoint
            {
                Method = method,
                Path = fullPath,
                EndpointName = endpointName,
                SourceFile = filePath,
                // 1-based line number for human-readable reports.
                SourceLine = lineNum + 1,
                ResponseCodes = [.. responseCodes.OrderBy(c => c)],
                RequiresAuth = requiresAuth
            });
        }
        return [.. endpoints];
    }

    /// <summary>
    /// Collects the text of the chained statement starting at <paramref name="startLine"/>,
    /// scanning at most 15 lines ahead. Parentheses are counted per character to detect
    /// statement boundaries; parens inside string literals are NOT accounted for, so
    /// route strings containing "(" or ")" can skew the count.
    /// </summary>
    private static string GetChainedContent(string[] lines, int startLine)
    {
        var builder = new System.Text.StringBuilder();
        var openParens = 0;
        var started = false;
        for (var i = startLine; i < Math.Min(startLine + 15, lines.Length); i++)
        {
            var line = lines[i];
            builder.AppendLine(line);
            foreach (var ch in line)
            {
                if (ch == '(') { openParens++; started = true; }
                if (ch == ')') { openParens--; }
            }
            // Stop if we hit a semicolon at end of statement
            // (TrimEnd also strips a trailing '\r' on CRLF files).
            if (started && line.TrimEnd().EndsWith(';'))
            {
                break;
            }
            // Stop if we've closed all parens and see another Map call
            // (Math.Min clamps the look-ahead on the last line).
            if (started && openParens <= 0 && MapMethodRegex().IsMatch(lines[Math.Min(i + 1, lines.Length - 1)]))
            {
                break;
            }
        }
        return builder.ToString();
    }

    /// <summary>
    /// Returns the first .WithName("...") argument in the chained statement, or null.
    /// </summary>
    private static string? ExtractWithName(string content)
    {
        var match = WithNameRegex().Match(content);
        return match.Success ? match.Groups[1].Value : null;
    }

    /// <summary>
    /// Returns every status code declared via .Produces(...) in the chained statement,
    /// in source order (callers sort as needed).
    /// </summary>
    private static List<int> ExtractProducesCodes(string content)
    {
        var codes = new List<int>();
        var matches = ProducesRegex().Matches(content);
        foreach (Match match in matches)
        {
            if (int.TryParse(match.Groups[1].Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var code))
            {
                codes.Add(code);
            }
        }
        return codes;
    }

    /// <summary>
    /// Joins a MapGroup prefix and a route with exactly one '/' between them.
    /// An empty base path returns the route unchanged.
    /// </summary>
    private static string CombinePaths(string basePath, string path)
    {
        if (string.IsNullOrEmpty(basePath))
        {
            return path;
        }
        basePath = basePath.TrimEnd('/');
        path = path.TrimStart('/');
        return $"{basePath}/{path}";
    }
}

View File

@@ -0,0 +1,149 @@
// -----------------------------------------------------------------------------
// OpenApiParser.cs
// Parses OpenAPI YAML specifications into structured format
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Globalization;
using YamlDotNet.RepresentationModel;
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
/// <summary>
/// Parses OpenAPI 3.x YAML specifications.
/// </summary>
public static class OpenApiParser
{
    // HTTP verbs recognised as operation keys under a path item (lower-case, as they
    // appear as YAML keys in OpenAPI 3.x documents).
    private static readonly string[] HttpMethods = ["get", "post", "put", "delete", "patch", "options", "head"];

    /// <summary>
    /// Parses an OpenAPI specification from a YAML file.
    /// </summary>
    /// <param name="filePath">Path to the YAML spec file.</param>
    /// <exception cref="InvalidOperationException">The file is empty or its root is not a mapping.</exception>
    public static OpenApiSpec Parse(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
        var content = File.ReadAllText(filePath);
        return ParseContent(content, filePath);
    }

    /// <summary>
    /// Parses OpenAPI specification from YAML content.
    /// </summary>
    /// <param name="yamlContent">Raw YAML text.</param>
    /// <param name="sourcePath">Path recorded on the result and used in error messages.</param>
    public static OpenApiSpec ParseContent(string yamlContent, string sourcePath)
    {
        var yaml = new YamlStream();
        using var reader = new StringReader(yamlContent);
        yaml.Load(reader);
        if (yaml.Documents.Count == 0)
        {
            throw new InvalidOperationException($"No YAML documents found in {sourcePath}");
        }
        // Pattern check instead of a hard cast: a bare scalar or sequence root would
        // otherwise surface as an unhelpful InvalidCastException.
        if (yaml.Documents[0].RootNode is not YamlMappingNode root)
        {
            throw new InvalidOperationException($"Root node of {sourcePath} is not a YAML mapping");
        }
        var info = GetMappingNode(root, "info");
        // Fall back to defaults when info/title/version are absent; callers assert on these.
        var title = GetScalarValue(info, "title") ?? "Untitled";
        var version = GetScalarValue(info, "version") ?? "1.0.0";
        var endpoints = new List<OpenApiEndpoint>();
        if (root.Children.TryGetValue(new YamlScalarNode("paths"), out var pathsNode) &&
            pathsNode is YamlMappingNode paths)
        {
            foreach (var pathEntry in paths.Children)
            {
                // Tolerate non-scalar keys rather than throwing on a cast.
                var path = (pathEntry.Key as YamlScalarNode)?.Value ?? "";
                if (pathEntry.Value is YamlMappingNode pathItem)
                {
                    endpoints.AddRange(ParsePathItem(path, pathItem));
                }
            }
        }
        return new OpenApiSpec
        {
            SourcePath = sourcePath,
            Title = title,
            Version = version,
            Endpoints = [.. endpoints]
        };
    }

    /// <summary>
    /// Yields one <see cref="OpenApiEndpoint"/> per HTTP-method key under a path item,
    /// skipping non-method keys (parameters, summary, ...) and malformed entries.
    /// </summary>
    private static IEnumerable<OpenApiEndpoint> ParsePathItem(string path, YamlMappingNode pathItem)
    {
        foreach (var methodEntry in pathItem.Children)
        {
            var methodName = (methodEntry.Key as YamlScalarNode)?.Value?.ToLowerInvariant() ?? "";
            if (!HttpMethods.Contains(methodName))
            {
                continue;
            }
            if (methodEntry.Value is not YamlMappingNode operation)
            {
                continue;
            }
            var operationId = GetScalarValue(operation, "operationId");
            var summary = GetScalarValue(operation, "summary");
            var responseCodes = new List<int>();
            if (operation.Children.TryGetValue(new YamlScalarNode("responses"), out var responsesNode) &&
                responsesNode is YamlMappingNode responses)
            {
                foreach (var responseEntry in responses.Children)
                {
                    var codeStr = (responseEntry.Key as YamlScalarNode)?.Value ?? "";
                    // Non-numeric response keys (e.g. "default") are intentionally skipped.
                    if (int.TryParse(codeStr, NumberStyles.Integer, CultureInfo.InvariantCulture, out var code))
                    {
                        responseCodes.Add(code);
                    }
                }
            }
            var requiresAuth = false;
            if (operation.Children.TryGetValue(new YamlScalarNode("security"), out var securityNode))
            {
                // A non-empty security array means the operation requires auth;
                // an explicit empty array ("security: []") disables it.
                requiresAuth = securityNode is YamlSequenceNode { Children.Count: > 0 };
            }
            yield return new OpenApiEndpoint
            {
                Method = methodName.ToUpperInvariant(),
                Path = path,
                OperationId = operationId,
                Summary = summary,
                ResponseCodes = [.. responseCodes.OrderBy(c => c)],
                RequiresAuth = requiresAuth
            };
        }
    }

    /// <summary>
    /// Returns the child mapping node under <paramref name="key"/>, or null if absent
    /// or not a mapping.
    /// </summary>
    private static YamlMappingNode? GetMappingNode(YamlMappingNode parent, string key)
    {
        if (parent.Children.TryGetValue(new YamlScalarNode(key), out var node) &&
            node is YamlMappingNode mapping)
        {
            return mapping;
        }
        return null;
    }

    /// <summary>
    /// Returns the scalar value under <paramref name="key"/>, or null if the parent is
    /// null, the key is absent, or the value is not a scalar.
    /// </summary>
    private static string? GetScalarValue(YamlMappingNode? parent, string key)
    {
        if (parent is null)
        {
            return null;
        }
        if (parent.Children.TryGetValue(new YamlScalarNode(key), out var node) &&
            node is YamlScalarNode scalar)
        {
            return scalar.Value;
        }
        return null;
    }
}

View File

@@ -0,0 +1,193 @@
// -----------------------------------------------------------------------------
// OpenApiSpec.cs
// Contract spec-diff infrastructure for comparing OpenAPI specs against code
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
/// <summary>
/// Represents a parsed OpenAPI specification.
/// </summary>
public sealed record OpenApiSpec
{
    /// <summary>
    /// Filesystem path the specification was loaded from.
    /// </summary>
    public required string SourcePath { get; init; }

    /// <summary>
    /// API title taken from the spec's info section.
    /// </summary>
    public required string Title { get; init; }

    /// <summary>
    /// API version taken from the spec's info section.
    /// </summary>
    public required string Version { get; init; }

    /// <summary>
    /// Every endpoint parsed from the spec's paths section.
    /// </summary>
    public required ImmutableArray<OpenApiEndpoint> Endpoints { get; init; }
}
/// <summary>
/// Represents a single endpoint definition from an OpenAPI spec.
/// </summary>
public sealed record OpenApiEndpoint
{
    // Compiled once and shared across calls instead of invoking the static
    // Regex.Replace(pattern) overload on every ToComparisonKey() invocation.
    // Rewrites each "{param}" segment to "{*}" so differently named path
    // parameters compare as equal.
    private static readonly System.Text.RegularExpressions.Regex PathParameterRegex =
        new(@"\{[^}]+\}", System.Text.RegularExpressions.RegexOptions.Compiled);

    /// <summary>
    /// The HTTP method (GET, POST, PUT, DELETE, PATCH).
    /// </summary>
    public required string Method { get; init; }

    /// <summary>
    /// The path pattern (e.g. /api/v1/scans/{scanId}).
    /// </summary>
    public required string Path { get; init; }

    /// <summary>
    /// Operation ID if specified.
    /// </summary>
    public string? OperationId { get; init; }

    /// <summary>
    /// Summary description of the endpoint.
    /// </summary>
    public string? Summary { get; init; }

    /// <summary>
    /// Expected response status codes.
    /// </summary>
    public required ImmutableArray<int> ResponseCodes { get; init; }

    /// <summary>
    /// Whether the endpoint requires authentication.
    /// </summary>
    public bool RequiresAuth { get; init; }

    /// <summary>
    /// Creates a normalized "METHOD /path" key for comparison against code endpoints.
    /// </summary>
    public string ToComparisonKey() => $"{Method.ToUpperInvariant()} {NormalizePath(Path)}";

    // Normalize path parameter syntax: {param} -> {*}
    private static string NormalizePath(string path) => PathParameterRegex.Replace(path, "{*}");
}
/// <summary>
/// Represents an endpoint discovered from code via reflection or static analysis.
/// </summary>
public sealed record DiscoveredEndpoint
{
    // Compiled once and shared across calls instead of invoking the static
    // Regex.Replace(pattern) overload on every ToComparisonKey() invocation.
    private static readonly System.Text.RegularExpressions.Regex PathParameterRegex =
        new(@"\{[^}]+\}", System.Text.RegularExpressions.RegexOptions.Compiled);

    /// <summary>
    /// The HTTP method (GET, POST, PUT, DELETE, PATCH).
    /// </summary>
    public required string Method { get; init; }

    /// <summary>
    /// The route pattern from code.
    /// </summary>
    public required string Path { get; init; }

    /// <summary>
    /// The endpoint name (from WithName()).
    /// </summary>
    public string? EndpointName { get; init; }

    /// <summary>
    /// The source file where this endpoint is defined.
    /// </summary>
    public required string SourceFile { get; init; }

    /// <summary>
    /// Line number in source file (1-based).
    /// </summary>
    public int SourceLine { get; init; }

    /// <summary>
    /// Expected response status codes from Produces() attributes.
    /// </summary>
    public required ImmutableArray<int> ResponseCodes { get; init; }

    /// <summary>
    /// Whether the endpoint requires authorization.
    /// </summary>
    public bool RequiresAuth { get; init; }

    /// <summary>
    /// Creates a normalized "METHOD /path" key for comparison against spec endpoints.
    /// </summary>
    public string ToComparisonKey() => $"{Method.ToUpperInvariant()} {NormalizePath(Path)}";

    // Normalize path parameter syntax: {param} -> {*}
    private static string NormalizePath(string path) => PathParameterRegex.Replace(path, "{*}");
}
/// <summary>
/// Result of comparing spec endpoints against discovered endpoints.
/// </summary>
public sealed record SpecDiffResult
{
    /// <summary>
    /// Spec endpoints that have no matching implementation in code.
    /// </summary>
    public required ImmutableArray<OpenApiEndpoint> OrphanedSpecs { get; init; }

    /// <summary>
    /// Code endpoints that appear in no specification.
    /// </summary>
    public required ImmutableArray<DiscoveredEndpoint> UndocumentedEndpoints { get; init; }

    /// <summary>
    /// Matched endpoints whose response codes disagree between spec and code.
    /// </summary>
    public required ImmutableArray<ResponseCodeMismatch> ResponseMismatches { get; init; }

    /// <summary>
    /// Matched endpoints whose auth requirements disagree between spec and code.
    /// </summary>
    public required ImmutableArray<AuthMismatch> AuthMismatches { get; init; }

    /// <summary>
    /// True when every category above is empty, i.e. specs and code are in sync.
    /// </summary>
    public bool IsClean =>
        OrphanedSpecs.IsEmpty
        && UndocumentedEndpoints.IsEmpty
        && ResponseMismatches.IsEmpty
        && AuthMismatches.IsEmpty;
}
/// <summary>
/// Represents a mismatch in response codes between spec and code.
/// </summary>
public sealed record ResponseCodeMismatch
{
    /// <summary>Normalized "METHOD /path" key identifying the endpoint.</summary>
    public required string EndpointKey { get; init; }

    /// <summary>Response status codes declared in the OpenAPI spec.</summary>
    public required ImmutableArray<int> SpecCodes { get; init; }

    /// <summary>Response status codes discovered in the implementation.</summary>
    public required ImmutableArray<int> CodeCodes { get; init; }
}
/// <summary>
/// Represents a mismatch in auth requirements between spec and code.
/// </summary>
public sealed record AuthMismatch
{
    /// <summary>Normalized "METHOD /path" key identifying the endpoint.</summary>
    public required string EndpointKey { get; init; }

    /// <summary>Whether the OpenAPI spec declares a security requirement for the endpoint.</summary>
    public required bool SpecRequiresAuth { get; init; }

    /// <summary>Whether the code applies authorization to the endpoint.</summary>
    public required bool CodeRequiresAuth { get; init; }
}

View File

@@ -0,0 +1,156 @@
// -----------------------------------------------------------------------------
// SpecDiffComparer.cs
// Compares OpenAPI specifications against discovered endpoints
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
/// <summary>
/// Compares OpenAPI specifications against discovered code endpoints.
/// </summary>
public static class SpecDiffComparer
{
    /// <summary>
    /// Compares a set of OpenAPI specs against discovered code endpoints.
    /// Endpoints are matched by their normalized "METHOD /path" comparison key.
    /// </summary>
    /// <param name="specs">Parsed OpenAPI specifications.</param>
    /// <param name="discovered">Endpoints discovered from source code.</param>
    public static SpecDiffResult Compare(
        IEnumerable<OpenApiSpec> specs,
        IEnumerable<DiscoveredEndpoint> discovered)
    {
        ArgumentNullException.ThrowIfNull(specs);
        ArgumentNullException.ThrowIfNull(discovered);

        // Build the lookups with TryAdd (first occurrence wins) instead of ToDictionary,
        // which throws ArgumentException when two endpoints normalize to the same key
        // (e.g. the same route documented in two spec files, or /a/{id} vs /a/{name}).
        var specEndpoints = new Dictionary<string, OpenApiEndpoint>();
        foreach (var endpoint in specs.SelectMany(s => s.Endpoints))
        {
            specEndpoints.TryAdd(endpoint.ToComparisonKey(), endpoint);
        }
        var codeEndpoints = new Dictionary<string, DiscoveredEndpoint>();
        foreach (var endpoint in discovered)
        {
            codeEndpoints.TryAdd(endpoint.ToComparisonKey(), endpoint);
        }

        var orphanedSpecs = new List<OpenApiEndpoint>();
        var undocumented = new List<DiscoveredEndpoint>();
        var responseMismatches = new List<ResponseCodeMismatch>();
        var authMismatches = new List<AuthMismatch>();

        // Single pass over spec endpoints: an endpoint is either missing from code
        // (orphaned) or matched, in which case response codes and auth are compared.
        foreach (var (key, specEndpoint) in specEndpoints)
        {
            if (!codeEndpoints.TryGetValue(key, out var codeEndpoint))
            {
                orphanedSpecs.Add(specEndpoint);
                continue;
            }
            // Check response codes (both sides store them sorted ascending).
            if (!specEndpoint.ResponseCodes.SequenceEqual(codeEndpoint.ResponseCodes))
            {
                responseMismatches.Add(new ResponseCodeMismatch
                {
                    EndpointKey = key,
                    SpecCodes = specEndpoint.ResponseCodes,
                    CodeCodes = codeEndpoint.ResponseCodes
                });
            }
            // Check auth requirements
            if (specEndpoint.RequiresAuth != codeEndpoint.RequiresAuth)
            {
                authMismatches.Add(new AuthMismatch
                {
                    EndpointKey = key,
                    SpecRequiresAuth = specEndpoint.RequiresAuth,
                    CodeRequiresAuth = codeEndpoint.RequiresAuth
                });
            }
        }

        // Find undocumented endpoints (in code but not in spec)
        foreach (var (key, codeEndpoint) in codeEndpoints)
        {
            if (!specEndpoints.ContainsKey(key))
            {
                undocumented.Add(codeEndpoint);
            }
        }

        return new SpecDiffResult
        {
            OrphanedSpecs = [.. orphanedSpecs],
            UndocumentedEndpoints = [.. undocumented],
            ResponseMismatches = [.. responseMismatches],
            AuthMismatches = [.. authMismatches]
        };
    }

    /// <summary>
    /// Generates a human-readable Markdown diff report from a comparison result.
    /// </summary>
    public static string GenerateReport(SpecDiffResult result)
    {
        ArgumentNullException.ThrowIfNull(result);
        var builder = new System.Text.StringBuilder();
        builder.AppendLine("# Spec-Diff Report");
        builder.AppendLine();
        if (result.IsClean)
        {
            builder.AppendLine("No differences found. Specs and code are in sync.");
            return builder.ToString();
        }
        if (result.OrphanedSpecs.Length > 0)
        {
            builder.AppendLine("## Orphaned Specs (in spec but not in code)");
            builder.AppendLine();
            foreach (var endpoint in result.OrphanedSpecs)
            {
                builder.AppendLine($"- {endpoint.Method} {endpoint.Path}");
            }
            builder.AppendLine();
        }
        if (result.UndocumentedEndpoints.Length > 0)
        {
            builder.AppendLine("## Undocumented Endpoints (in code but not in spec)");
            builder.AppendLine();
            foreach (var endpoint in result.UndocumentedEndpoints)
            {
                builder.AppendLine($"- {endpoint.Method} {endpoint.Path} ({endpoint.SourceFile}:{endpoint.SourceLine})");
            }
            builder.AppendLine();
        }
        if (result.ResponseMismatches.Length > 0)
        {
            builder.AppendLine("## Response Code Mismatches");
            builder.AppendLine();
            foreach (var mismatch in result.ResponseMismatches)
            {
                builder.AppendLine($"- {mismatch.EndpointKey}");
                builder.AppendLine($"  Spec: [{string.Join(", ", mismatch.SpecCodes)}]");
                builder.AppendLine($"  Code: [{string.Join(", ", mismatch.CodeCodes)}]");
            }
            builder.AppendLine();
        }
        if (result.AuthMismatches.Length > 0)
        {
            builder.AppendLine("## Auth Requirement Mismatches");
            builder.AppendLine();
            foreach (var mismatch in result.AuthMismatches)
            {
                builder.AppendLine($"- {mismatch.EndpointKey}");
                builder.AppendLine($"  Spec requires auth: {mismatch.SpecRequiresAuth}");
                builder.AppendLine($"  Code requires auth: {mismatch.CodeRequiresAuth}");
            }
        }
        return builder.ToString();
    }
}

View File

@@ -0,0 +1,312 @@
// -----------------------------------------------------------------------------
// SchemaComplianceTests.cs
// Tests that verify database schemas comply with specification documents
// Sprint: Testing Enhancement Advisory - Phase 1.1
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.RegularExpressions;
using FluentAssertions;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Architecture.Contracts.Tests;
/// <summary>
/// Schema compliance tests.
/// Verifies that database migrations align with specification documents.
/// </summary>
[Trait("Category", TestCategories.Architecture)]
[Trait("Category", TestCategories.Contract)]
public partial class SchemaComplianceTests
{
    private static readonly string RepoRoot = FindRepoRoot();
    private static readonly string DocsDbPath = Path.Combine(RepoRoot, "docs", "db");
    private static readonly string SrcPath = Path.Combine(RepoRoot, "src");

    // Group 1 = optional schema qualifier (including trailing dot), group 2 = table name.
    [GeneratedRegex(@"CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?([a-z_]+\.)?([a-z_]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CreateTableRegex();

    [GeneratedRegex(@"ALTER\s+TABLE\s+(?:IF\s+EXISTS\s+)?([a-z_]+\.)?([a-z_]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex AlterTableRegex();

    /// <summary>
    /// Verifies that database specification document exists.
    /// </summary>
    [Fact]
    public void DatabaseSpecification_Exists()
    {
        // Arrange
        var specPath = Path.Combine(DocsDbPath, "SPECIFICATION.md");

        // Assert
        File.Exists(specPath).Should().BeTrue(
            "Database specification document should exist at docs/db/SPECIFICATION.md");
    }

    /// <summary>
    /// Verifies that all migration files follow naming convention
    /// (leading version number, .sql extension).
    /// </summary>
    [Fact]
    public void MigrationFiles_FollowNamingConvention()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();

        // Act & Assert
        foreach (var file in migrationFiles)
        {
            var fileName = Path.GetFileName(file);
            // Should start with a number (version/sequence)
            fileName.Should().MatchRegex(@"^\d+",
                $"Migration file {fileName} should start with a version number");
            // Should have .sql extension
            Path.GetExtension(file).Should().Be(".sql",
                $"Migration file {fileName} should have .sql extension");
        }
    }

    /// <summary>
    /// Verifies that migrations use schema-qualified table names in
    /// CREATE TABLE and ALTER TABLE statements.
    /// </summary>
    [Fact]
    public void Migrations_UseSchemaQualifiedTableNames()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var violations = new List<string>();

        // Act
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);
            var fileName = Path.GetFileName(file);
            // Check CREATE TABLE statements
            foreach (Match match in CreateTableRegex().Matches(content))
            {
                var schema = match.Groups[1].Value;
                var table = match.Groups[2].Value;
                if (string.IsNullOrEmpty(schema))
                {
                    violations.Add($"{fileName}: CREATE TABLE {table} missing schema qualifier");
                }
            }
            // Check ALTER TABLE statements
            foreach (Match match in AlterTableRegex().Matches(content))
            {
                var schema = match.Groups[1].Value;
                var table = match.Groups[2].Value;
                if (string.IsNullOrEmpty(schema))
                {
                    violations.Add($"{fileName}: ALTER TABLE {table} missing schema qualifier");
                }
            }
        }

        // Assert
        violations.Should().BeEmpty(
            $"All table operations should use schema-qualified names. Violations: {string.Join(", ", violations.Take(10))}");
    }

    /// <summary>
    /// Verifies that migration files are idempotent (use IF NOT EXISTS / IF EXISTS).
    /// Informational only: results are logged, not asserted.
    /// </summary>
    [Fact]
    public void Migrations_AreIdempotent()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var nonIdempotent = new List<string>();

        // Act
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);
            var fileName = Path.GetFileName(file);
            // Check CREATE TABLE without IF NOT EXISTS
            if (Regex.IsMatch(content, @"CREATE\s+TABLE\s+(?!IF\s+NOT\s+EXISTS)", RegexOptions.IgnoreCase))
            {
                nonIdempotent.Add($"{fileName}: CREATE TABLE without IF NOT EXISTS");
            }
            // Check CREATE INDEX without IF NOT EXISTS
            if (Regex.IsMatch(content, @"CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?!IF\s+NOT\s+EXISTS)", RegexOptions.IgnoreCase))
            {
                nonIdempotent.Add($"{fileName}: CREATE INDEX without IF NOT EXISTS");
            }
        }

        // Assert - this is a warning, not a hard failure
        // Some migrations may intentionally not be idempotent
        if (nonIdempotent.Count > 0)
        {
            Console.WriteLine("Warning: Non-idempotent migrations found:");
            foreach (var item in nonIdempotent)
            {
                Console.WriteLine($"  - {item}");
            }
        }
    }

    /// <summary>
    /// Verifies that schema documentation exists for all schemas used in migrations.
    /// Undocumented schemas are logged; only "some schemas exist" is asserted.
    /// </summary>
    [Fact]
    public void SchemaDocumentation_ExistsForAllSchemas()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var schemasUsed = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var schemasDocumented = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Find schemas used in migrations
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);
            // Extract schema names from CREATE SCHEMA
            var createSchemaMatches = Regex.Matches(content, @"CREATE\s+SCHEMA\s+(?:IF\s+NOT\s+EXISTS\s+)?([a-z_]+)", RegexOptions.IgnoreCase);
            foreach (Match match in createSchemaMatches)
            {
                schemasUsed.Add(match.Groups[1].Value);
            }
            // Extract schema names from table operations (group 1 includes the trailing dot)
            foreach (Match match in CreateTableRegex().Matches(content))
            {
                var schema = match.Groups[1].Value.TrimEnd('.');
                if (!string.IsNullOrEmpty(schema))
                {
                    schemasUsed.Add(schema);
                }
            }
        }

        // Find documented schemas (one markdown file per schema under docs/db/schemas)
        var schemaDocsPath = Path.Combine(DocsDbPath, "schemas");
        if (Directory.Exists(schemaDocsPath))
        {
            foreach (var docFile in Directory.GetFiles(schemaDocsPath, "*.md", SearchOption.TopDirectoryOnly))
            {
                schemasDocumented.Add(Path.GetFileNameWithoutExtension(docFile));
            }
        }

        // Assert
        var undocumented = schemasUsed.Except(schemasDocumented).ToList();
        // Output for visibility
        if (undocumented.Count > 0)
        {
            Console.WriteLine($"Schemas without documentation: {string.Join(", ", undocumented)}");
        }
        // Soft assertion - warn but don't fail
        schemasUsed.Should().NotBeEmpty("Should find schemas used in migrations");
    }

    /// <summary>
    /// Verifies that migrations have corresponding down/rollback scripts where appropriate.
    /// Informational only: missing down scripts are logged, not asserted.
    /// </summary>
    [Fact]
    public void Migrations_HaveDownScripts()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        // Exclude every recognized down-script convention, including ".down.sql" —
        // otherwise a "001_init.down.sql" file is itself treated as an up script
        // and flagged as missing its own rollback.
        var upScripts = migrationFiles.Where(f =>
        {
            var name = Path.GetFileName(f);
            return !name.Contains("_down", StringComparison.OrdinalIgnoreCase) &&
                   !name.Contains("_rollback", StringComparison.OrdinalIgnoreCase) &&
                   !name.EndsWith(".down.sql", StringComparison.OrdinalIgnoreCase);
        }).ToList();
        var missingDownScripts = new List<string>();

        // Act
        foreach (var upScript in upScripts)
        {
            var fileName = Path.GetFileName(upScript);
            var directory = Path.GetDirectoryName(upScript)!;
            // Look for corresponding down script under any supported naming convention
            var baseName = Path.GetFileNameWithoutExtension(fileName);
            var expectedDownNames = new[]
            {
                $"{baseName}_down.sql",
                $"{baseName}_rollback.sql",
                $"{baseName}.down.sql"
            };
            var hasDownScript = expectedDownNames.Any(downName =>
                File.Exists(Path.Combine(directory, downName)));
            if (!hasDownScript)
            {
                missingDownScripts.Add(fileName);
            }
        }

        // Assert - informational
        if (missingDownScripts.Count > 0)
        {
            Console.WriteLine($"Migrations without down scripts ({missingDownScripts.Count}):");
            foreach (var script in missingDownScripts.Take(10))
            {
                Console.WriteLine($"  - {script}");
            }
        }
    }

    #region Helper Methods

    /// <summary>
    /// Walks upward from the current directory looking for a repo marker
    /// (a .git directory or a CLAUDE.md file).
    /// </summary>
    private static string FindRepoRoot()
    {
        var current = Directory.GetCurrentDirectory();
        while (current is not null)
        {
            if (Directory.Exists(Path.Combine(current, ".git")) ||
                File.Exists(Path.Combine(current, "CLAUDE.md")))
            {
                return current;
            }
            current = Directory.GetParent(current)?.FullName;
        }
        // Fallback: assume we're in a test output directory
        return Path.GetFullPath(Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "..", ".."));
    }

    /// <summary>
    /// Collects every .sql file found under any "Migrations" directory in src.
    /// </summary>
    private static ImmutableArray<string> GetMigrationFiles()
    {
        var migrationDirs = new List<string>();
        // Find all Migrations directories
        if (Directory.Exists(SrcPath))
        {
            migrationDirs.AddRange(
                Directory.GetDirectories(SrcPath, "Migrations", SearchOption.AllDirectories));
        }
        var allMigrations = new List<string>();
        foreach (var dir in migrationDirs)
        {
            allMigrations.AddRange(Directory.GetFiles(dir, "*.sql", SearchOption.AllDirectories));
        }
        return [.. allMigrations];
    }

    #endregion
}

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<!-- Test packages inherited from Directory.Build.props -->
<PackageReference Include="FluentAssertions" />
<PackageReference Include="YamlDotNet" />
<PackageReference Include="coverlet.collector">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
</ItemGroup>
<!-- Include OpenAPI specs as content files for testing -->
<ItemGroup>
<None Include="..\..\..\..\docs\api\**\*.yaml" Link="Specs\%(RecursiveDir)%(Filename)%(Extension)">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Include="..\..\..\..\docs\contracts\**\*.yaml" Link="Contracts\%(RecursiveDir)%(Filename)%(Extension)">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@@ -1,7 +1,7 @@
# Architecture Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |