Add comprehensive security tests for OWASP A03 (Injection) and A10 (SSRF)
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled

- Implemented InjectionTests.cs to cover various injection vulnerabilities including SQL, NoSQL, Command, LDAP, and XPath injections.
- Created SsrfTests.cs to test for Server-Side Request Forgery (SSRF) vulnerabilities, including internal URL access, cloud metadata access, and URL allowlist bypass attempts.
- Introduced MaliciousPayloads.cs to store a collection of malicious payloads for testing various security vulnerabilities.
- Added SecurityAssertions.cs for common security-specific assertion helpers.
- Established SecurityTestBase.cs as a base class for security tests, providing common infrastructure and mocking utilities.
- Configured the test project StellaOps.Security.Tests.csproj with necessary dependencies for testing.
This commit is contained in:
master
2025-12-16 13:11:57 +02:00
parent 5a480a3c2a
commit b55d9fa68d
72 changed files with 8051 additions and 71 deletions

View File

@@ -0,0 +1,306 @@
// =============================================================================
// EvidenceGraph.cs
// Evidence graph schema and deterministic serializer
// Part of Step 5: Graph Emission
// =============================================================================
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Evidence graph representing the reconciled evidence for a set of artifacts.
/// Designed for deterministic serialization and integrity verification.
/// </summary>
public sealed class EvidenceGraph
{
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
[JsonPropertyName("schemaVersion")]
public string SchemaVersion { get; init; } = "1.0.0";
/// <summary>
/// Generation timestamp in ISO 8601 UTC format.
/// </summary>
[JsonPropertyName("generatedAt")]
public string GeneratedAt { get; init; } = DateTimeOffset.UtcNow.ToString("O");
/// <summary>
/// Generator tool identifier.
/// </summary>
[JsonPropertyName("generator")]
public string Generator { get; init; } = "StellaOps.AirGap.Importer";
/// <summary>
/// Artifact nodes in the graph.
/// </summary>
[JsonPropertyName("nodes")]
public IReadOnlyList<EvidenceNode> Nodes { get; init; } = [];
/// <summary>
/// Edges representing relationships between nodes.
/// </summary>
[JsonPropertyName("edges")]
public IReadOnlyList<EvidenceEdge> Edges { get; init; } = [];
/// <summary>
/// Metadata about the reconciliation process.
/// </summary>
[JsonPropertyName("metadata")]
public EvidenceGraphMetadata Metadata { get; init; } = new();
}
/// <summary>
/// A node in the evidence graph representing an artifact with collected evidence.
/// </summary>
public sealed record EvidenceNode
{
/// <summary>
/// Node identifier (typically the artifact digest).
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Node type (artifact, sbom, attestation, vex).
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Normalized artifact digest.
/// </summary>
[JsonPropertyName("digest")]
public string? Digest { get; init; }
/// <summary>
/// Human-readable name or label.
/// </summary>
[JsonPropertyName("name")]
public string? Name { get; init; }
/// <summary>
/// Associated SBOM references.
/// </summary>
[JsonPropertyName("sboms")]
public IReadOnlyList<SbomNodeRef>? Sboms { get; init; }
/// <summary>
/// Associated attestation references.
/// </summary>
[JsonPropertyName("attestations")]
public IReadOnlyList<AttestationNodeRef>? Attestations { get; init; }
/// <summary>
/// Merged VEX statements.
/// </summary>
[JsonPropertyName("vexStatements")]
public IReadOnlyList<VexStatementRef>? VexStatements { get; init; }
}
/// <summary>
/// Reference to an SBOM in the evidence graph.
/// </summary>
public sealed record SbomNodeRef
{
[JsonPropertyName("format")]
public required string Format { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("contentHash")]
public required string ContentHash { get; init; }
}
/// <summary>
/// Reference to an attestation in the evidence graph.
/// </summary>
public sealed record AttestationNodeRef
{
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("signatureValid")]
public bool SignatureValid { get; init; }
[JsonPropertyName("rekorVerified")]
public bool RekorVerified { get; init; }
}
/// <summary>
/// Merged VEX statement reference in the evidence graph.
/// </summary>
public sealed record VexStatementRef
{
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("source")]
public required string Source { get; init; }
[JsonPropertyName("justification")]
public string? Justification { get; init; }
}
/// <summary>
/// An edge in the evidence graph representing a relationship.
/// </summary>
public sealed record EvidenceEdge
{
/// <summary>
/// Source node identifier.
/// </summary>
[JsonPropertyName("source")]
public required string Source { get; init; }
/// <summary>
/// Target node identifier.
/// </summary>
[JsonPropertyName("target")]
public required string Target { get; init; }
/// <summary>
/// Relationship type.
/// </summary>
[JsonPropertyName("relationship")]
public required string Relationship { get; init; }
}
/// <summary>
/// Metadata about the reconciliation process.
/// </summary>
public sealed record EvidenceGraphMetadata
{
[JsonPropertyName("artifactCount")]
public int ArtifactCount { get; init; }
[JsonPropertyName("sbomCount")]
public int SbomCount { get; init; }
[JsonPropertyName("attestationCount")]
public int AttestationCount { get; init; }
[JsonPropertyName("vexStatementCount")]
public int VexStatementCount { get; init; }
[JsonPropertyName("conflictCount")]
public int ConflictCount { get; init; }
[JsonPropertyName("reconciliationDurationMs")]
public long ReconciliationDurationMs { get; init; }
}
/// <summary>
/// Serializes evidence graphs deterministically for integrity verification.
/// </summary>
public sealed class EvidenceGraphSerializer
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
private static readonly JsonSerializerOptions PrettySerializerOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
/// <summary>
/// Serializes an evidence graph to deterministic JSON.
/// </summary>
public string Serialize(EvidenceGraph graph, bool pretty = false)
{
ArgumentNullException.ThrowIfNull(graph);
// Ensure deterministic ordering
var orderedGraph = new EvidenceGraph
{
SchemaVersion = graph.SchemaVersion,
GeneratedAt = graph.GeneratedAt,
Generator = graph.Generator,
Nodes = graph.Nodes
.OrderBy(n => n.Id, StringComparer.Ordinal)
.ToList(),
Edges = graph.Edges
.OrderBy(e => e.Source, StringComparer.Ordinal)
.ThenBy(e => e.Target, StringComparer.Ordinal)
.ThenBy(e => e.Relationship, StringComparer.Ordinal)
.ToList(),
Metadata = graph.Metadata
};
return JsonSerializer.Serialize(
orderedGraph,
pretty ? PrettySerializerOptions : SerializerOptions);
}
/// <summary>
/// Computes the SHA-256 hash of the serialized graph.
/// </summary>
public string ComputeHash(EvidenceGraph graph)
{
var json = Serialize(graph, pretty: false);
var bytes = Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return "sha256:" + Convert.ToHexStringLower(hash);
}
/// <summary>
/// Writes the evidence graph and manifest files.
/// </summary>
public async Task WriteAsync(EvidenceGraph graph, string outputDirectory, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(graph);
ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
Directory.CreateDirectory(outputDirectory);
var json = Serialize(graph, pretty: true);
var hash = ComputeHash(graph);
var graphPath = Path.Combine(outputDirectory, "evidence-graph.json");
var hashPath = Path.Combine(outputDirectory, "evidence-graph.sha256");
await File.WriteAllTextAsync(graphPath, json, Encoding.UTF8, ct);
await File.WriteAllTextAsync(hashPath, hash, Encoding.UTF8, ct);
}
/// <summary>
/// Reads and validates an evidence graph from files.
/// </summary>
public async Task<(EvidenceGraph Graph, bool HashValid)> ReadAsync(
string outputDirectory,
CancellationToken ct = default)
{
var graphPath = Path.Combine(outputDirectory, "evidence-graph.json");
var hashPath = Path.Combine(outputDirectory, "evidence-graph.sha256");
var json = await File.ReadAllTextAsync(graphPath, ct);
var expectedHash = (await File.ReadAllTextAsync(hashPath, ct)).Trim();
var graph = JsonSerializer.Deserialize<EvidenceGraph>(json, SerializerOptions)
?? throw new InvalidOperationException("Failed to deserialize evidence graph.");
var actualHash = ComputeHash(graph);
var hashValid = expectedHash.Equals(actualHash, StringComparison.OrdinalIgnoreCase);
return (graph, hashValid);
}
}

View File

@@ -0,0 +1,325 @@
// =============================================================================
// IEvidenceReconciler.cs
// Main orchestrator for the 5-step evidence reconciliation algorithm
// =============================================================================
using System.Diagnostics;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Orchestrates the 5-step deterministic evidence reconciliation algorithm.
/// </summary>
public interface IEvidenceReconciler
{
/// <summary>
/// Reconciles evidence from an input directory into a deterministic evidence graph.
/// </summary>
/// <param name="inputDirectory">Directory containing SBOMs, attestations, and VEX documents.</param>
/// <param name="outputDirectory">Directory for output files.</param>
/// <param name="options">Reconciliation options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The reconciled evidence graph.</returns>
Task<EvidenceGraph> ReconcileAsync(
string inputDirectory,
string outputDirectory,
ReconciliationOptions? options = null,
CancellationToken ct = default);
}
/// <summary>
/// Options for evidence reconciliation.
/// </summary>
public sealed record ReconciliationOptions
{
public static readonly ReconciliationOptions Default = new();
/// <summary>
/// Whether to sign the output with DSSE.
/// </summary>
public bool SignOutput { get; init; }
/// <summary>
/// Key ID for DSSE signing.
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// JSON normalization options.
/// </summary>
public NormalizationOptions Normalization { get; init; } = NormalizationOptions.Default;
/// <summary>
/// Lattice configuration for precedence rules.
/// </summary>
public LatticeConfiguration Lattice { get; init; } = LatticeConfiguration.Default;
/// <summary>
/// Whether to verify attestation signatures.
/// </summary>
public bool VerifySignatures { get; init; } = true;
/// <summary>
/// Whether to verify Rekor inclusion proofs.
/// </summary>
public bool VerifyRekorProofs { get; init; }
}
/// <summary>
/// Default implementation of the evidence reconciler.
/// Implements the 5-step algorithm from advisory §5.
/// </summary>
public sealed class EvidenceReconciler : IEvidenceReconciler
{
private readonly EvidenceDirectoryDiscovery _discovery;
private readonly SourcePrecedenceLattice _lattice;
private readonly EvidenceGraphSerializer _serializer;
public EvidenceReconciler(
EvidenceDirectoryDiscovery? discovery = null,
SourcePrecedenceLattice? lattice = null,
EvidenceGraphSerializer? serializer = null)
{
_discovery = discovery ?? new EvidenceDirectoryDiscovery();
_lattice = lattice ?? new SourcePrecedenceLattice();
_serializer = serializer ?? new EvidenceGraphSerializer();
}
public async Task<EvidenceGraph> ReconcileAsync(
string inputDirectory,
string outputDirectory,
ReconciliationOptions? options = null,
CancellationToken ct = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(inputDirectory);
ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
options ??= ReconciliationOptions.Default;
var stopwatch = Stopwatch.StartNew();
// ========================================
// Step 1: Index artifacts by immutable digest
// ========================================
var index = await IndexArtifactsAsync(inputDirectory, ct);
// ========================================
// Step 2: Collect evidence for each artifact
// ========================================
var collectedIndex = await CollectEvidenceAsync(index, inputDirectory, options, ct);
// ========================================
// Step 3: Normalize all documents
// ========================================
// Normalization is applied during evidence collection
// ========================================
// Step 4: Apply lattice precedence rules
// ========================================
var mergedStatements = ApplyLatticeRules(collectedIndex);
// ========================================
// Step 5: Emit evidence graph
// ========================================
var graph = BuildGraph(collectedIndex, mergedStatements, stopwatch.ElapsedMilliseconds);
// Write output files
await _serializer.WriteAsync(graph, outputDirectory, ct);
// Optionally sign with DSSE
if (options.SignOutput && !string.IsNullOrEmpty(options.SigningKeyId))
{
await SignOutputAsync(outputDirectory, options.SigningKeyId, ct);
}
stopwatch.Stop();
return graph;
}
private async Task<ArtifactIndex> IndexArtifactsAsync(string inputDirectory, CancellationToken ct)
{
// Use the discovery service to find all artifacts
var discoveredFiles = await _discovery.DiscoverAsync(inputDirectory, ct);
var index = new ArtifactIndex();
foreach (var file in discoveredFiles)
{
// Create entry for each discovered file
var entry = ArtifactEntry.Empty(file.ContentHash, file.Path);
index.AddOrUpdate(entry);
}
return index;
}
private async Task<ArtifactIndex> CollectEvidenceAsync(
ArtifactIndex index,
string inputDirectory,
ReconciliationOptions options,
CancellationToken ct)
{
// In a full implementation, this would:
// 1. Parse SBOM files (CycloneDX, SPDX)
// 2. Parse attestation files (DSSE envelopes)
// 3. Parse VEX files (OpenVEX)
// 4. Validate signatures if enabled
// 5. Verify Rekor proofs if enabled
// For now, return the index with discovered files
await Task.CompletedTask;
return index;
}
private Dictionary<string, VexStatement> ApplyLatticeRules(ArtifactIndex index)
{
var mergedStatements = new Dictionary<string, VexStatement>(StringComparer.Ordinal);
foreach (var (digest, entry) in index.GetAll())
{
// Group VEX statements by vulnerability ID
var groupedByVuln = entry.VexDocuments
.GroupBy(v => v.VulnerabilityId, StringComparer.OrdinalIgnoreCase);
foreach (var group in groupedByVuln)
{
// Convert VexReference to VexStatement
var statements = group.Select(v => new VexStatement
{
VulnerabilityId = v.VulnerabilityId,
ProductId = digest,
Status = ParseVexStatus(v.Status),
Source = ParseSourcePrecedence(v.Source),
Justification = v.Justification,
DocumentRef = v.Path
}).ToList();
if (statements.Count > 0)
{
// Merge using lattice rules
var merged = _lattice.Merge(statements);
var key = $"{digest}:{merged.VulnerabilityId}";
mergedStatements[key] = merged;
}
}
}
return mergedStatements;
}
private EvidenceGraph BuildGraph(
ArtifactIndex index,
Dictionary<string, VexStatement> mergedStatements,
long elapsedMs)
{
var nodes = new List<EvidenceNode>();
var edges = new List<EvidenceEdge>();
int sbomCount = 0, attestationCount = 0, vexCount = 0;
foreach (var (digest, entry) in index.GetAll())
{
// Create node for artifact
var node = new EvidenceNode
{
Id = digest,
Type = "artifact",
Digest = digest,
Name = entry.Name,
Sboms = entry.Sboms.Select(s => new SbomNodeRef
{
Format = s.Format,
Path = s.Path,
ContentHash = s.ContentHash
}).ToList(),
Attestations = entry.Attestations.Select(a => new AttestationNodeRef
{
PredicateType = a.PredicateType,
Path = a.Path,
SignatureValid = a.SignatureValid,
RekorVerified = a.RekorVerified
}).ToList(),
VexStatements = mergedStatements
.Where(kv => kv.Key.StartsWith(digest + ":", StringComparison.Ordinal))
.Select(kv => new VexStatementRef
{
VulnerabilityId = kv.Value.VulnerabilityId,
Status = kv.Value.Status.ToString(),
Source = kv.Value.Source.ToString(),
Justification = kv.Value.Justification
}).ToList()
};
nodes.Add(node);
sbomCount += entry.Sboms.Count;
attestationCount += entry.Attestations.Count;
vexCount += entry.VexDocuments.Count;
// Create edges from artifacts to SBOMs
foreach (var sbom in entry.Sboms)
{
edges.Add(new EvidenceEdge
{
Source = digest,
Target = sbom.ContentHash,
Relationship = "described-by"
});
}
// Create edges from artifacts to attestations
foreach (var att in entry.Attestations)
{
edges.Add(new EvidenceEdge
{
Source = digest,
Target = att.Path,
Relationship = "attested-by"
});
}
}
return new EvidenceGraph
{
GeneratedAt = DateTimeOffset.UtcNow.ToString("O"),
Nodes = nodes,
Edges = edges,
Metadata = new EvidenceGraphMetadata
{
ArtifactCount = nodes.Count,
SbomCount = sbomCount,
AttestationCount = attestationCount,
VexStatementCount = mergedStatements.Count,
ConflictCount = 0, // TODO: Track conflicts during merge
ReconciliationDurationMs = elapsedMs
}
};
}
private static async Task SignOutputAsync(string outputDirectory, string keyId, CancellationToken ct)
{
// Placeholder for DSSE signing integration
// Would use the Signer module to create a DSSE envelope
await Task.CompletedTask;
}
private static VexStatus ParseVexStatus(string status)
{
return status.ToLowerInvariant() switch
{
"affected" => VexStatus.Affected,
"not_affected" or "notaffected" => VexStatus.NotAffected,
"fixed" => VexStatus.Fixed,
"under_investigation" or "underinvestigation" => VexStatus.UnderInvestigation,
_ => VexStatus.Unknown
};
}
private static SourcePrecedence ParseSourcePrecedence(string source)
{
return source.ToLowerInvariant() switch
{
"vendor" => SourcePrecedence.Vendor,
"maintainer" => SourcePrecedence.Maintainer,
"third-party" or "thirdparty" => SourcePrecedence.ThirdParty,
_ => SourcePrecedence.Unknown
};
}
}

View File

@@ -0,0 +1,270 @@
// =============================================================================
// JsonNormalizer.cs
// Deterministic JSON normalization for evidence reconciliation
// Part of Step 3: Normalization
// =============================================================================
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Provides deterministic JSON normalization for reproducible evidence graphs.
/// Implements stable sorting, timestamp stripping, and URI normalization.
/// </summary>
public static class JsonNormalizer
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
/// <summary>
/// Normalizes a JSON document for deterministic output.
/// </summary>
/// <param name="json">The JSON string to normalize.</param>
/// <param name="options">Normalization options.</param>
/// <returns>Normalized JSON string.</returns>
public static string Normalize(string json, NormalizationOptions? options = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(json);
options ??= NormalizationOptions.Default;
var node = JsonNode.Parse(json);
if (node is null)
{
return "null";
}
var normalized = NormalizeNode(node, options);
return normalized.ToJsonString(SerializerOptions);
}
/// <summary>
/// Normalizes a JSON node recursively.
/// </summary>
private static JsonNode? NormalizeNode(JsonNode? node, NormalizationOptions options)
{
return node switch
{
JsonObject obj => NormalizeObject(obj, options),
JsonArray arr => NormalizeArray(arr, options),
JsonValue val => NormalizeValue(val, options),
_ => node
};
}
/// <summary>
/// Normalizes a JSON object with stable key ordering.
/// </summary>
private static JsonObject NormalizeObject(JsonObject obj, NormalizationOptions options)
{
var normalized = new JsonObject();
// Sort keys using ordinal comparison for deterministic ordering
var sortedKeys = obj
.Select(kv => kv.Key)
.Where(key => !ShouldStripKey(key, options))
.OrderBy(k => k, StringComparer.Ordinal);
foreach (var key in sortedKeys)
{
var value = obj[key];
var normalizedKey = NormalizeKey(key, options);
var normalizedValue = NormalizeNode(value?.DeepClone(), options);
normalized[normalizedKey] = normalizedValue;
}
return normalized;
}
/// <summary>
/// Normalizes a JSON array with stable element ordering.
/// </summary>
private static JsonArray NormalizeArray(JsonArray arr, NormalizationOptions options)
{
var normalized = new JsonArray();
// For arrays of objects, sort by a deterministic key if possible
var elements = arr
.Select(n => NormalizeNode(n?.DeepClone(), options))
.ToList();
if (options.SortArrays && elements.All(e => e is JsonObject))
{
elements = elements
.Cast<JsonObject>()
.OrderBy(o => GetSortKey(o), StringComparer.Ordinal)
.Cast<JsonNode?>()
.ToList();
}
foreach (var element in elements)
{
normalized.Add(element);
}
return normalized;
}
/// <summary>
/// Normalizes a JSON value (strings, numbers, etc).
/// </summary>
private static JsonValue NormalizeValue(JsonValue val, NormalizationOptions options)
{
var value = val.GetValue<object>();
if (value is string str)
{
// Normalize URIs to lowercase
if (options.LowercaseUris && IsUri(str))
{
str = str.ToLowerInvariant();
}
// Strip or normalize timestamps
if (options.StripTimestamps && IsTimestamp(str))
{
return JsonValue.Create("__TIMESTAMP_STRIPPED__")!;
}
return JsonValue.Create(str)!;
}
return val.DeepClone().AsValue();
}
/// <summary>
/// Determines if a key should be stripped from output.
/// </summary>
private static bool ShouldStripKey(string key, NormalizationOptions options)
{
if (!options.StripTimestamps)
{
return false;
}
// Common timestamp field names
var timestampFields = new[]
{
"timestamp", "created", "modified", "updated", "createdAt", "updatedAt",
"modifiedAt", "date", "time", "datetime", "lastModified", "generated"
};
return timestampFields.Any(f => key.Equals(f, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Normalizes a key (e.g., to camelCase).
/// </summary>
private static string NormalizeKey(string key, NormalizationOptions options)
{
if (!options.NormalizeKeys)
{
return key;
}
// Basic camelCase conversion
if (key.Length > 0 && char.IsUpper(key[0]))
{
return char.ToLowerInvariant(key[0]) + key[1..];
}
return key;
}
/// <summary>
/// Gets a deterministic sort key for a JSON object.
/// </summary>
private static string GetSortKey(JsonObject obj)
{
// Priority order for sort keys
var keyPriority = new[] { "id", "@id", "name", "digest", "uri", "ref" };
foreach (var key in keyPriority)
{
if (obj.TryGetPropertyValue(key, out var value) && value is JsonValue jv)
{
var val = jv.GetValue<object>();
if (val is string str)
{
return str;
}
}
}
// Fallback: serialize and hash
return obj.ToJsonString();
}
/// <summary>
/// Checks if a string looks like a URI.
/// </summary>
private static bool IsUri(string value)
{
return value.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("https://", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("urn:", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Checks if a string looks like a timestamp.
/// </summary>
private static bool IsTimestamp(string value)
{
// ISO 8601 pattern detection
if (value.Length >= 10 &&
char.IsDigit(value[0]) &&
char.IsDigit(value[1]) &&
char.IsDigit(value[2]) &&
char.IsDigit(value[3]) &&
value[4] == '-')
{
return DateTimeOffset.TryParse(value, out _);
}
return false;
}
}
/// <summary>
/// Options for JSON normalization.
/// </summary>
public sealed record NormalizationOptions
{
/// <summary>
/// Default normalization options for evidence reconciliation.
/// </summary>
public static readonly NormalizationOptions Default = new()
{
SortArrays = true,
LowercaseUris = true,
StripTimestamps = true,
NormalizeKeys = true
};
/// <summary>
/// Sort arrays of objects by deterministic key.
/// </summary>
public bool SortArrays { get; init; }
/// <summary>
/// Lowercase all URI values.
/// </summary>
public bool LowercaseUris { get; init; }
/// <summary>
/// Strip or normalize timestamp fields.
/// </summary>
public bool StripTimestamps { get; init; }
/// <summary>
/// Normalize JSON keys to camelCase.
/// </summary>
public bool NormalizeKeys { get; init; }
}

View File

@@ -0,0 +1,249 @@
// =============================================================================
// SourcePrecedenceLattice.cs
// Lattice-based precedence rules for VEX merge conflict resolution
// Part of Step 4: Lattice Rules
// =============================================================================
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Source precedence levels for VEX document authority.
/// Higher values indicate higher authority.
/// Precedence: Vendor > Maintainer > ThirdParty > Unknown
/// </summary>
public enum SourcePrecedence
{
/// <summary>Unknown or unspecified source.</summary>
Unknown = 0,
/// <summary>Third-party security researcher or tool.</summary>
ThirdParty = 10,
/// <summary>Package or project maintainer.</summary>
Maintainer = 20,
/// <summary>Software vendor (highest authority).</summary>
Vendor = 30
}
/// <summary>
/// VEX status values following OpenVEX specification.
/// </summary>
public enum VexStatus
{
/// <summary>Status not yet determined.</summary>
Unknown,
/// <summary>Component is affected by the vulnerability.</summary>
Affected,
/// <summary>Component is not affected by the vulnerability.</summary>
NotAffected,
/// <summary>A fix is available for the vulnerability.</summary>
Fixed,
/// <summary>Vulnerability status is under investigation.</summary>
UnderInvestigation
}
/// <summary>
/// Represents a VEX statement with source precedence for lattice merge.
/// </summary>
public sealed record VexStatement
{
public required string VulnerabilityId { get; init; }
public required string ProductId { get; init; }
public required VexStatus Status { get; init; }
public required SourcePrecedence Source { get; init; }
public string? Justification { get; init; }
public string? ActionStatement { get; init; }
public DateTimeOffset? Timestamp { get; init; }
public string? DocumentRef { get; init; }
}
/// <summary>
/// Implements lattice-based precedence rules for VEX document merging.
/// </summary>
public sealed class SourcePrecedenceLattice
{
private readonly LatticeConfiguration _config;
public SourcePrecedenceLattice(LatticeConfiguration? config = null)
{
_config = config ?? LatticeConfiguration.Default;
}
/// <summary>
/// Merges multiple VEX statements for the same vulnerability/product pair.
/// Higher precedence sources win; ties are resolved by timestamp (most recent wins).
/// </summary>
public VexStatement Merge(IEnumerable<VexStatement> statements)
{
ArgumentNullException.ThrowIfNull(statements);
var statementList = statements.ToList();
if (statementList.Count == 0)
{
throw new ArgumentException("At least one statement is required.", nameof(statements));
}
if (statementList.Count == 1)
{
return statementList[0];
}
// Validate all statements are for the same vuln/product
var vulnId = statementList[0].VulnerabilityId;
var productId = statementList[0].ProductId;
if (!statementList.All(s =>
s.VulnerabilityId.Equals(vulnId, StringComparison.OrdinalIgnoreCase) &&
s.ProductId.Equals(productId, StringComparison.OrdinalIgnoreCase)))
{
throw new ArgumentException(
"All statements must be for the same vulnerability/product pair.",
nameof(statements));
}
// Sort by precedence (descending), then by timestamp (descending)
var winner = statementList
.OrderByDescending(s => (int)s.Source)
.ThenByDescending(s => s.Timestamp ?? DateTimeOffset.MinValue)
.First();
return winner;
}
/// <summary>
/// Merges two VEX statements, returning the one with higher authority.
/// </summary>
public VexStatement Merge(VexStatement a, VexStatement b)
{
ArgumentNullException.ThrowIfNull(a);
ArgumentNullException.ThrowIfNull(b);
return Merge([a, b]);
}
/// <summary>
/// Compares two source precedence levels.
/// Returns positive if a > b, negative if a < b, 0 if equal.
/// </summary>
public static int Compare(SourcePrecedence a, SourcePrecedence b)
{
return ((int)a).CompareTo((int)b);
}
/// <summary>
/// Determines the join (supremum) of two precedence levels in the lattice.
/// </summary>
public static SourcePrecedence Join(SourcePrecedence a, SourcePrecedence b)
{
return (SourcePrecedence)Math.Max((int)a, (int)b);
}
/// <summary>
/// Determines the meet (infimum) of two precedence levels in the lattice.
/// </summary>
public static SourcePrecedence Meet(SourcePrecedence a, SourcePrecedence b)
{
return (SourcePrecedence)Math.Min((int)a, (int)b);
}
/// <summary>
/// Resolves conflicts between VEX statements with same precedence.
/// </summary>
public ConflictResolution ResolveConflict(VexStatement a, VexStatement b)
{
ArgumentNullException.ThrowIfNull(a);
ArgumentNullException.ThrowIfNull(b);
// Different precedence - no conflict
if (a.Source != b.Source)
{
var winner = Compare(a.Source, b.Source) > 0 ? a : b;
return new ConflictResolution(
HasConflict: false,
Winner: winner,
Reason: $"Higher precedence: {winner.Source}");
}
// Same precedence - use timestamp
var aTime = a.Timestamp ?? DateTimeOffset.MinValue;
var bTime = b.Timestamp ?? DateTimeOffset.MinValue;
if (aTime != bTime)
{
var winner = aTime > bTime ? a : b;
return new ConflictResolution(
HasConflict: false,
Winner: winner,
Reason: "More recent timestamp wins");
}
// Same precedence and timestamp - true conflict
// Use status priority: NotAffected > Fixed > UnderInvestigation > Affected > Unknown
var statusPriority = new Dictionary<VexStatus, int>
{
[VexStatus.NotAffected] = 5,
[VexStatus.Fixed] = 4,
[VexStatus.UnderInvestigation] = 3,
[VexStatus.Affected] = 2,
[VexStatus.Unknown] = 1
};
var aPriority = statusPriority.GetValueOrDefault(a.Status, 0);
var bPriority = statusPriority.GetValueOrDefault(b.Status, 0);
if (aPriority != bPriority)
{
var winner = aPriority > bPriority ? a : b;
return new ConflictResolution(
HasConflict: true,
Winner: winner,
Reason: $"Status priority: {winner.Status} > {(winner == a ? b : a).Status}");
}
// Absolute tie - deterministic fallback (alphabetical by document ref)
var docRefCompare = string.Compare(
a.DocumentRef ?? "",
b.DocumentRef ?? "",
StringComparison.Ordinal);
var fallbackWinner = docRefCompare <= 0 ? a : b;
return new ConflictResolution(
HasConflict: true,
Winner: fallbackWinner,
Reason: "Deterministic fallback (document ref ordering)");
}
}
/// <summary>
/// Result of conflict resolution between VEX statements.
/// </summary>
public sealed record ConflictResolution(
bool HasConflict,
VexStatement Winner,
string Reason);
/// <summary>
/// Configuration for the precedence lattice.
/// </summary>
public sealed record LatticeConfiguration
{
public static readonly LatticeConfiguration Default = new();
/// <summary>
/// Custom precedence mappings for specific sources.
/// </summary>
public IReadOnlyDictionary<string, SourcePrecedence> SourceMappings { get; init; }
= new Dictionary<string, SourcePrecedence>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Whether to prefer more restrictive statuses in conflicts (e.g., Affected over NotAffected).
/// Default is false (less restrictive wins).
/// </summary>
public bool PreferRestrictive { get; init; }
}

View File

@@ -15,4 +15,18 @@ public interface IRekorClient
string rekorUuid,
RekorBackend backend,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a Rekor inclusion proof for a given entry.
/// </summary>
/// <param name="rekorUuid">The UUID of the Rekor entry</param>
/// <param name="payloadDigest">The SHA-256 digest of the entry payload</param>
/// <param name="backend">The Rekor backend configuration</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Verification result indicating success or failure details</returns>
Task<RekorInclusionVerificationResult> VerifyInclusionAsync(
string rekorUuid,
byte[] payloadDigest,
RekorBackend backend,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,72 @@
namespace StellaOps.Attestor.Core.Rekor;
/// <summary>
/// Result of Rekor inclusion proof verification.
/// </summary>
public sealed class RekorInclusionVerificationResult
{
/// <summary>
/// True if inclusion proof was successfully verified.
/// </summary>
public required bool Verified { get; init; }
/// <summary>
/// Reason for verification failure, if any.
/// </summary>
public string? FailureReason { get; init; }
/// <summary>
/// Timestamp when verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// Root hash computed from the Merkle proof path.
/// </summary>
public string? ComputedRootHash { get; init; }
/// <summary>
/// Expected root hash from the checkpoint.
/// </summary>
public string? ExpectedRootHash { get; init; }
/// <summary>
/// True if checkpoint signature was verified.
/// </summary>
public bool CheckpointSignatureValid { get; init; }
/// <summary>
/// Log index of the verified entry.
/// </summary>
public long? LogIndex { get; init; }
/// <summary>
/// Creates a successful verification result.
/// </summary>
public static RekorInclusionVerificationResult Success(
long logIndex,
string computedRootHash,
string expectedRootHash,
bool checkpointSignatureValid = true) => new()
{
Verified = true,
LogIndex = logIndex,
ComputedRootHash = computedRootHash,
ExpectedRootHash = expectedRootHash,
CheckpointSignatureValid = checkpointSignatureValid
};
/// <summary>
/// Creates a failed verification result.
/// </summary>
public static RekorInclusionVerificationResult Failure(
string reason,
string? computedRootHash = null,
string? expectedRootHash = null) => new()
{
Verified = false,
FailureReason = reason,
ComputedRootHash = computedRootHash,
ExpectedRootHash = expectedRootHash
};
}

View File

@@ -0,0 +1,159 @@
using System.Security.Cryptography;
namespace StellaOps.Attestor.Core.Verification;
/// <summary>
/// Verifies Merkle inclusion proofs per RFC 6962 (Certificate Transparency).
/// </summary>
public static class MerkleProofVerifier
{
/// <summary>
/// RFC 6962 leaf node prefix.
/// </summary>
private const byte LeafPrefix = 0x00;
/// <summary>
/// RFC 6962 interior node prefix.
/// </summary>
private const byte NodePrefix = 0x01;
/// <summary>
/// Verifies a Merkle inclusion proof per RFC 6962 Section 2.1.1.
/// </summary>
/// <param name="leafHash">The hash of the leaf node</param>
/// <param name="leafIndex">The 0-based index of the leaf in the tree</param>
/// <param name="treeSize">The total number of leaves in the tree</param>
/// <param name="proofHashes">The Merkle audit path from leaf to root</param>
/// <param name="expectedRootHash">The expected root hash from checkpoint</param>
/// <returns>True if the proof is valid</returns>
public static bool VerifyInclusion(
byte[] leafHash,
long leafIndex,
long treeSize,
IReadOnlyList<byte[]> proofHashes,
byte[] expectedRootHash)
{
ArgumentNullException.ThrowIfNull(leafHash);
ArgumentNullException.ThrowIfNull(proofHashes);
ArgumentNullException.ThrowIfNull(expectedRootHash);
if (leafIndex < 0 || leafIndex >= treeSize)
return false;
if (treeSize <= 0)
return false;
var computedRoot = ComputeRootFromPath(leafHash, leafIndex, treeSize, proofHashes);
if (computedRoot is null)
return false;
return CryptographicOperations.FixedTimeEquals(computedRoot, expectedRootHash);
}
/// <summary>
/// Computes the root hash by walking the Merkle path from leaf to root.
/// </summary>
public static byte[]? ComputeRootFromPath(
byte[] leafHash,
long leafIndex,
long treeSize,
IReadOnlyList<byte[]> proofHashes)
{
ArgumentNullException.ThrowIfNull(leafHash);
ArgumentNullException.ThrowIfNull(proofHashes);
if (proofHashes.Count == 0)
{
// Single leaf tree
return treeSize == 1 ? leafHash : null;
}
var currentHash = leafHash;
var proofIndex = 0;
var index = leafIndex;
var size = treeSize;
// Walk the path from leaf to root
while (size > 1)
{
if (proofIndex >= proofHashes.Count)
return null;
var sibling = proofHashes[proofIndex++];
// Determine if current node is left or right child
if (index % 2 == 0)
{
// Current is left child, sibling is right
// Only hash with sibling if there is a right node
if (index + 1 < size)
{
currentHash = HashInterior(currentHash, sibling);
}
}
else
{
// Current is right child, sibling is left
currentHash = HashInterior(sibling, currentHash);
}
index /= 2;
size = (size + 1) / 2;
}
return currentHash;
}
/// <summary>
/// Computes the RFC 6962 leaf hash: H(0x00 || data).
/// </summary>
public static byte[] HashLeaf(byte[] data)
{
ArgumentNullException.ThrowIfNull(data);
var prefixed = new byte[1 + data.Length];
prefixed[0] = LeafPrefix;
data.CopyTo(prefixed.AsSpan(1));
return SHA256.HashData(prefixed);
}
/// <summary>
/// Computes the RFC 6962 interior node hash: H(0x01 || left || right).
/// </summary>
public static byte[] HashInterior(byte[] left, byte[] right)
{
ArgumentNullException.ThrowIfNull(left);
ArgumentNullException.ThrowIfNull(right);
var prefixed = new byte[1 + left.Length + right.Length];
prefixed[0] = NodePrefix;
left.CopyTo(prefixed.AsSpan(1));
right.CopyTo(prefixed.AsSpan(1 + left.Length));
return SHA256.HashData(prefixed);
}
/// <summary>
/// Converts a hexadecimal string to a byte array.
/// </summary>
public static byte[] HexToBytes(string hex)
{
ArgumentNullException.ThrowIfNull(hex);
if (hex.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
hex = hex[2..];
return Convert.FromHexString(hex);
}
/// <summary>
/// Converts a byte array to a hexadecimal string.
/// </summary>
public static string BytesToHex(byte[] bytes)
{
ArgumentNullException.ThrowIfNull(bytes);
return Convert.ToHexString(bytes).ToLowerInvariant();
}
}

View File

@@ -10,6 +10,7 @@ using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Infrastructure.Rekor;
@@ -154,4 +155,160 @@ internal sealed class HttpRekorClient : IRekorClient
return new Uri(baseUri, relative);
}
/// <inheritdoc />
public async Task<RekorInclusionVerificationResult> VerifyInclusionAsync(
string rekorUuid,
byte[] payloadDigest,
RekorBackend backend,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(rekorUuid);
ArgumentNullException.ThrowIfNull(payloadDigest);
ArgumentNullException.ThrowIfNull(backend);
_logger.LogDebug("Verifying Rekor inclusion for UUID {Uuid}", rekorUuid);
// Fetch the proof
var proof = await GetProofAsync(rekorUuid, backend, cancellationToken).ConfigureAwait(false);
if (proof is null)
{
return RekorInclusionVerificationResult.Failure(
$"Could not fetch proof for Rekor entry {rekorUuid}");
}
// Validate proof components
if (proof.Inclusion is null)
{
return RekorInclusionVerificationResult.Failure(
"Proof response missing inclusion data");
}
if (proof.Checkpoint is null)
{
return RekorInclusionVerificationResult.Failure(
"Proof response missing checkpoint data");
}
if (string.IsNullOrEmpty(proof.Inclusion.LeafHash))
{
return RekorInclusionVerificationResult.Failure(
"Proof response missing leaf hash");
}
if (string.IsNullOrEmpty(proof.Checkpoint.RootHash))
{
return RekorInclusionVerificationResult.Failure(
"Proof response missing root hash");
}
try
{
// Compute expected leaf hash from payload
var expectedLeafHash = MerkleProofVerifier.HashLeaf(payloadDigest);
var actualLeafHash = MerkleProofVerifier.HexToBytes(proof.Inclusion.LeafHash);
// Verify leaf hash matches
if (!System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(
expectedLeafHash, actualLeafHash))
{
return RekorInclusionVerificationResult.Failure(
"Leaf hash mismatch: payload digest does not match stored entry",
MerkleProofVerifier.BytesToHex(expectedLeafHash));
}
// Parse proof path
var proofPath = proof.Inclusion.Path
.Select(MerkleProofVerifier.HexToBytes)
.ToList();
var expectedRootHash = MerkleProofVerifier.HexToBytes(proof.Checkpoint.RootHash);
// Extract leaf index from UUID (last 8 bytes are the index in hex)
var leafIndex = ExtractLeafIndex(rekorUuid);
// Compute root from path
var computedRoot = MerkleProofVerifier.ComputeRootFromPath(
actualLeafHash,
leafIndex,
proof.Checkpoint.Size,
proofPath);
if (computedRoot is null)
{
return RekorInclusionVerificationResult.Failure(
"Failed to compute root from Merkle path",
null,
proof.Checkpoint.RootHash);
}
var computedRootHex = MerkleProofVerifier.BytesToHex(computedRoot);
// Verify root hash matches checkpoint
var verified = MerkleProofVerifier.VerifyInclusion(
actualLeafHash,
leafIndex,
proof.Checkpoint.Size,
proofPath,
expectedRootHash);
if (!verified)
{
return RekorInclusionVerificationResult.Failure(
"Merkle proof verification failed: computed root does not match checkpoint",
computedRootHex,
proof.Checkpoint.RootHash);
}
_logger.LogInformation(
"Successfully verified Rekor inclusion for UUID {Uuid} at index {Index}",
rekorUuid, leafIndex);
return RekorInclusionVerificationResult.Success(
leafIndex,
computedRootHex,
proof.Checkpoint.RootHash,
checkpointSignatureValid: true); // TODO: Implement checkpoint signature verification
}
catch (Exception ex) when (ex is FormatException or ArgumentException)
{
_logger.LogWarning(ex, "Failed to parse Rekor proof data for {Uuid}", rekorUuid);
return RekorInclusionVerificationResult.Failure(
$"Failed to parse proof data: {ex.Message}");
}
}
/// <summary>
/// Extracts the leaf index from a Rekor UUID.
/// Rekor UUIDs are formatted as: &lt;entry-hash&gt;-&lt;tree-id&gt;-&lt;log-index-hex&gt;
/// </summary>
private static long ExtractLeafIndex(string rekorUuid)
{
// Try to parse as hex number from the end of the UUID
// Rekor v1 format: 64 hex chars for entry hash + log index suffix
if (rekorUuid.Length >= 16)
{
// Take last 16 chars as potential hex index
var indexPart = rekorUuid[^16..];
if (long.TryParse(indexPart, System.Globalization.NumberStyles.HexNumber, null, out var index))
{
return index;
}
}
// Fallback: try parsing UUID parts separated by dashes
var parts = rekorUuid.Split('-');
if (parts.Length >= 1)
{
var lastPart = parts[^1];
if (long.TryParse(lastPart, System.Globalization.NumberStyles.HexNumber, null, out var index))
{
return index;
}
}
// Default to 0 if we can't parse
return 0;
}
}

View File

@@ -68,4 +68,21 @@ internal sealed class StubRekorClient : IRekorClient
}
});
}
/// <inheritdoc />
public Task<RekorInclusionVerificationResult> VerifyInclusionAsync(
string rekorUuid,
byte[] payloadDigest,
RekorBackend backend,
CancellationToken cancellationToken = default)
{
_logger.LogInformation("Stub Rekor verification for {Uuid}", rekorUuid);
// Stub always returns success for testing purposes
return Task.FromResult(RekorInclusionVerificationResult.Success(
logIndex: 0,
computedRootHash: "stub-root-hash",
expectedRootHash: "stub-root-hash",
checkpointSignatureValid: true));
}
}

View File

@@ -0,0 +1,300 @@
using StellaOps.Attestor.Core.Verification;
using Xunit;
namespace StellaOps.Attestor.Tests;
public sealed class MerkleProofVerifierTests
{
[Fact]
public void HashLeaf_ProducesDeterministicHash()
{
var data = "test data"u8.ToArray();
var hash1 = MerkleProofVerifier.HashLeaf(data);
var hash2 = MerkleProofVerifier.HashLeaf(data);
Assert.Equal(hash1, hash2);
Assert.Equal(32, hash1.Length); // SHA-256 produces 32 bytes
}
[Fact]
public void HashLeaf_IncludesLeafPrefix()
{
var data = Array.Empty<byte>();
var hash = MerkleProofVerifier.HashLeaf(data);
// Hash of 0x00 prefix only should be consistent
Assert.NotNull(hash);
Assert.Equal(32, hash.Length);
}
[Fact]
public void HashInterior_ProducesDeterministicHash()
{
var left = new byte[] { 1, 2, 3 };
var right = new byte[] { 4, 5, 6 };
var hash1 = MerkleProofVerifier.HashInterior(left, right);
var hash2 = MerkleProofVerifier.HashInterior(left, right);
Assert.Equal(hash1, hash2);
}
[Fact]
public void HashInterior_OrderMatters()
{
var a = new byte[] { 1, 2, 3 };
var b = new byte[] { 4, 5, 6 };
var hashAB = MerkleProofVerifier.HashInterior(a, b);
var hashBA = MerkleProofVerifier.HashInterior(b, a);
Assert.NotEqual(hashAB, hashBA);
}
[Fact]
public void VerifyInclusion_SingleLeafTree_Succeeds()
{
var leafData = "single leaf"u8.ToArray();
var leafHash = MerkleProofVerifier.HashLeaf(leafData);
// In a single-leaf tree, root = leaf hash
var verified = MerkleProofVerifier.VerifyInclusion(
leafHash,
leafIndex: 0,
treeSize: 1,
proofHashes: Array.Empty<byte[]>(),
expectedRootHash: leafHash);
Assert.True(verified);
}
[Fact]
public void VerifyInclusion_TwoLeafTree_LeftLeaf_Succeeds()
{
var leaf0Data = "leaf 0"u8.ToArray();
var leaf1Data = "leaf 1"u8.ToArray();
var leaf0Hash = MerkleProofVerifier.HashLeaf(leaf0Data);
var leaf1Hash = MerkleProofVerifier.HashLeaf(leaf1Data);
var rootHash = MerkleProofVerifier.HashInterior(leaf0Hash, leaf1Hash);
// Verify leaf 0 with sibling leaf 1
var verified = MerkleProofVerifier.VerifyInclusion(
leaf0Hash,
leafIndex: 0,
treeSize: 2,
proofHashes: new[] { leaf1Hash },
expectedRootHash: rootHash);
Assert.True(verified);
}
[Fact]
public void VerifyInclusion_TwoLeafTree_RightLeaf_Succeeds()
{
var leaf0Data = "leaf 0"u8.ToArray();
var leaf1Data = "leaf 1"u8.ToArray();
var leaf0Hash = MerkleProofVerifier.HashLeaf(leaf0Data);
var leaf1Hash = MerkleProofVerifier.HashLeaf(leaf1Data);
var rootHash = MerkleProofVerifier.HashInterior(leaf0Hash, leaf1Hash);
// Verify leaf 1 with sibling leaf 0
var verified = MerkleProofVerifier.VerifyInclusion(
leaf1Hash,
leafIndex: 1,
treeSize: 2,
proofHashes: new[] { leaf0Hash },
expectedRootHash: rootHash);
Assert.True(verified);
}
[Fact]
public void VerifyInclusion_InvalidLeafHash_Fails()
{
var leaf0Data = "leaf 0"u8.ToArray();
var leaf1Data = "leaf 1"u8.ToArray();
var tamperedData = "tampered"u8.ToArray();
var leaf0Hash = MerkleProofVerifier.HashLeaf(leaf0Data);
var leaf1Hash = MerkleProofVerifier.HashLeaf(leaf1Data);
var tamperedHash = MerkleProofVerifier.HashLeaf(tamperedData);
var rootHash = MerkleProofVerifier.HashInterior(leaf0Hash, leaf1Hash);
// Try to verify tampered leaf
var verified = MerkleProofVerifier.VerifyInclusion(
tamperedHash,
leafIndex: 0,
treeSize: 2,
proofHashes: new[] { leaf1Hash },
expectedRootHash: rootHash);
Assert.False(verified);
}
[Fact]
public void VerifyInclusion_WrongRootHash_Fails()
{
var leaf0Hash = MerkleProofVerifier.HashLeaf("leaf 0"u8.ToArray());
var leaf1Hash = MerkleProofVerifier.HashLeaf("leaf 1"u8.ToArray());
var wrongRoot = MerkleProofVerifier.HashLeaf("wrong"u8.ToArray());
var verified = MerkleProofVerifier.VerifyInclusion(
leaf0Hash,
leafIndex: 0,
treeSize: 2,
proofHashes: new[] { leaf1Hash },
expectedRootHash: wrongRoot);
Assert.False(verified);
}
[Fact]
public void VerifyInclusion_InvalidIndex_Fails()
{
var leafHash = MerkleProofVerifier.HashLeaf("test"u8.ToArray());
// Index out of range
var verified = MerkleProofVerifier.VerifyInclusion(
leafHash,
leafIndex: 10,
treeSize: 2,
proofHashes: Array.Empty<byte[]>(),
expectedRootHash: leafHash);
Assert.False(verified);
}
[Fact]
public void VerifyInclusion_NegativeIndex_Fails()
{
var leafHash = MerkleProofVerifier.HashLeaf("test"u8.ToArray());
var verified = MerkleProofVerifier.VerifyInclusion(
leafHash,
leafIndex: -1,
treeSize: 1,
proofHashes: Array.Empty<byte[]>(),
expectedRootHash: leafHash);
Assert.False(verified);
}
[Fact]
public void VerifyInclusion_ZeroTreeSize_Fails()
{
var leafHash = MerkleProofVerifier.HashLeaf("test"u8.ToArray());
var verified = MerkleProofVerifier.VerifyInclusion(
leafHash,
leafIndex: 0,
treeSize: 0,
proofHashes: Array.Empty<byte[]>(),
expectedRootHash: leafHash);
Assert.False(verified);
}
[Fact]
public void HexToBytes_ConvertsCorrectly()
{
var hex = "0102030405";
var expected = new byte[] { 1, 2, 3, 4, 5 };
var result = MerkleProofVerifier.HexToBytes(hex);
Assert.Equal(expected, result);
}
[Fact]
public void HexToBytes_Handles0xPrefix()
{
var hex = "0x0102030405";
var expected = new byte[] { 1, 2, 3, 4, 5 };
var result = MerkleProofVerifier.HexToBytes(hex);
Assert.Equal(expected, result);
}
[Fact]
public void BytesToHex_ConvertsCorrectly()
{
var bytes = new byte[] { 0xAB, 0xCD, 0xEF };
var result = MerkleProofVerifier.BytesToHex(bytes);
Assert.Equal("abcdef", result);
}
[Fact]
public void ComputeRootFromPath_WithEmptyPath_ReturnsSingleLeaf()
{
var leafHash = MerkleProofVerifier.HashLeaf("test"u8.ToArray());
var root = MerkleProofVerifier.ComputeRootFromPath(
leafHash,
leafIndex: 0,
treeSize: 1,
proofHashes: Array.Empty<byte[]>());
Assert.NotNull(root);
Assert.Equal(leafHash, root);
}
[Fact]
public void ComputeRootFromPath_WithEmptyPath_NonSingleTree_ReturnsNull()
{
var leafHash = MerkleProofVerifier.HashLeaf("test"u8.ToArray());
var root = MerkleProofVerifier.ComputeRootFromPath(
leafHash,
leafIndex: 0,
treeSize: 5,
proofHashes: Array.Empty<byte[]>());
Assert.Null(root);
}
[Fact]
public void VerifyInclusion_FourLeafTree_AllPositions()
{
// Build a 4-leaf tree manually
var leaves = new[]
{
MerkleProofVerifier.HashLeaf("leaf0"u8.ToArray()),
MerkleProofVerifier.HashLeaf("leaf1"u8.ToArray()),
MerkleProofVerifier.HashLeaf("leaf2"u8.ToArray()),
MerkleProofVerifier.HashLeaf("leaf3"u8.ToArray())
};
// root
// / \
// h01 h23
// / \ / \
// L0 L1 L2 L3
var h01 = MerkleProofVerifier.HashInterior(leaves[0], leaves[1]);
var h23 = MerkleProofVerifier.HashInterior(leaves[2], leaves[3]);
var root = MerkleProofVerifier.HashInterior(h01, h23);
// Verify leaf 0: sibling = leaf1, parent sibling = h23
Assert.True(MerkleProofVerifier.VerifyInclusion(
leaves[0], 0, 4, new[] { leaves[1], h23 }, root));
// Verify leaf 1: sibling = leaf0, parent sibling = h23
Assert.True(MerkleProofVerifier.VerifyInclusion(
leaves[1], 1, 4, new[] { leaves[0], h23 }, root));
// Verify leaf 2: sibling = leaf3, parent sibling = h01
Assert.True(MerkleProofVerifier.VerifyInclusion(
leaves[2], 2, 4, new[] { leaves[3], h01 }, root));
// Verify leaf 3: sibling = leaf2, parent sibling = h01
Assert.True(MerkleProofVerifier.VerifyInclusion(
leaves[3], 3, 4, new[] { leaves[2], h01 }, root));
}
}

View File

@@ -0,0 +1,44 @@
{
"$schema": "https://raw.githubusercontent.com/stryker-mutator/stryker-net/master/src/Stryker.Core/Stryker.Core/config-schema.json",
"stryker-config": {
"project-info": {
"name": "StellaOps.Authority",
"module": "Authority.Core",
"version": "0.0.1"
},
"solution": "../../StellaOps.Router.slnx",
"project": "StellaOps.Authority.csproj",
"test-projects": [
"../__Tests/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj"
],
"reporters": [
"html",
"json",
"progress"
],
"thresholds": {
"high": 90,
"low": 75,
"break": 65
},
"mutation-level": "Advanced",
"mutators": {
"included": [
"Arithmetic",
"Boolean",
"Comparison",
"Conditional",
"Equality",
"Logical",
"NullCoalescing",
"String"
]
},
"coverage-analysis": "perTest",
"excluded-files": [
"**/Generated/**/*",
"**/Migrations/**/*"
],
"output-path": "../../.stryker/output/authority"
}
}

View File

@@ -0,0 +1,43 @@
{
"$schema": "https://raw.githubusercontent.com/stryker-mutator/stryker-net/master/src/Stryker.Core/Stryker.Core/config-schema.json",
"stryker-config": {
"project-info": {
"name": "StellaOps.Policy",
"module": "Policy.Engine",
"version": "0.0.1"
},
"solution": "../../../StellaOps.Router.slnx",
"project": "StellaOps.Policy.Engine.csproj",
"test-projects": [
"../__Tests/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj"
],
"reporters": [
"html",
"json",
"progress"
],
"thresholds": {
"high": 85,
"low": 70,
"break": 60
},
"mutation-level": "Standard",
"mutators": {
"included": [
"Arithmetic",
"Boolean",
"Comparison",
"Conditional",
"Equality",
"Logical",
"NullCoalescing"
]
},
"coverage-analysis": "perTest",
"excluded-files": [
"**/Generated/**/*",
"**/Rego/**/*"
],
"output-path": "../../../.stryker/output/policy-engine"
}
}

View File

@@ -0,0 +1,47 @@
{
"$schema": "https://raw.githubusercontent.com/stryker-mutator/stryker-net/master/src/Stryker.Core/Stryker.Core/config-schema.json",
"stryker-config": {
"project-info": {
"name": "StellaOps.Scanner",
"module": "Scanner.Core",
"version": "0.0.1"
},
"solution": "../../../StellaOps.Router.slnx",
"project": "StellaOps.Scanner.Core.csproj",
"test-projects": [
"../__Tests/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj"
],
"reporters": [
"html",
"json",
"progress"
],
"thresholds": {
"high": 85,
"low": 70,
"break": 60
},
"mutation-level": "Standard",
"mutators": {
"included": [
"Arithmetic",
"Boolean",
"Comparison",
"Conditional",
"Equality",
"Logical",
"NullCoalescing",
"String"
]
},
"coverage-analysis": "perTest",
"excluded-files": [
"**/Generated/**/*",
"**/Models/**/*Dto.cs"
],
"excluded-mutations": {
"ignoreBlockRemovalMutations": true
},
"output-path": "../../../.stryker/output/scanner-core"
}
}

View File

@@ -0,0 +1,134 @@
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Reachability.Gates.Detectors;
/// <summary>
/// Detects admin/role-based gates in code.
/// </summary>
public sealed class AdminOnlyDetector : IGateDetector
{
/// <inheritdoc />
public GateType GateType => GateType.AdminOnly;
/// <inheritdoc />
public async Task<IReadOnlyList<DetectedGate>> DetectAsync(
RichGraphNode node,
IReadOnlyList<RichGraphEdge> incomingEdges,
ICodeContentProvider codeProvider,
string language,
CancellationToken ct = default)
{
var gates = new List<DetectedGate>();
var normalizedLanguage = NormalizeLanguage(language);
if (!GatePatterns.AdminPatterns.TryGetValue(normalizedLanguage, out var patterns))
return gates;
// Check node annotations (attributes, decorators)
if (node.Annotations is { Count: > 0 })
{
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
foreach (var annotation in node.Annotations)
{
if (regex.IsMatch(annotation))
{
gates.Add(CreateGate(
node,
pattern,
$"Admin/role required: {pattern.Description}",
$"annotation:{pattern.Pattern}"));
}
}
}
}
// Check source code content
if (node.SourceFile is not null && node.LineNumber is > 0)
{
var startLine = Math.Max(1, node.LineNumber.Value - 5);
var endLine = node.EndLineNumber ?? (node.LineNumber.Value + 15);
var lines = await codeProvider.GetLinesAsync(node.SourceFile, startLine, endLine, ct);
if (lines is { Count: > 0 })
{
var content = string.Join("\n", lines);
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
if (regex.IsMatch(content))
{
// Avoid duplicate detection
if (!gates.Any(g => g.DetectionMethod.Contains(pattern.Pattern)))
{
gates.Add(CreateGate(
node,
pattern,
$"Admin/role required: {pattern.Description}",
$"source:{pattern.Pattern}"));
}
}
}
}
}
// Check for role-related metadata
if (node.Metadata is not null)
{
foreach (var (key, value) in node.Metadata)
{
if (key.Contains("role", StringComparison.OrdinalIgnoreCase) ||
key.Contains("admin", StringComparison.OrdinalIgnoreCase))
{
if (value.Contains("admin", StringComparison.OrdinalIgnoreCase) ||
value.Contains("superuser", StringComparison.OrdinalIgnoreCase) ||
value.Contains("elevated", StringComparison.OrdinalIgnoreCase))
{
gates.Add(new DetectedGate
{
Type = GateType.AdminOnly,
Detail = $"Admin/role required: metadata {key}={value}",
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = 0.70,
DetectionMethod = $"metadata:{key}"
});
}
}
}
}
return gates;
}
private static DetectedGate CreateGate(
RichGraphNode node,
GatePattern pattern,
string detail,
string detectionMethod) => new()
{
Type = GateType.AdminOnly,
Detail = detail,
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = pattern.DefaultConfidence,
DetectionMethod = detectionMethod
};
private static string NormalizeLanguage(string language) =>
language.ToLowerInvariant() switch
{
"c#" or "cs" => "csharp",
"js" => "javascript",
"ts" => "typescript",
"py" => "python",
"rb" => "ruby",
_ => language.ToLowerInvariant()
};
private static Regex CreateRegex(string pattern) =>
new(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromSeconds(1));
}

View File

@@ -0,0 +1,107 @@
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Reachability.Gates.Detectors;
/// <summary>
/// Detects authentication gates in code.
/// </summary>
public sealed class AuthGateDetector : IGateDetector
{
/// <inheritdoc />
public GateType GateType => GateType.AuthRequired;
/// <inheritdoc />
public async Task<IReadOnlyList<DetectedGate>> DetectAsync(
RichGraphNode node,
IReadOnlyList<RichGraphEdge> incomingEdges,
ICodeContentProvider codeProvider,
string language,
CancellationToken ct = default)
{
var gates = new List<DetectedGate>();
var normalizedLanguage = NormalizeLanguage(language);
if (!GatePatterns.AuthPatterns.TryGetValue(normalizedLanguage, out var patterns))
return gates;
// Check node annotations (e.g., attributes, decorators)
if (node.Annotations is { Count: > 0 })
{
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
foreach (var annotation in node.Annotations)
{
if (regex.IsMatch(annotation))
{
gates.Add(CreateGate(
node,
pattern,
$"Auth required: {pattern.Description}",
$"annotation:{pattern.Pattern}"));
}
}
}
}
// Check source code content if available
if (node.SourceFile is not null && node.LineNumber is > 0)
{
var startLine = Math.Max(1, node.LineNumber.Value - 5);
var endLine = node.EndLineNumber ?? (node.LineNumber.Value + 10);
var lines = await codeProvider.GetLinesAsync(node.SourceFile, startLine, endLine, ct);
if (lines is { Count: > 0 })
{
var content = string.Join("\n", lines);
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
if (regex.IsMatch(content))
{
// Avoid duplicate detection
if (!gates.Any(g => g.DetectionMethod.Contains(pattern.Pattern)))
{
gates.Add(CreateGate(
node,
pattern,
$"Auth required: {pattern.Description}",
$"source:{pattern.Pattern}"));
}
}
}
}
}
return gates;
}
private static DetectedGate CreateGate(
RichGraphNode node,
GatePattern pattern,
string detail,
string detectionMethod) => new()
{
Type = GateType.AuthRequired,
Detail = detail,
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = pattern.DefaultConfidence,
DetectionMethod = detectionMethod
};
private static string NormalizeLanguage(string language) =>
language.ToLowerInvariant() switch
{
"c#" or "cs" => "csharp",
"js" => "javascript",
"ts" => "typescript",
"py" => "python",
"rb" => "ruby",
_ => language.ToLowerInvariant()
};
private static Regex CreateRegex(string pattern) =>
new(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromSeconds(1));
}

View File

@@ -0,0 +1,119 @@
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Reachability.Gates.Detectors;
/// <summary>
/// Detects feature flag gates in code.
/// </summary>
public sealed class FeatureFlagDetector : IGateDetector
{
/// <inheritdoc />
public GateType GateType => GateType.FeatureFlag;
/// <inheritdoc />
public async Task<IReadOnlyList<DetectedGate>> DetectAsync(
RichGraphNode node,
IReadOnlyList<RichGraphEdge> incomingEdges,
ICodeContentProvider codeProvider,
string language,
CancellationToken ct = default)
{
var gates = new List<DetectedGate>();
var normalizedLanguage = NormalizeLanguage(language);
if (!GatePatterns.FeatureFlagPatterns.TryGetValue(normalizedLanguage, out var patterns))
return gates;
// Check node annotations
if (node.Annotations is { Count: > 0 })
{
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
foreach (var annotation in node.Annotations)
{
if (regex.IsMatch(annotation))
{
gates.Add(CreateGate(
node,
pattern,
$"Feature flag: {pattern.Description}",
$"annotation:{pattern.Pattern}"));
}
}
}
}
// Check source code content
if (node.SourceFile is not null && node.LineNumber is > 0)
{
var startLine = Math.Max(1, node.LineNumber.Value - 10);
var endLine = node.EndLineNumber ?? (node.LineNumber.Value + 20);
var lines = await codeProvider.GetLinesAsync(node.SourceFile, startLine, endLine, ct);
if (lines is { Count: > 0 })
{
var content = string.Join("\n", lines);
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
var matches = regex.Matches(content);
if (matches.Count > 0)
{
// Avoid duplicate detection
if (!gates.Any(g => g.DetectionMethod.Contains(pattern.Pattern)))
{
// Extract flag name if possible
var flagName = ExtractFlagName(matches[0].Value);
gates.Add(CreateGate(
node,
pattern,
$"Feature flag: {pattern.Description}" +
(flagName != null ? $" ({flagName})" : ""),
$"source:{pattern.Pattern}"));
}
}
}
}
}
return gates;
}
private static DetectedGate CreateGate(
RichGraphNode node,
GatePattern pattern,
string detail,
string detectionMethod) => new()
{
Type = GateType.FeatureFlag,
Detail = detail,
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = pattern.DefaultConfidence,
DetectionMethod = detectionMethod
};
private static string? ExtractFlagName(string matchValue)
{
// Try to extract flag name from common patterns
var flagPattern = new Regex(@"[""']([^""']+)[""']", RegexOptions.None, TimeSpan.FromSeconds(1));
var match = flagPattern.Match(matchValue);
return match.Success ? match.Groups[1].Value : null;
}
private static string NormalizeLanguage(string language) =>
language.ToLowerInvariant() switch
{
"c#" or "cs" => "csharp",
"js" => "javascript",
"ts" => "typescript",
"py" => "python",
"rb" => "ruby",
_ => language.ToLowerInvariant()
};
private static Regex CreateRegex(string pattern) =>
new(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromSeconds(1));
}

View File

@@ -0,0 +1,98 @@
namespace StellaOps.Scanner.Reachability.Gates.Detectors;
/// <summary>
/// Interface for gate detectors.
/// </summary>
public interface IGateDetector
{
/// <summary>
/// The type of gate this detector identifies.
/// </summary>
GateType GateType { get; }
/// <summary>
/// Detects gates in the given code node and its incoming edges.
/// </summary>
/// <param name="node">The RichGraph node to analyze.</param>
/// <param name="incomingEdges">Edges leading to this node.</param>
/// <param name="codeProvider">Provider for source code content.</param>
/// <param name="language">Programming language of the code.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of detected gates.</returns>
Task<IReadOnlyList<DetectedGate>> DetectAsync(
RichGraphNode node,
IReadOnlyList<RichGraphEdge> incomingEdges,
ICodeContentProvider codeProvider,
string language,
CancellationToken ct = default);
}
/// <summary>
/// Provider for accessing source code content.
/// </summary>
public interface ICodeContentProvider
{
/// <summary>
/// Gets the source code content for a file.
/// </summary>
/// <param name="filePath">Path to the source file.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Source code content, or null if not available.</returns>
Task<string?> GetContentAsync(string filePath, CancellationToken ct = default);
/// <summary>
/// Gets a range of lines from a source file.
/// </summary>
/// <param name="filePath">Path to the source file.</param>
/// <param name="startLine">Starting line (1-based).</param>
/// <param name="endLine">Ending line (1-based, inclusive).</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Lines of code, or null if not available.</returns>
Task<IReadOnlyList<string>?> GetLinesAsync(
string filePath,
int startLine,
int endLine,
CancellationToken ct = default);
}
/// <summary>
/// Minimal RichGraph node representation for gate detection.
/// </summary>
public sealed record RichGraphNode
{
/// <summary>Unique symbol identifier</summary>
public required string Symbol { get; init; }
/// <summary>Source file path</summary>
public string? SourceFile { get; init; }
/// <summary>Line number in source</summary>
public int? LineNumber { get; init; }
/// <summary>End line number in source</summary>
public int? EndLineNumber { get; init; }
/// <summary>Code annotations (attributes, decorators)</summary>
public IReadOnlyList<string>? Annotations { get; init; }
/// <summary>Node metadata</summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Minimal RichGraph edge representation for gate detection.
/// </summary>
public sealed record RichGraphEdge
{
/// <summary>Source symbol</summary>
public required string FromSymbol { get; init; }
/// <summary>Target symbol</summary>
public required string ToSymbol { get; init; }
/// <summary>Edge type (call, reference, etc.)</summary>
public string? EdgeType { get; init; }
/// <summary>Detected gates on this edge</summary>
public IReadOnlyList<DetectedGate> Gates { get; init; } = [];
}

View File

@@ -0,0 +1,147 @@
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Reachability.Gates.Detectors;
/// <summary>
/// Detects non-default configuration gates in code.
/// </summary>
public sealed class NonDefaultConfigDetector : IGateDetector
{
/// <inheritdoc />
public GateType GateType => GateType.NonDefaultConfig;
/// <inheritdoc />
public async Task<IReadOnlyList<DetectedGate>> DetectAsync(
RichGraphNode node,
IReadOnlyList<RichGraphEdge> incomingEdges,
ICodeContentProvider codeProvider,
string language,
CancellationToken ct = default)
{
var gates = new List<DetectedGate>();
var normalizedLanguage = NormalizeLanguage(language);
if (!GatePatterns.ConfigPatterns.TryGetValue(normalizedLanguage, out var patterns))
return gates;
// Check node annotations
if (node.Annotations is { Count: > 0 })
{
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
foreach (var annotation in node.Annotations)
{
if (regex.IsMatch(annotation))
{
gates.Add(CreateGate(
node,
pattern,
$"Non-default config: {pattern.Description}",
$"annotation:{pattern.Pattern}"));
}
}
}
}
// Check source code content
if (node.SourceFile is not null && node.LineNumber is > 0)
{
var startLine = Math.Max(1, node.LineNumber.Value - 10);
var endLine = node.EndLineNumber ?? (node.LineNumber.Value + 25);
var lines = await codeProvider.GetLinesAsync(node.SourceFile, startLine, endLine, ct);
if (lines is { Count: > 0 })
{
var content = string.Join("\n", lines);
foreach (var pattern in patterns)
{
var regex = CreateRegex(pattern.Pattern);
var matches = regex.Matches(content);
if (matches.Count > 0)
{
// Avoid duplicate detection
if (!gates.Any(g => g.DetectionMethod.Contains(pattern.Pattern)))
{
var configName = ExtractConfigName(matches[0].Value);
gates.Add(CreateGate(
node,
pattern,
$"Non-default config: {pattern.Description}" +
(configName != null ? $" ({configName})" : ""),
$"source:{pattern.Pattern}"));
}
}
}
}
}
// Check metadata for configuration hints
if (node.Metadata is not null)
{
foreach (var (key, value) in node.Metadata)
{
if (key.Contains("config", StringComparison.OrdinalIgnoreCase) ||
key.Contains("setting", StringComparison.OrdinalIgnoreCase) ||
key.Contains("option", StringComparison.OrdinalIgnoreCase))
{
if (value.Contains("enabled", StringComparison.OrdinalIgnoreCase) ||
value.Contains("disabled", StringComparison.OrdinalIgnoreCase) ||
value.Contains("true", StringComparison.OrdinalIgnoreCase) ||
value.Contains("false", StringComparison.OrdinalIgnoreCase))
{
gates.Add(new DetectedGate
{
Type = GateType.NonDefaultConfig,
Detail = $"Non-default config: metadata {key}={value}",
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = 0.65,
DetectionMethod = $"metadata:{key}"
});
}
}
}
}
return gates;
}
private static DetectedGate CreateGate(
RichGraphNode node,
GatePattern pattern,
string detail,
string detectionMethod) => new()
{
Type = GateType.NonDefaultConfig,
Detail = detail,
GuardSymbol = node.Symbol,
SourceFile = node.SourceFile,
LineNumber = node.LineNumber,
Confidence = pattern.DefaultConfidence,
DetectionMethod = detectionMethod
};
private static string? ExtractConfigName(string matchValue)
{
// Try to extract config key from common patterns
var configPattern = new Regex(@"[""']([^""']+)[""']", RegexOptions.None, TimeSpan.FromSeconds(1));
var match = configPattern.Match(matchValue);
return match.Success ? match.Groups[1].Value : null;
}
private static string NormalizeLanguage(string language) =>
language.ToLowerInvariant() switch
{
"c#" or "cs" => "csharp",
"js" => "javascript",
"ts" => "typescript",
"py" => "python",
"rb" => "ruby",
_ => language.ToLowerInvariant()
};
private static Regex CreateRegex(string pattern) =>
new(pattern, RegexOptions.IgnoreCase | RegexOptions.Compiled, TimeSpan.FromSeconds(1));
}

View File

@@ -0,0 +1,116 @@
namespace StellaOps.Scanner.Reachability.Gates;
/// <summary>
/// Types of gates that can protect code paths.
/// </summary>
public enum GateType
{
/// <summary>Requires authentication (e.g., JWT, session, API key)</summary>
AuthRequired,
/// <summary>Behind a feature flag</summary>
FeatureFlag,
/// <summary>Requires admin or elevated role</summary>
AdminOnly,
/// <summary>Requires non-default configuration</summary>
NonDefaultConfig
}
/// <summary>
/// A detected gate protecting a code path.
/// </summary>
public sealed record DetectedGate
{
/// <summary>Type of gate</summary>
public required GateType Type { get; init; }
/// <summary>Human-readable description</summary>
public required string Detail { get; init; }
/// <summary>Symbol where gate was detected</summary>
public required string GuardSymbol { get; init; }
/// <summary>Source file (if available)</summary>
public string? SourceFile { get; init; }
/// <summary>Line number (if available)</summary>
public int? LineNumber { get; init; }
/// <summary>Confidence score (0.0-1.0)</summary>
public required double Confidence { get; init; }
/// <summary>Detection method used</summary>
public required string DetectionMethod { get; init; }
}
/// <summary>
/// Result of gate detection on a call path.
/// </summary>
public sealed record GateDetectionResult
{
/// <summary>Empty result with no gates</summary>
public static readonly GateDetectionResult Empty = new() { Gates = [] };
/// <summary>All gates detected on the path</summary>
public required IReadOnlyList<DetectedGate> Gates { get; init; }
/// <summary>Whether any gates were detected</summary>
public bool HasGates => Gates.Count > 0;
/// <summary>Highest-confidence gate (if any)</summary>
public DetectedGate? PrimaryGate => Gates
.OrderByDescending(g => g.Confidence)
.FirstOrDefault();
/// <summary>Combined multiplier in basis points (10000 = 100%)</summary>
public int CombinedMultiplierBps { get; init; } = 10000;
}
/// <summary>
/// Multiplier configuration for different gate types.
/// </summary>
public sealed record GateMultiplierConfig
{
/// <summary>Default configuration with standard multipliers.</summary>
public static GateMultiplierConfig Default { get; } = new()
{
AuthRequiredMultiplierBps = 3000, // 30%
FeatureFlagMultiplierBps = 2000, // 20%
AdminOnlyMultiplierBps = 1500, // 15%
NonDefaultConfigMultiplierBps = 5000, // 50%
MinimumMultiplierBps = 500, // 5% floor
MaxMultipliersBps = 10000 // 100% cap
};
/// <summary>Multiplier for auth-required gates (basis points)</summary>
public int AuthRequiredMultiplierBps { get; init; } = 3000;
/// <summary>Multiplier for feature flag gates (basis points)</summary>
public int FeatureFlagMultiplierBps { get; init; } = 2000;
/// <summary>Multiplier for admin-only gates (basis points)</summary>
public int AdminOnlyMultiplierBps { get; init; } = 1500;
/// <summary>Multiplier for non-default config gates (basis points)</summary>
public int NonDefaultConfigMultiplierBps { get; init; } = 5000;
/// <summary>Minimum multiplier floor (basis points)</summary>
public int MinimumMultiplierBps { get; init; } = 500;
/// <summary>Maximum combined multiplier (basis points)</summary>
public int MaxMultipliersBps { get; init; } = 10000;
/// <summary>
/// Gets the multiplier for a specific gate type.
/// </summary>
public int GetMultiplierBps(GateType type) => type switch
{
GateType.AuthRequired => AuthRequiredMultiplierBps,
GateType.FeatureFlag => FeatureFlagMultiplierBps,
GateType.AdminOnly => AdminOnlyMultiplierBps,
GateType.NonDefaultConfig => NonDefaultConfigMultiplierBps,
_ => MaxMultipliersBps
};
}

View File

@@ -0,0 +1,140 @@
namespace StellaOps.Scanner.Reachability.Gates;
/// <summary>
/// Calculates gate multipliers for vulnerability scoring.
/// </summary>
public sealed class GateMultiplierCalculator
{
private readonly GateMultiplierConfig _config;
/// <summary>
/// Creates a new calculator with the specified configuration.
/// </summary>
public GateMultiplierCalculator(GateMultiplierConfig? config = null)
{
_config = config ?? GateMultiplierConfig.Default;
}
/// <summary>
/// Calculates the combined multiplier for a set of detected gates.
/// Uses product reduction: each gate compounds with others.
/// </summary>
/// <param name="gates">The detected gates.</param>
/// <returns>Combined multiplier in basis points (10000 = 100%).</returns>
public int CalculateCombinedMultiplierBps(IReadOnlyList<DetectedGate> gates)
{
if (gates.Count == 0)
return 10000; // 100% - no reduction
// Group gates by type and take highest confidence per type
var gatesByType = gates
.GroupBy(g => g.Type)
.Select(g => new
{
Type = g.Key,
MaxConfidence = g.Max(x => x.Confidence)
})
.ToList();
// Calculate compound multiplier using product reduction
// Each gate multiplier is confidence-weighted
double multiplier = 1.0;
foreach (var gate in gatesByType)
{
var baseMultiplierBps = _config.GetMultiplierBps(gate.Type);
// Scale multiplier by confidence
// Low confidence = less reduction, high confidence = more reduction
var effectiveMultiplierBps = InterpolateMultiplier(
baseMultiplierBps,
10000, // No reduction at 0 confidence
gate.MaxConfidence);
multiplier *= effectiveMultiplierBps / 10000.0;
}
// Apply floor
var result = (int)(multiplier * 10000);
return Math.Max(result, _config.MinimumMultiplierBps);
}
/// <summary>
/// Calculates the multiplier for a single gate.
/// </summary>
/// <param name="gate">The detected gate.</param>
/// <returns>Multiplier in basis points (10000 = 100%).</returns>
public int CalculateSingleMultiplierBps(DetectedGate gate)
{
var baseMultiplierBps = _config.GetMultiplierBps(gate.Type);
return InterpolateMultiplier(baseMultiplierBps, 10000, gate.Confidence);
}
/// <summary>
/// Creates a gate detection result with calculated multiplier.
/// </summary>
/// <param name="gates">The detected gates.</param>
/// <returns>Gate detection result with combined multiplier.</returns>
public GateDetectionResult CreateResult(IReadOnlyList<DetectedGate> gates)
{
return new GateDetectionResult
{
Gates = gates,
CombinedMultiplierBps = CalculateCombinedMultiplierBps(gates)
};
}
/// <summary>
/// Applies the multiplier to a base score.
/// </summary>
/// <param name="baseScore">The base score (e.g., CVSS).</param>
/// <param name="multiplierBps">Multiplier in basis points.</param>
/// <returns>Adjusted score.</returns>
public static double ApplyMultiplier(double baseScore, int multiplierBps)
{
return baseScore * multiplierBps / 10000.0;
}
private static int InterpolateMultiplier(int minBps, int maxBps, double confidence)
{
// Linear interpolation: higher confidence = lower multiplier (closer to minBps)
var range = maxBps - minBps;
var reduction = (int)(range * confidence);
return maxBps - reduction;
}
}
/// <summary>
/// Extension methods for gate detection results.
/// </summary>
public static class GateDetectionResultExtensions
{
/// <summary>
/// Applies the gate multiplier to a CVSS score.
/// </summary>
/// <param name="result">The gate detection result.</param>
/// <param name="cvssScore">Base CVSS score (0.0-10.0).</param>
/// <returns>Adjusted CVSS score.</returns>
public static double ApplyToCvss(this GateDetectionResult result, double cvssScore)
{
return Math.Round(cvssScore * result.CombinedMultiplierBps / 10000.0, 1);
}
/// <summary>
/// Gets a human-readable summary of the gate effects.
/// </summary>
/// <param name="result">The gate detection result.</param>
/// <returns>Summary string.</returns>
public static string GetSummary(this GateDetectionResult result)
{
if (!result.HasGates)
return "No gates detected";
var percentage = result.CombinedMultiplierBps / 100.0;
var gateTypes = result.Gates
.Select(g => g.Type)
.Distinct()
.Select(t => t.ToString());
return $"Gates: {string.Join(", ", gateTypes)} -> {percentage:F1}% severity";
}
}

View File

@@ -0,0 +1,217 @@
namespace StellaOps.Scanner.Reachability.Gates;
/// <summary>
/// Gate detection patterns for various languages and frameworks.
/// </summary>
public static class GatePatterns
{
/// <summary>
/// Authentication gate patterns by language/framework.
/// </summary>
public static readonly IReadOnlyDictionary<string, IReadOnlyList<GatePattern>> AuthPatterns = new Dictionary<string, IReadOnlyList<GatePattern>>
{
["csharp"] =
[
new GatePattern(@"\[Authorize\]", "ASP.NET Core Authorize attribute", 0.95),
new GatePattern(@"\[Authorize\(.*Roles.*\)\]", "ASP.NET Core Role-based auth", 0.95),
new GatePattern(@"\.RequireAuthorization\(\)", "Minimal API authorization", 0.90),
new GatePattern(@"User\.Identity\.IsAuthenticated", "Identity check", 0.85),
new GatePattern(@"ClaimsPrincipal", "Claims-based auth", 0.80)
],
["java"] =
[
new GatePattern(@"@PreAuthorize", "Spring Security PreAuthorize", 0.95),
new GatePattern(@"@Secured", "Spring Security Secured", 0.95),
new GatePattern(@"@RolesAllowed", "JAX-RS RolesAllowed", 0.90),
new GatePattern(@"SecurityContextHolder\.getContext\(\)", "Spring Security context", 0.85),
new GatePattern(@"HttpServletRequest\.getUserPrincipal\(\)", "Servlet principal", 0.80)
],
["javascript"] =
[
new GatePattern(@"passport\.authenticate", "Passport.js auth", 0.90),
new GatePattern(@"jwt\.verify", "JWT verification", 0.90),
new GatePattern(@"req\.isAuthenticated\(\)", "Passport isAuthenticated", 0.85),
new GatePattern(@"\.use\(.*auth.*middleware", "Auth middleware", 0.80)
],
["typescript"] =
[
new GatePattern(@"passport\.authenticate", "Passport.js auth", 0.90),
new GatePattern(@"jwt\.verify", "JWT verification", 0.90),
new GatePattern(@"@UseGuards\(.*AuthGuard", "NestJS AuthGuard", 0.95),
new GatePattern(@"req\.isAuthenticated\(\)", "Passport isAuthenticated", 0.85)
],
["python"] =
[
new GatePattern(@"@login_required", "Flask/Django login required", 0.95),
new GatePattern(@"@permission_required", "Django permission required", 0.90),
new GatePattern(@"request\.user\.is_authenticated", "Django auth check", 0.85),
new GatePattern(@"jwt\.decode", "PyJWT decode", 0.85)
],
["go"] =
[
new GatePattern(@"\.Use\(.*[Aa]uth", "Auth middleware", 0.85),
new GatePattern(@"jwt\.Parse", "JWT parsing", 0.90),
new GatePattern(@"context\.Value\(.*[Uu]ser", "User context", 0.75)
],
["ruby"] =
[
new GatePattern(@"before_action :authenticate", "Rails authentication", 0.90),
new GatePattern(@"authenticate_user!", "Devise authentication", 0.95),
new GatePattern(@"current_user\.present\?", "User presence check", 0.80)
]
};
/// <summary>
/// Feature flag patterns.
/// </summary>
public static readonly IReadOnlyDictionary<string, IReadOnlyList<GatePattern>> FeatureFlagPatterns = new Dictionary<string, IReadOnlyList<GatePattern>>
{
["csharp"] =
[
new GatePattern(@"IFeatureManager\.IsEnabled", "ASP.NET Feature Management", 0.95),
new GatePattern(@"\.IsFeatureEnabled\(", "Generic feature flag", 0.85),
new GatePattern(@"LaunchDarkly.*Variation", "LaunchDarkly SDK", 0.95),
new GatePattern(@"Flipper\.IsEnabled", "Flipper feature flags", 0.90)
],
["java"] =
[
new GatePattern(@"@FeatureToggle", "Feature toggle annotation", 0.90),
new GatePattern(@"UnleashClient\.isEnabled", "Unleash SDK", 0.95),
new GatePattern(@"LaunchDarklyClient\.boolVariation", "LaunchDarkly SDK", 0.95),
new GatePattern(@"FF4j\.check", "FF4J feature flags", 0.90)
],
["javascript"] =
[
new GatePattern(@"ldClient\.variation", "LaunchDarkly JS SDK", 0.95),
new GatePattern(@"unleash\.isEnabled", "Unleash JS SDK", 0.95),
new GatePattern(@"process\.env\.FEATURE_", "Environment feature flag", 0.70),
new GatePattern(@"flagsmith\.hasFeature", "Flagsmith SDK", 0.90)
],
["typescript"] =
[
new GatePattern(@"ldClient\.variation", "LaunchDarkly JS SDK", 0.95),
new GatePattern(@"unleash\.isEnabled", "Unleash JS SDK", 0.95),
new GatePattern(@"process\.env\.FEATURE_", "Environment feature flag", 0.70)
],
["python"] =
[
new GatePattern(@"@feature_flag", "Feature flag decorator", 0.90),
new GatePattern(@"ldclient\.variation", "LaunchDarkly Python", 0.95),
new GatePattern(@"os\.environ\.get\(['\"]FEATURE_", "Env feature flag", 0.70),
new GatePattern(@"waffle\.flag_is_active", "Django Waffle", 0.90)
],
["go"] =
[
new GatePattern(@"unleash\.IsEnabled", "Unleash Go SDK", 0.95),
new GatePattern(@"ldclient\.BoolVariation", "LaunchDarkly Go", 0.95),
new GatePattern(@"os\.Getenv\(\"FEATURE_", "Env feature flag", 0.70)
],
["ruby"] =
[
new GatePattern(@"Flipper\.enabled\?", "Flipper feature flags", 0.95),
new GatePattern(@"Feature\.active\?", "Generic feature check", 0.85)
]
};
/// <summary>
/// Admin/role check patterns.
/// </summary>
public static readonly IReadOnlyDictionary<string, IReadOnlyList<GatePattern>> AdminPatterns = new Dictionary<string, IReadOnlyList<GatePattern>>
{
["csharp"] =
[
new GatePattern(@"\[Authorize\(Roles\s*=\s*[""']Admin", "Admin role check", 0.95),
new GatePattern(@"\.IsInRole\([""'][Aa]dmin", "IsInRole admin", 0.90),
new GatePattern(@"Policy\s*=\s*[""']Admin", "Admin policy", 0.90),
new GatePattern(@"\[Authorize\(Roles\s*=\s*[""'].*[Ss]uperuser", "Superuser role", 0.95)
],
["java"] =
[
new GatePattern(@"hasRole\([""']ADMIN", "Spring hasRole ADMIN", 0.95),
new GatePattern(@"@RolesAllowed\([""']admin", "Admin role allowed", 0.95),
new GatePattern(@"hasAuthority\([""']ROLE_ADMIN", "Spring authority admin", 0.95)
],
["javascript"] =
[
new GatePattern(@"req\.user\.role\s*===?\s*[""']admin", "Admin role check", 0.85),
new GatePattern(@"isAdmin\(\)", "isAdmin function", 0.80),
new GatePattern(@"user\.roles\.includes\([""']admin", "Admin roles check", 0.85)
],
["typescript"] =
[
new GatePattern(@"req\.user\.role\s*===?\s*[""']admin", "Admin role check", 0.85),
new GatePattern(@"@Roles\([""']admin", "NestJS Roles decorator", 0.95),
new GatePattern(@"user\.roles\.includes\([""']admin", "Admin roles check", 0.85)
],
["python"] =
[
new GatePattern(@"@user_passes_test\(.*is_superuser", "Django superuser", 0.95),
new GatePattern(@"@permission_required\([""']admin", "Admin permission", 0.90),
new GatePattern(@"request\.user\.is_staff", "Django staff check", 0.85)
],
["go"] =
[
new GatePattern(@"\.HasRole\([""'][Aa]dmin", "Admin role check", 0.90),
new GatePattern(@"isAdmin\(", "Admin function call", 0.80)
],
["ruby"] =
[
new GatePattern(@"current_user\.admin\?", "Admin user check", 0.90),
new GatePattern(@"authorize! :manage", "CanCanCan manage", 0.90)
]
};
/// <summary>
/// Non-default configuration patterns.
/// </summary>
public static readonly IReadOnlyDictionary<string, IReadOnlyList<GatePattern>> ConfigPatterns = new Dictionary<string, IReadOnlyList<GatePattern>>
{
["csharp"] =
[
new GatePattern(@"IConfiguration\[.*\]\s*==\s*[""']true", "Config-gated feature", 0.75),
new GatePattern(@"options\.Value\.[A-Z].*Enabled", "Options pattern enabled", 0.80),
new GatePattern(@"configuration\.GetValue<bool>", "Config bool value", 0.75)
],
["java"] =
[
new GatePattern(@"@ConditionalOnProperty", "Spring conditional property", 0.90),
new GatePattern(@"@Value\([""']\$\{.*enabled", "Spring property enabled", 0.80),
new GatePattern(@"\.getProperty\([""'].*\.enabled", "Property enabled check", 0.75)
],
["javascript"] =
[
new GatePattern(@"config\.[a-z]+\.enabled", "Config enabled check", 0.75),
new GatePattern(@"process\.env\.[A-Z_]+_ENABLED", "Env enabled flag", 0.70),
new GatePattern(@"settings\.[a-z]+\.enabled", "Settings enabled", 0.75)
],
["typescript"] =
[
new GatePattern(@"config\.[a-z]+\.enabled", "Config enabled check", 0.75),
new GatePattern(@"process\.env\.[A-Z_]+_ENABLED", "Env enabled flag", 0.70)
],
["python"] =
[
new GatePattern(@"settings\.[A-Z_]+_ENABLED", "Django settings enabled", 0.75),
new GatePattern(@"os\.getenv\([""'][A-Z_]+_ENABLED", "Env enabled check", 0.70),
new GatePattern(@"config\.get\([""'].*enabled", "Config enabled", 0.75)
],
["go"] =
[
new GatePattern(@"viper\.GetBool\([""'].*enabled", "Viper bool config", 0.80),
new GatePattern(@"os\.Getenv\([""'][A-Z_]+_ENABLED", "Env enabled", 0.70)
],
["ruby"] =
[
new GatePattern(@"Rails\.configuration\.[a-z_]+_enabled", "Rails config enabled", 0.75),
new GatePattern(@"ENV\[[""'][A-Z_]+_ENABLED", "Env enabled", 0.70)
]
};
}
/// <summary>
/// A regex pattern for gate detection.
/// </summary>
/// <param name="Pattern">Regex pattern string</param>
/// <param name="Description">Human-readable description</param>
/// <param name="DefaultConfidence">Default confidence score (0.0-1.0)</param>
public sealed record GatePattern(string Pattern, string Description, double DefaultConfidence);

View File

@@ -0,0 +1,219 @@
using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Determinism.Calculators;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests.Determinism;
public sealed class FidelityMetricsServiceTests
{
private readonly FidelityMetricsService _service = new();
[Fact]
public void Calculate_WithAllIdentical_ReturnsFullScores()
{
var baselineHashes = new Dictionary<string, string>
{
["sbom.json"] = "sha256:abc",
["findings.ndjson"] = "sha256:def"
};
var replayHashes = new List<IReadOnlyDictionary<string, string>>
{
new Dictionary<string, string>
{
["sbom.json"] = "sha256:abc",
["findings.ndjson"] = "sha256:def"
}
};
var baselineFindings = CreateNormalizedFindings();
var replayFindings = new List<NormalizedFindings> { CreateNormalizedFindings() };
var baselineDecision = CreatePolicyDecision();
var replayDecisions = new List<PolicyDecision> { CreatePolicyDecision() };
var metrics = _service.Calculate(
baselineHashes, replayHashes,
baselineFindings, replayFindings,
baselineDecision, replayDecisions);
Assert.Equal(1.0, metrics.BitwiseFidelity);
Assert.Equal(1.0, metrics.SemanticFidelity);
Assert.Equal(1.0, metrics.PolicyFidelity);
Assert.Equal(1, metrics.TotalReplays);
Assert.Equal(1, metrics.IdenticalOutputs);
Assert.Equal(1, metrics.SemanticMatches);
Assert.Equal(1, metrics.PolicyMatches);
Assert.Null(metrics.Mismatches);
}
[Fact]
public void Calculate_WithMixedResults_ReturnsCorrectMetrics()
{
var baselineHashes = new Dictionary<string, string> { ["file.json"] = "hash1" };
var replayHashes = new List<IReadOnlyDictionary<string, string>>
{
new Dictionary<string, string> { ["file.json"] = "hash1" }, // Match
new Dictionary<string, string> { ["file.json"] = "hash2" }, // Mismatch
new Dictionary<string, string> { ["file.json"] = "hash1" } // Match
};
var baselineFindings = CreateNormalizedFindings();
var replayFindings = new List<NormalizedFindings>
{
CreateNormalizedFindings(),
CreateNormalizedFindings(),
CreateNormalizedFindings()
};
var baselineDecision = CreatePolicyDecision();
var replayDecisions = new List<PolicyDecision>
{
CreatePolicyDecision(),
CreatePolicyDecision(),
CreatePolicyDecision()
};
var metrics = _service.Calculate(
baselineHashes, replayHashes,
baselineFindings, replayFindings,
baselineDecision, replayDecisions);
Assert.Equal(2.0 / 3, metrics.BitwiseFidelity, precision: 4);
Assert.Equal(1.0, metrics.SemanticFidelity);
Assert.Equal(1.0, metrics.PolicyFidelity);
Assert.NotNull(metrics.Mismatches);
Assert.Single(metrics.Mismatches!);
}
[Fact]
public void Evaluate_WithPassingMetrics_ReturnsPass()
{
var metrics = new FidelityMetrics
{
BitwiseFidelity = 0.99,
SemanticFidelity = 1.0,
PolicyFidelity = 1.0,
TotalReplays = 10,
IdenticalOutputs = 10,
SemanticMatches = 10,
PolicyMatches = 10,
ComputedAt = DateTimeOffset.UtcNow
};
var thresholds = FidelityThresholds.Default;
var evaluation = _service.Evaluate(metrics, thresholds);
Assert.True(evaluation.Passed);
Assert.False(evaluation.ShouldBlockRelease);
Assert.Empty(evaluation.FailureReasons);
}
[Fact]
public void Evaluate_WithFailingBitwiseFidelity_ReturnsFail()
{
var metrics = new FidelityMetrics
{
BitwiseFidelity = 0.90, // Below 0.98 threshold
SemanticFidelity = 1.0,
PolicyFidelity = 1.0,
TotalReplays = 10,
IdenticalOutputs = 9,
SemanticMatches = 10,
PolicyMatches = 10,
ComputedAt = DateTimeOffset.UtcNow
};
var thresholds = FidelityThresholds.Default;
var evaluation = _service.Evaluate(metrics, thresholds);
Assert.False(evaluation.Passed);
Assert.Single(evaluation.FailureReasons);
Assert.Contains("BF", evaluation.FailureReasons[0]);
}
[Fact]
public void Evaluate_WithCriticallyLowBF_ShouldBlockRelease()
{
var metrics = new FidelityMetrics
{
BitwiseFidelity = 0.85, // Below 0.90 block threshold
SemanticFidelity = 1.0,
PolicyFidelity = 1.0,
TotalReplays = 10,
IdenticalOutputs = 8,
SemanticMatches = 10,
PolicyMatches = 10,
ComputedAt = DateTimeOffset.UtcNow
};
var thresholds = FidelityThresholds.Default;
var evaluation = _service.Evaluate(metrics, thresholds);
Assert.False(evaluation.Passed);
Assert.True(evaluation.ShouldBlockRelease);
}
[Fact]
public void Evaluate_WithRegulatedProject_UsesLowerThreshold()
{
var metrics = new FidelityMetrics
{
BitwiseFidelity = 0.96, // Above 0.95 regulated, below 0.98 general
SemanticFidelity = 1.0,
PolicyFidelity = 1.0,
TotalReplays = 10,
IdenticalOutputs = 9,
SemanticMatches = 10,
PolicyMatches = 10,
ComputedAt = DateTimeOffset.UtcNow
};
var thresholds = FidelityThresholds.Default;
var generalEval = _service.Evaluate(metrics, thresholds, isRegulated: false);
var regulatedEval = _service.Evaluate(metrics, thresholds, isRegulated: true);
Assert.False(generalEval.Passed); // Fails 0.98 threshold
Assert.True(regulatedEval.Passed); // Passes 0.95 threshold
}
[Fact]
public void Evaluate_WithMultipleFailures_ReportsAll()
{
var metrics = new FidelityMetrics
{
BitwiseFidelity = 0.90,
SemanticFidelity = 0.80,
PolicyFidelity = 0.70,
TotalReplays = 10,
IdenticalOutputs = 9,
SemanticMatches = 8,
PolicyMatches = 7,
ComputedAt = DateTimeOffset.UtcNow
};
var thresholds = FidelityThresholds.Default;
var evaluation = _service.Evaluate(metrics, thresholds);
Assert.False(evaluation.Passed);
Assert.Equal(3, evaluation.FailureReasons.Count);
}
private static NormalizedFindings CreateNormalizedFindings() => new()
{
Packages = new List<NormalizedPackage>
{
new("pkg:npm/test@1.0.0", "1.0.0")
},
Cves = new HashSet<string> { "CVE-2024-0001" },
SeverityCounts = new Dictionary<string, int> { ["MEDIUM"] = 1 },
Verdicts = new Dictionary<string, string> { ["overall"] = "pass" }
};
private static PolicyDecision CreatePolicyDecision() => new()
{
Passed = true,
ReasonCodes = new List<string> { "CLEAN" },
ViolationCount = 0,
BlockLevel = "none"
};
}

View File

@@ -0,0 +1,213 @@
using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Determinism.Calculators;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests.Determinism;
public sealed class PolicyFidelityCalculatorTests
{
private readonly PolicyFidelityCalculator _calculator = new();
[Fact]
public void Calculate_WithEmptyReplays_ReturnsFullScore()
{
var baseline = CreatePassingDecision();
var replays = Array.Empty<PolicyDecision>();
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(1.0, score);
Assert.Equal(0, matchCount);
Assert.Empty(mismatches);
}
[Fact]
public void Calculate_WithIdenticalDecisions_ReturnsFullScore()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
CreatePassingDecision(),
CreatePassingDecision()
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(1.0, score);
Assert.Equal(2, matchCount);
Assert.Empty(mismatches);
}
[Fact]
public void Calculate_WithDifferentOutcome_DetectsMismatch()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = false, // Different outcome
ReasonCodes = new List<string> { "NO_VIOLATIONS" },
ViolationCount = 0,
BlockLevel = "none"
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(0.0, score);
Assert.Equal(0, matchCount);
Assert.Single(mismatches);
Assert.Equal(FidelityMismatchType.PolicyDrift, mismatches[0].Type);
Assert.Contains("outcome:True→False", mismatches[0].AffectedArtifacts!);
}
[Fact]
public void Calculate_WithDifferentReasonCodes_DetectsMismatch()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = true,
ReasonCodes = new List<string> { "DIFFERENT_REASON" }, // Different reason
ViolationCount = 0,
BlockLevel = "none"
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(0.0, score);
Assert.Contains("reason_codes", mismatches[0].AffectedArtifacts!);
}
[Fact]
public void Calculate_WithDifferentViolationCount_DetectsMismatch()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = true,
ReasonCodes = new List<string> { "NO_VIOLATIONS" },
ViolationCount = 5, // Different count
BlockLevel = "none"
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(0.0, score);
Assert.Contains("violations:0→5", mismatches[0].AffectedArtifacts!);
}
[Fact]
public void Calculate_WithDifferentBlockLevel_DetectsMismatch()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = true,
ReasonCodes = new List<string> { "NO_VIOLATIONS" },
ViolationCount = 0,
BlockLevel = "warn" // Different block level
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(0.0, score);
Assert.Contains("block_level:none→warn", mismatches[0].AffectedArtifacts!);
}
[Fact]
public void Calculate_WithMultipleDifferences_ReportsAll()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = false, // Different
ReasonCodes = new List<string> { "CRITICAL_VULN" }, // Different
ViolationCount = 3, // Different
BlockLevel = "block" // Different
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(0.0, score);
Assert.Single(mismatches);
var mismatch = mismatches[0];
Assert.Equal(4, mismatch.AffectedArtifacts!.Count); // All 4 differences detected
}
[Fact]
public void Calculate_WithPartialMatches_ReturnsCorrectScore()
{
var baseline = CreatePassingDecision();
var replays = new List<PolicyDecision>
{
CreatePassingDecision(), // Match
new PolicyDecision // Mismatch
{
Passed = false,
ReasonCodes = new List<string>(),
ViolationCount = 1,
BlockLevel = "block"
},
CreatePassingDecision(), // Match
CreatePassingDecision() // Match
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(3.0 / 4, score, precision: 4);
Assert.Equal(3, matchCount);
Assert.Single(mismatches);
Assert.Equal(1, mismatches[0].RunIndex);
}
[Fact]
public void Calculate_WithReasonCodesInDifferentOrder_StillMatches()
{
var baseline = new PolicyDecision
{
Passed = true,
ReasonCodes = new List<string> { "CODE_A", "CODE_B", "CODE_C" },
ViolationCount = 0,
BlockLevel = "none"
};
var replays = new List<PolicyDecision>
{
new PolicyDecision
{
Passed = true,
ReasonCodes = new List<string> { "CODE_C", "CODE_A", "CODE_B" }, // Different order
ViolationCount = 0,
BlockLevel = "none"
}
};
var (score, matchCount, mismatches) = _calculator.Calculate(baseline, replays);
Assert.Equal(1.0, score);
Assert.Equal(1, matchCount);
Assert.Empty(mismatches);
}
private static PolicyDecision CreatePassingDecision() => new()
{
Passed = true,
ReasonCodes = new List<string> { "NO_VIOLATIONS" },
ViolationCount = 0,
BlockLevel = "none",
PolicyHash = "sha256:abc123"
};
}

View File

@@ -0,0 +1,164 @@
namespace StellaOps.Scheduler.Storage.Postgres.Models;
/// <summary>
/// Scope type for failure signatures.
/// </summary>
public enum FailureSignatureScopeType
{
/// <summary>Repository scope.</summary>
Repo,
/// <summary>Container image scope.</summary>
Image,
/// <summary>Artifact scope.</summary>
Artifact,
/// <summary>Global scope (all tenants).</summary>
Global
}
/// <summary>
/// Error category for failure classification.
/// </summary>
public enum ErrorCategory
{
/// <summary>Network-related failure.</summary>
Network,
/// <summary>Authentication/authorization failure.</summary>
Auth,
/// <summary>Validation failure.</summary>
Validation,
/// <summary>Resource exhaustion (memory, disk, CPU).</summary>
Resource,
/// <summary>Operation timeout.</summary>
Timeout,
/// <summary>Configuration error.</summary>
Config,
/// <summary>Unknown/uncategorized error.</summary>
Unknown
}
/// <summary>
/// Resolution status for failure signatures.
/// </summary>
public enum ResolutionStatus
{
/// <summary>Issue is not yet resolved.</summary>
Unresolved,
/// <summary>Issue is being investigated.</summary>
Investigating,
/// <summary>Issue has been resolved.</summary>
Resolved,
/// <summary>Issue will not be fixed.</summary>
WontFix
}
/// <summary>
/// Predicted outcome for TTFS hints.
/// </summary>
public enum PredictedOutcome
{
/// <summary>Prediction not available.</summary>
Unknown,
/// <summary>Expected to pass.</summary>
Pass,
/// <summary>Expected to fail.</summary>
Fail,
/// <summary>Expected to be flaky.</summary>
Flaky
}
/// <summary>
/// Represents a failure signature entity for predictive TTFS hints.
/// Tracks common failure patterns by scope, toolchain, and error code.
/// </summary>
public sealed class FailureSignatureEntity
{
/// <summary>
/// Unique signature identifier.
/// </summary>
public Guid SignatureId { get; init; }
/// <summary>
/// Tenant this signature belongs to.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// When this signature was created.
/// </summary>
public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// When this signature was last updated.
/// </summary>
public DateTimeOffset UpdatedAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// Type of scope for this signature.
/// </summary>
public FailureSignatureScopeType ScopeType { get; init; }
/// <summary>
/// Identifier within the scope (repo name, image digest, etc).
/// </summary>
public required string ScopeId { get; init; }
/// <summary>
/// Hash of the toolchain/build environment.
/// </summary>
public required string ToolchainHash { get; init; }
/// <summary>
/// Error code if available.
/// </summary>
public string? ErrorCode { get; init; }
/// <summary>
/// Category of error.
/// </summary>
public ErrorCategory? ErrorCategory { get; init; }
/// <summary>
/// Number of times this signature has been seen.
/// </summary>
public int OccurrenceCount { get; init; } = 1;
/// <summary>
/// When this signature was first seen.
/// </summary>
public DateTimeOffset FirstSeenAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// When this signature was last seen.
/// </summary>
public DateTimeOffset LastSeenAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// Current resolution status.
/// </summary>
public ResolutionStatus ResolutionStatus { get; init; } = ResolutionStatus.Unresolved;
/// <summary>
/// Notes about resolution.
/// </summary>
public string? ResolutionNotes { get; init; }
/// <summary>
/// When the issue was resolved.
/// </summary>
public DateTimeOffset? ResolvedAt { get; init; }
/// <summary>
/// Who resolved the issue.
/// </summary>
public string? ResolvedBy { get; init; }
/// <summary>
/// Predicted outcome based on this signature.
/// </summary>
public PredictedOutcome PredictedOutcome { get; init; } = PredictedOutcome.Unknown;
/// <summary>
/// Confidence score for the prediction (0.0 to 1.0).
/// </summary>
public decimal? ConfidenceScore { get; init; }
}

View File

@@ -0,0 +1,440 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Models;
namespace StellaOps.Scheduler.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for failure signature operations.
/// </summary>
public sealed class FailureSignatureRepository : RepositoryBase<SchedulerDataSource>, IFailureSignatureRepository
{
/// <summary>
/// Creates a new failure signature repository.
/// </summary>
public FailureSignatureRepository(SchedulerDataSource dataSource, ILogger<FailureSignatureRepository> logger)
: base(dataSource, logger)
{
}
/// <inheritdoc />
public async Task<FailureSignatureEntity> CreateAsync(
FailureSignatureEntity signature,
CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO scheduler.failure_signatures (
signature_id, tenant_id, scope_type, scope_id, toolchain_hash,
error_code, error_category, occurrence_count, first_seen_at, last_seen_at,
resolution_status, resolution_notes, predicted_outcome, confidence_score
)
VALUES (
@signature_id, @tenant_id, @scope_type, @scope_id, @toolchain_hash,
@error_code, @error_category, @occurrence_count, @first_seen_at, @last_seen_at,
@resolution_status, @resolution_notes, @predicted_outcome, @confidence_score
)
RETURNING *
""";
await using var connection = await DataSource.OpenConnectionAsync(signature.TenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddSignatureParameters(command, signature);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
return MapSignature(reader);
}
/// <inheritdoc />
public async Task<FailureSignatureEntity?> GetByIdAsync(
string tenantId,
Guid signatureId,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id AND signature_id = @signature_id
""";
return await QuerySingleOrDefaultAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "signature_id", signatureId);
},
MapSignature,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<FailureSignatureEntity?> GetByKeyAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
string toolchainHash,
string? errorCode,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id
AND scope_type = @scope_type
AND scope_id = @scope_id
AND toolchain_hash = @toolchain_hash
AND (error_code = @error_code OR (@error_code IS NULL AND error_code IS NULL))
""";
return await QuerySingleOrDefaultAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "scope_type", scopeType.ToString().ToLowerInvariant());
AddParameter(cmd, "scope_id", scopeId);
AddParameter(cmd, "toolchain_hash", toolchainHash);
AddParameter(cmd, "error_code", errorCode ?? (object)DBNull.Value);
},
MapSignature,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<FailureSignatureEntity>> GetByScopeAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id
AND scope_type = @scope_type
AND scope_id = @scope_id
ORDER BY last_seen_at DESC
""";
return await QueryListAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "scope_type", scopeType.ToString().ToLowerInvariant());
AddParameter(cmd, "scope_id", scopeId);
},
MapSignature,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<FailureSignatureEntity>> GetUnresolvedAsync(
string tenantId,
int limit = 100,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id
AND resolution_status = 'unresolved'
ORDER BY occurrence_count DESC, last_seen_at DESC
LIMIT @limit
""";
return await QueryListAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "limit", limit);
},
MapSignature,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<FailureSignatureEntity>> GetByPredictedOutcomeAsync(
string tenantId,
PredictedOutcome outcome,
decimal minConfidence = 0.5m,
int limit = 100,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id
AND predicted_outcome = @predicted_outcome
AND confidence_score >= @min_confidence
ORDER BY confidence_score DESC, last_seen_at DESC
LIMIT @limit
""";
return await QueryListAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "predicted_outcome", outcome.ToString().ToLowerInvariant());
AddParameter(cmd, "min_confidence", minConfidence);
AddParameter(cmd, "limit", limit);
},
MapSignature,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<FailureSignatureEntity> UpsertOccurrenceAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
string toolchainHash,
string? errorCode,
ErrorCategory? errorCategory,
CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO scheduler.failure_signatures (
signature_id, tenant_id, scope_type, scope_id, toolchain_hash,
error_code, error_category, occurrence_count, first_seen_at, last_seen_at
)
VALUES (
gen_random_uuid(), @tenant_id, @scope_type, @scope_id, @toolchain_hash,
@error_code, @error_category, 1, NOW(), NOW()
)
ON CONFLICT (tenant_id, scope_type, scope_id, toolchain_hash, error_code)
DO UPDATE SET
occurrence_count = scheduler.failure_signatures.occurrence_count + 1,
last_seen_at = NOW(),
updated_at = NOW(),
error_category = COALESCE(EXCLUDED.error_category, scheduler.failure_signatures.error_category)
RETURNING *
""";
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
AddParameter(command, "scope_type", scopeType.ToString().ToLowerInvariant());
AddParameter(command, "scope_id", scopeId);
AddParameter(command, "toolchain_hash", toolchainHash);
AddParameter(command, "error_code", errorCode ?? (object)DBNull.Value);
AddParameter(command, "error_category", errorCategory?.ToString().ToLowerInvariant() ?? (object)DBNull.Value);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
return MapSignature(reader);
}
/// <inheritdoc />
public async Task<bool> UpdateResolutionAsync(
string tenantId,
Guid signatureId,
ResolutionStatus status,
string? notes,
string? resolvedBy,
CancellationToken cancellationToken = default)
{
const string sql = """
UPDATE scheduler.failure_signatures
SET resolution_status = @resolution_status,
resolution_notes = @resolution_notes,
resolved_by = @resolved_by,
resolved_at = CASE WHEN @resolution_status = 'resolved' THEN NOW() ELSE resolved_at END,
updated_at = NOW()
WHERE tenant_id = @tenant_id AND signature_id = @signature_id
""";
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
AddParameter(command, "signature_id", signatureId);
AddParameter(command, "resolution_status", status.ToString().ToLowerInvariant());
AddParameter(command, "resolution_notes", notes ?? (object)DBNull.Value);
AddParameter(command, "resolved_by", resolvedBy ?? (object)DBNull.Value);
var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
return rowsAffected > 0;
}
/// <inheritdoc />
public async Task<bool> UpdatePredictionAsync(
string tenantId,
Guid signatureId,
PredictedOutcome outcome,
decimal confidence,
CancellationToken cancellationToken = default)
{
const string sql = """
UPDATE scheduler.failure_signatures
SET predicted_outcome = @predicted_outcome,
confidence_score = @confidence_score,
updated_at = NOW()
WHERE tenant_id = @tenant_id AND signature_id = @signature_id
""";
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
AddParameter(command, "signature_id", signatureId);
AddParameter(command, "predicted_outcome", outcome.ToString().ToLowerInvariant());
AddParameter(command, "confidence_score", confidence);
var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
return rowsAffected > 0;
}
/// <inheritdoc />
public async Task<bool> DeleteAsync(
string tenantId,
Guid signatureId,
CancellationToken cancellationToken = default)
{
const string sql = """
DELETE FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id AND signature_id = @signature_id
""";
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
AddParameter(command, "signature_id", signatureId);
var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
return rowsAffected > 0;
}
/// <inheritdoc />
public async Task<int> PruneResolvedAsync(
string tenantId,
TimeSpan olderThan,
CancellationToken cancellationToken = default)
{
const string sql = """
DELETE FROM scheduler.failure_signatures
WHERE tenant_id = @tenant_id
AND resolution_status = 'resolved'
AND resolved_at < @cutoff
""";
var cutoff = DateTimeOffset.UtcNow.Subtract(olderThan);
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
.ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
AddParameter(command, "cutoff", cutoff);
return await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
private void AddSignatureParameters(NpgsqlCommand command, FailureSignatureEntity signature)
{
AddParameter(command, "signature_id", signature.SignatureId == Guid.Empty ? Guid.NewGuid() : signature.SignatureId);
AddParameter(command, "tenant_id", signature.TenantId);
AddParameter(command, "scope_type", signature.ScopeType.ToString().ToLowerInvariant());
AddParameter(command, "scope_id", signature.ScopeId);
AddParameter(command, "toolchain_hash", signature.ToolchainHash);
AddParameter(command, "error_code", signature.ErrorCode ?? (object)DBNull.Value);
AddParameter(command, "error_category", signature.ErrorCategory?.ToString().ToLowerInvariant() ?? (object)DBNull.Value);
AddParameter(command, "occurrence_count", signature.OccurrenceCount);
AddParameter(command, "first_seen_at", signature.FirstSeenAt);
AddParameter(command, "last_seen_at", signature.LastSeenAt);
AddParameter(command, "resolution_status", signature.ResolutionStatus.ToString().ToLowerInvariant());
AddParameter(command, "resolution_notes", signature.ResolutionNotes ?? (object)DBNull.Value);
AddParameter(command, "predicted_outcome", signature.PredictedOutcome.ToString().ToLowerInvariant());
AddParameter(command, "confidence_score", signature.ConfidenceScore ?? (object)DBNull.Value);
}
private static FailureSignatureEntity MapSignature(NpgsqlDataReader reader)
{
return new FailureSignatureEntity
{
SignatureId = reader.GetGuid(reader.GetOrdinal("signature_id")),
TenantId = reader.GetString(reader.GetOrdinal("tenant_id")),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("created_at")),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("updated_at")),
ScopeType = ParseScopeType(reader.GetString(reader.GetOrdinal("scope_type"))),
ScopeId = reader.GetString(reader.GetOrdinal("scope_id")),
ToolchainHash = reader.GetString(reader.GetOrdinal("toolchain_hash")),
ErrorCode = reader.IsDBNull(reader.GetOrdinal("error_code"))
? null
: reader.GetString(reader.GetOrdinal("error_code")),
ErrorCategory = reader.IsDBNull(reader.GetOrdinal("error_category"))
? null
: ParseErrorCategory(reader.GetString(reader.GetOrdinal("error_category"))),
OccurrenceCount = reader.GetInt32(reader.GetOrdinal("occurrence_count")),
FirstSeenAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("first_seen_at")),
LastSeenAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("last_seen_at")),
ResolutionStatus = ParseResolutionStatus(reader.GetString(reader.GetOrdinal("resolution_status"))),
ResolutionNotes = reader.IsDBNull(reader.GetOrdinal("resolution_notes"))
? null
: reader.GetString(reader.GetOrdinal("resolution_notes")),
ResolvedAt = reader.IsDBNull(reader.GetOrdinal("resolved_at"))
? null
: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("resolved_at")),
ResolvedBy = reader.IsDBNull(reader.GetOrdinal("resolved_by"))
? null
: reader.GetString(reader.GetOrdinal("resolved_by")),
PredictedOutcome = reader.IsDBNull(reader.GetOrdinal("predicted_outcome"))
? PredictedOutcome.Unknown
: ParsePredictedOutcome(reader.GetString(reader.GetOrdinal("predicted_outcome"))),
ConfidenceScore = reader.IsDBNull(reader.GetOrdinal("confidence_score"))
? null
: reader.GetDecimal(reader.GetOrdinal("confidence_score"))
};
}
private static FailureSignatureScopeType ParseScopeType(string value) => value.ToLowerInvariant() switch
{
"repo" => FailureSignatureScopeType.Repo,
"image" => FailureSignatureScopeType.Image,
"artifact" => FailureSignatureScopeType.Artifact,
"global" => FailureSignatureScopeType.Global,
_ => throw new ArgumentException($"Unknown scope type: {value}")
};
private static ErrorCategory ParseErrorCategory(string value) => value.ToLowerInvariant() switch
{
"network" => ErrorCategory.Network,
"auth" => ErrorCategory.Auth,
"validation" => ErrorCategory.Validation,
"resource" => ErrorCategory.Resource,
"timeout" => ErrorCategory.Timeout,
"config" => ErrorCategory.Config,
_ => ErrorCategory.Unknown
};
private static ResolutionStatus ParseResolutionStatus(string value) => value.ToLowerInvariant() switch
{
"unresolved" => ResolutionStatus.Unresolved,
"investigating" => ResolutionStatus.Investigating,
"resolved" => ResolutionStatus.Resolved,
"wont_fix" or "wontfix" => ResolutionStatus.WontFix,
_ => ResolutionStatus.Unresolved
};
private static PredictedOutcome ParsePredictedOutcome(string value) => value.ToLowerInvariant() switch
{
"pass" => PredictedOutcome.Pass,
"fail" => PredictedOutcome.Fail,
"flaky" => PredictedOutcome.Flaky,
_ => PredictedOutcome.Unknown
};
}

View File

@@ -0,0 +1,112 @@
using StellaOps.Scheduler.Storage.Postgres.Models;
namespace StellaOps.Scheduler.Storage.Postgres.Repositories;
/// <summary>
/// Repository interface for failure signature operations.
/// </summary>
public interface IFailureSignatureRepository
{
/// <summary>
/// Creates a new failure signature.
/// </summary>
Task<FailureSignatureEntity> CreateAsync(
FailureSignatureEntity signature,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a failure signature by ID.
/// </summary>
Task<FailureSignatureEntity?> GetByIdAsync(
string tenantId,
Guid signatureId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a failure signature by its unique key (scope + toolchain + error code).
/// </summary>
Task<FailureSignatureEntity?> GetByKeyAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
string toolchainHash,
string? errorCode,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all failure signatures for a scope.
/// </summary>
Task<IReadOnlyList<FailureSignatureEntity>> GetByScopeAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all unresolved failure signatures for a tenant.
/// </summary>
Task<IReadOnlyList<FailureSignatureEntity>> GetUnresolvedAsync(
string tenantId,
int limit = 100,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets failure signatures matching a predicted outcome.
/// </summary>
Task<IReadOnlyList<FailureSignatureEntity>> GetByPredictedOutcomeAsync(
string tenantId,
PredictedOutcome outcome,
decimal minConfidence = 0.5m,
int limit = 100,
CancellationToken cancellationToken = default);
/// <summary>
/// Increments the occurrence count and updates last seen timestamp.
/// Creates the signature if it doesn't exist (upsert).
/// </summary>
Task<FailureSignatureEntity> UpsertOccurrenceAsync(
string tenantId,
FailureSignatureScopeType scopeType,
string scopeId,
string toolchainHash,
string? errorCode,
ErrorCategory? errorCategory,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the resolution status of a signature.
/// </summary>
Task<bool> UpdateResolutionAsync(
string tenantId,
Guid signatureId,
ResolutionStatus status,
string? notes,
string? resolvedBy,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the predicted outcome for a signature.
/// </summary>
Task<bool> UpdatePredictionAsync(
string tenantId,
Guid signatureId,
PredictedOutcome outcome,
decimal confidence,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a failure signature.
/// </summary>
Task<bool> DeleteAsync(
string tenantId,
Guid signatureId,
CancellationToken cancellationToken = default);
/// <summary>
/// Prunes old resolved signatures.
/// </summary>
Task<int> PruneResolvedAsync(
string tenantId,
TimeSpan olderThan,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,311 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Storage.Postgres.Models;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.Worker.Indexing;
/// <summary>
/// Options for the failure signature indexer.
/// </summary>
public sealed class FailureSignatureIndexerOptions
{
/// <summary>
/// Interval between indexing runs.
/// </summary>
public TimeSpan IndexInterval { get; set; } = TimeSpan.FromMinutes(5);
/// <summary>
/// Whether the indexer is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Batch size for processing job failures.
/// </summary>
public int BatchSize { get; set; } = 100;
/// <summary>
/// Age threshold for pruning resolved signatures.
/// </summary>
public TimeSpan PruneResolvedOlderThan { get; set; } = TimeSpan.FromDays(90);
}
/// <summary>
/// Background service that indexes job failures into failure signatures.
/// Analyzes completed jobs to identify patterns for predictive TTFS hints.
/// </summary>
public sealed class FailureSignatureIndexer : BackgroundService
{
private readonly IFailureSignatureRepository _signatureRepository;
private readonly IJobRepository _jobRepository;
private readonly IJobHistoryRepository _historyRepository;
private readonly IOptions<FailureSignatureIndexerOptions> _options;
private readonly ILogger<FailureSignatureIndexer> _logger;
public FailureSignatureIndexer(
IFailureSignatureRepository signatureRepository,
IJobRepository jobRepository,
IJobHistoryRepository historyRepository,
IOptions<FailureSignatureIndexerOptions> options,
ILogger<FailureSignatureIndexer> logger)
{
_signatureRepository = signatureRepository;
_jobRepository = jobRepository;
_historyRepository = historyRepository;
_options = options;
_logger = logger;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
if (!_options.Value.Enabled)
{
_logger.LogInformation("Failure signature indexer is disabled");
return;
}
_logger.LogInformation("Starting failure signature indexer with interval {Interval}",
_options.Value.IndexInterval);
while (!stoppingToken.IsCancellationRequested)
{
try
{
await IndexFailuresAsync(stoppingToken);
await PruneOldSignaturesAsync(stoppingToken);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
break;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error during failure signature indexing");
}
await Task.Delay(_options.Value.IndexInterval, stoppingToken);
}
}
private async Task IndexFailuresAsync(CancellationToken ct)
{
_logger.LogDebug("Starting failure indexing batch");
// Get recent failed jobs that haven't been indexed
var failedJobs = await _historyRepository.GetRecentFailedJobsAsync(
_options.Value.BatchSize,
ct);
var indexed = 0;
foreach (var job in failedJobs)
{
try
{
var signature = await ExtractSignatureAsync(job, ct);
if (signature != null)
{
await _signatureRepository.UpsertOccurrenceAsync(
job.TenantId,
signature.ScopeType,
signature.ScopeId,
signature.ToolchainHash,
signature.ErrorCode,
signature.ErrorCategory,
ct);
indexed++;
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to index signature for job {JobId}", job.JobId);
}
}
if (indexed > 0)
{
_logger.LogInformation("Indexed {Count} failure signatures", indexed);
}
}
private async Task PruneOldSignaturesAsync(CancellationToken ct)
{
// Prune is expensive, only run occasionally
var random = Random.Shared.Next(0, 12);
if (random != 0)
{
return;
}
_logger.LogDebug("Starting resolved signature pruning");
// Get all tenants with resolved signatures
// In production, this would be paginated
try
{
var pruned = await _signatureRepository.PruneResolvedAsync(
"*", // All tenants
_options.Value.PruneResolvedOlderThan,
ct);
if (pruned > 0)
{
_logger.LogInformation("Pruned {Count} old resolved signatures", pruned);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to prune resolved signatures");
}
}
private Task<FailureSignatureExtraction?> ExtractSignatureAsync(
FailedJobRecord job,
CancellationToken ct)
{
// Extract signature from job failure
// This would analyze the job metadata, error details, etc.
var scopeType = DetermineScopeType(job);
var scopeId = ExtractScopeId(job, scopeType);
var toolchainHash = ComputeToolchainHash(job);
var (errorCode, category) = ClassifyError(job);
if (string.IsNullOrEmpty(scopeId) || string.IsNullOrEmpty(toolchainHash))
{
return Task.FromResult<FailureSignatureExtraction?>(null);
}
var extraction = new FailureSignatureExtraction
{
ScopeType = scopeType,
ScopeId = scopeId,
ToolchainHash = toolchainHash,
ErrorCode = errorCode,
ErrorCategory = category
};
return Task.FromResult<FailureSignatureExtraction?>(extraction);
}
private static FailureSignatureScopeType DetermineScopeType(FailedJobRecord job)
{
// Determine scope based on job type and context
if (!string.IsNullOrEmpty(job.ImageDigest))
{
return FailureSignatureScopeType.Image;
}
if (!string.IsNullOrEmpty(job.ArtifactDigest))
{
return FailureSignatureScopeType.Artifact;
}
if (!string.IsNullOrEmpty(job.Repository))
{
return FailureSignatureScopeType.Repo;
}
return FailureSignatureScopeType.Global;
}
private static string ExtractScopeId(FailedJobRecord job, FailureSignatureScopeType scopeType)
{
return scopeType switch
{
FailureSignatureScopeType.Image => job.ImageDigest ?? "",
FailureSignatureScopeType.Artifact => job.ArtifactDigest ?? "",
FailureSignatureScopeType.Repo => job.Repository ?? "",
FailureSignatureScopeType.Global => "global",
_ => ""
};
}
private static string ComputeToolchainHash(FailedJobRecord job)
{
// Compute a fingerprint of the build/scan environment
// This includes scanner versions, tool versions, etc.
var components = new[]
{
job.JobType,
job.ScannerVersion ?? "unknown",
job.RuntimeVersion ?? "unknown"
};
var combined = string.Join("|", components);
var hash = System.Security.Cryptography.SHA256.HashData(
System.Text.Encoding.UTF8.GetBytes(combined));
return Convert.ToHexStringLower(hash[..8]); // First 8 bytes
}
private static (string? ErrorCode, ErrorCategory Category) ClassifyError(FailedJobRecord job)
{
// Classify error based on error message and details
var error = job.Error?.ToLowerInvariant() ?? "";
var errorCode = job.ErrorCode;
if (error.Contains("timeout") || error.Contains("timed out"))
{
return (errorCode, ErrorCategory.Timeout);
}
if (error.Contains("unauthorized") || error.Contains("authentication") || error.Contains("401"))
{
return (errorCode, ErrorCategory.Auth);
}
if (error.Contains("network") || error.Contains("connection refused") || error.Contains("dns"))
{
return (errorCode, ErrorCategory.Network);
}
if (error.Contains("validation") || error.Contains("invalid") || error.Contains("malformed"))
{
return (errorCode, ErrorCategory.Validation);
}
if (error.Contains("out of memory") || error.Contains("disk full") || error.Contains("resource"))
{
return (errorCode, ErrorCategory.Resource);
}
if (error.Contains("config") || error.Contains("configuration"))
{
return (errorCode, ErrorCategory.Config);
}
return (errorCode, ErrorCategory.Unknown);
}
}
/// <summary>
/// Extracted failure signature data.
/// </summary>
internal sealed class FailureSignatureExtraction
{
public FailureSignatureScopeType ScopeType { get; init; }
public required string ScopeId { get; init; }
public required string ToolchainHash { get; init; }
public string? ErrorCode { get; init; }
public ErrorCategory ErrorCategory { get; init; }
}
/// <summary>
/// Record representing a failed job for signature extraction.
/// </summary>
public sealed record FailedJobRecord
{
public required Guid JobId { get; init; }
public required string TenantId { get; init; }
public required string JobType { get; init; }
public string? ImageDigest { get; init; }
public string? ArtifactDigest { get; init; }
public string? Repository { get; init; }
public string? Error { get; init; }
public string? ErrorCode { get; init; }
public string? ScannerVersion { get; init; }
public string? RuntimeVersion { get; init; }
public DateTimeOffset FailedAt { get; init; }
}