Add comprehensive security tests for OWASP A03 (Injection) and A10 (SSRF)

- Implemented InjectionTests.cs to cover SQL, NoSQL, command, LDAP, and XPath injection vulnerabilities.
- Created SsrfTests.cs to test for Server-Side Request Forgery (SSRF) vulnerabilities, including internal URL access, cloud metadata access, and URL allowlist bypass attempts.
- Introduced MaliciousPayloads.cs to store a collection of malicious payloads for testing various security vulnerabilities.
- Added SecurityAssertions.cs for common security-specific assertion helpers.
- Established SecurityTestBase.cs as a base class for security tests, providing common infrastructure and mocking utilities (see the sketch after this list).
- Configured the test project StellaOps.Security.Tests.csproj with necessary dependencies for testing.
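
A minimal usage sketch of how these pieces might compose, assuming an xUnit-style test; the helper members used below (CreateClient, SqlInjectionPayloads, ShouldNotReflectPayload) and the /api/search endpoint are hypothetical, not taken from this commit:

// Hypothetical sketch only: SecurityTestBase, MaliciousPayloads, and SecurityAssertions
// are the classes described above, but the members used here are assumed.
public sealed class SqlInjectionSketch : SecurityTestBase
{
    [Fact]
    public async Task Search_endpoint_neutralizes_sql_payloads()
    {
        var client = CreateClient();                                      // assumed SecurityTestBase helper
        foreach (var payload in MaliciousPayloads.SqlInjectionPayloads)   // assumed payload collection
        {
            var response = await client.GetAsync(
                $"/api/search?q={Uri.EscapeDataString(payload)}");
            SecurityAssertions.ShouldNotReflectPayload(response, payload); // assumed assertion helper
        }
    }
}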
Contained in: master
Committed: 2025-12-16 13:11:57 +02:00
Commit: b55d9fa68d (parent: 5a480a3c2a)
72 changed files with 8051 additions and 71 deletions


@@ -0,0 +1,306 @@
// =============================================================================
// EvidenceGraph.cs
// Evidence graph schema and deterministic serializer
// Part of Step 5: Graph Emission
// =============================================================================
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Evidence graph representing the reconciled evidence for a set of artifacts.
/// Designed for deterministic serialization and integrity verification.
/// </summary>
public sealed class EvidenceGraph
{
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
[JsonPropertyName("schemaVersion")]
public string SchemaVersion { get; init; } = "1.0.0";
/// <summary>
/// Generation timestamp in ISO 8601 UTC format.
/// </summary>
[JsonPropertyName("generatedAt")]
public string GeneratedAt { get; init; } = DateTimeOffset.UtcNow.ToString("O");
/// <summary>
/// Generator tool identifier.
/// </summary>
[JsonPropertyName("generator")]
public string Generator { get; init; } = "StellaOps.AirGap.Importer";
/// <summary>
/// Artifact nodes in the graph.
/// </summary>
[JsonPropertyName("nodes")]
public IReadOnlyList<EvidenceNode> Nodes { get; init; } = [];
/// <summary>
/// Edges representing relationships between nodes.
/// </summary>
[JsonPropertyName("edges")]
public IReadOnlyList<EvidenceEdge> Edges { get; init; } = [];
/// <summary>
/// Metadata about the reconciliation process.
/// </summary>
[JsonPropertyName("metadata")]
public EvidenceGraphMetadata Metadata { get; init; } = new();
}
/// <summary>
/// A node in the evidence graph representing an artifact with collected evidence.
/// </summary>
public sealed record EvidenceNode
{
/// <summary>
/// Node identifier (typically the artifact digest).
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Node type (artifact, sbom, attestation, vex).
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Normalized artifact digest.
/// </summary>
[JsonPropertyName("digest")]
public string? Digest { get; init; }
/// <summary>
/// Human-readable name or label.
/// </summary>
[JsonPropertyName("name")]
public string? Name { get; init; }
/// <summary>
/// Associated SBOM references.
/// </summary>
[JsonPropertyName("sboms")]
public IReadOnlyList<SbomNodeRef>? Sboms { get; init; }
/// <summary>
/// Associated attestation references.
/// </summary>
[JsonPropertyName("attestations")]
public IReadOnlyList<AttestationNodeRef>? Attestations { get; init; }
/// <summary>
/// Merged VEX statements.
/// </summary>
[JsonPropertyName("vexStatements")]
public IReadOnlyList<VexStatementRef>? VexStatements { get; init; }
}
/// <summary>
/// Reference to an SBOM in the evidence graph.
/// </summary>
public sealed record SbomNodeRef
{
[JsonPropertyName("format")]
public required string Format { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("contentHash")]
public required string ContentHash { get; init; }
}
/// <summary>
/// Reference to an attestation in the evidence graph.
/// </summary>
public sealed record AttestationNodeRef
{
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("signatureValid")]
public bool SignatureValid { get; init; }
[JsonPropertyName("rekorVerified")]
public bool RekorVerified { get; init; }
}
/// <summary>
/// Merged VEX statement reference in the evidence graph.
/// </summary>
public sealed record VexStatementRef
{
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("source")]
public required string Source { get; init; }
[JsonPropertyName("justification")]
public string? Justification { get; init; }
}
/// <summary>
/// An edge in the evidence graph representing a relationship.
/// </summary>
public sealed record EvidenceEdge
{
/// <summary>
/// Source node identifier.
/// </summary>
[JsonPropertyName("source")]
public required string Source { get; init; }
/// <summary>
/// Target node identifier.
/// </summary>
[JsonPropertyName("target")]
public required string Target { get; init; }
/// <summary>
/// Relationship type.
/// </summary>
[JsonPropertyName("relationship")]
public required string Relationship { get; init; }
}
/// <summary>
/// Metadata about the reconciliation process.
/// </summary>
public sealed record EvidenceGraphMetadata
{
[JsonPropertyName("artifactCount")]
public int ArtifactCount { get; init; }
[JsonPropertyName("sbomCount")]
public int SbomCount { get; init; }
[JsonPropertyName("attestationCount")]
public int AttestationCount { get; init; }
[JsonPropertyName("vexStatementCount")]
public int VexStatementCount { get; init; }
[JsonPropertyName("conflictCount")]
public int ConflictCount { get; init; }
[JsonPropertyName("reconciliationDurationMs")]
public long ReconciliationDurationMs { get; init; }
}
/// <summary>
/// Serializes evidence graphs deterministically for integrity verification.
/// </summary>
public sealed class EvidenceGraphSerializer
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
private static readonly JsonSerializerOptions PrettySerializerOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
/// <summary>
/// Serializes an evidence graph to deterministic JSON.
/// </summary>
public string Serialize(EvidenceGraph graph, bool pretty = false)
{
ArgumentNullException.ThrowIfNull(graph);
// Ensure deterministic ordering
var orderedGraph = new EvidenceGraph
{
SchemaVersion = graph.SchemaVersion,
GeneratedAt = graph.GeneratedAt,
Generator = graph.Generator,
Nodes = graph.Nodes
.OrderBy(n => n.Id, StringComparer.Ordinal)
.ToList(),
Edges = graph.Edges
.OrderBy(e => e.Source, StringComparer.Ordinal)
.ThenBy(e => e.Target, StringComparer.Ordinal)
.ThenBy(e => e.Relationship, StringComparer.Ordinal)
.ToList(),
Metadata = graph.Metadata
};
return JsonSerializer.Serialize(
orderedGraph,
pretty ? PrettySerializerOptions : SerializerOptions);
}
/// <summary>
/// Computes the SHA-256 hash of the serialized graph.
/// </summary>
public string ComputeHash(EvidenceGraph graph)
{
var json = Serialize(graph, pretty: false);
var bytes = Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return "sha256:" + Convert.ToHexStringLower(hash);
}
/// <summary>
/// Writes the evidence graph and manifest files.
/// </summary>
public async Task WriteAsync(EvidenceGraph graph, string outputDirectory, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(graph);
ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
Directory.CreateDirectory(outputDirectory);
var json = Serialize(graph, pretty: true);
var hash = ComputeHash(graph);
var graphPath = Path.Combine(outputDirectory, "evidence-graph.json");
var hashPath = Path.Combine(outputDirectory, "evidence-graph.sha256");
await File.WriteAllTextAsync(graphPath, json, Encoding.UTF8, ct);
await File.WriteAllTextAsync(hashPath, hash, Encoding.UTF8, ct);
}
/// <summary>
/// Reads and validates an evidence graph from files.
/// </summary>
public async Task<(EvidenceGraph Graph, bool HashValid)> ReadAsync(
string outputDirectory,
CancellationToken ct = default)
{
var graphPath = Path.Combine(outputDirectory, "evidence-graph.json");
var hashPath = Path.Combine(outputDirectory, "evidence-graph.sha256");
var json = await File.ReadAllTextAsync(graphPath, ct);
var expectedHash = (await File.ReadAllTextAsync(hashPath, ct)).Trim();
var graph = JsonSerializer.Deserialize<EvidenceGraph>(json, SerializerOptions)
?? throw new InvalidOperationException("Failed to deserialize evidence graph.");
var actualHash = ComputeHash(graph);
var hashValid = expectedHash.Equals(actualHash, StringComparison.OrdinalIgnoreCase);
return (graph, hashValid);
}
}
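
A short usage sketch for the serializer above; the node values and output directory are illustrative, but the calls (Serialize, ComputeHash, WriteAsync, ReadAsync) are the ones defined in this file:

// Sketch: build a tiny graph, then exercise the deterministic serializer.
var serializer = new EvidenceGraphSerializer();
var graph = new EvidenceGraph
{
    Nodes =
    [
        new EvidenceNode { Id = "sha256:abc", Type = "artifact", Digest = "sha256:abc", Name = "demo-image" }
    ],
    Edges =
    [
        new EvidenceEdge { Source = "sha256:abc", Target = "sboms/demo.cdx.json", Relationship = "described-by" }
    ],
    Metadata = new EvidenceGraphMetadata { ArtifactCount = 1, SbomCount = 1 }
};
string json = serializer.Serialize(graph);                 // compact JSON, nodes/edges re-ordered deterministically
string hash = serializer.ComputeHash(graph);               // "sha256:" + lowercase hex of the compact JSON
await serializer.WriteAsync(graph, "./reconciled");        // writes evidence-graph.json + evidence-graph.sha256
var (roundTripped, hashValid) = await serializer.ReadAsync("./reconciled");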


@@ -0,0 +1,325 @@
// =============================================================================
// IEvidenceReconciler.cs
// Main orchestrator for the 5-step evidence reconciliation algorithm
// =============================================================================
using System.Diagnostics;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Orchestrates the 5-step deterministic evidence reconciliation algorithm.
/// </summary>
public interface IEvidenceReconciler
{
/// <summary>
/// Reconciles evidence from an input directory into a deterministic evidence graph.
/// </summary>
/// <param name="inputDirectory">Directory containing SBOMs, attestations, and VEX documents.</param>
/// <param name="outputDirectory">Directory for output files.</param>
/// <param name="options">Reconciliation options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The reconciled evidence graph.</returns>
Task<EvidenceGraph> ReconcileAsync(
string inputDirectory,
string outputDirectory,
ReconciliationOptions? options = null,
CancellationToken ct = default);
}
/// <summary>
/// Options for evidence reconciliation.
/// </summary>
public sealed record ReconciliationOptions
{
public static readonly ReconciliationOptions Default = new();
/// <summary>
/// Whether to sign the output with DSSE.
/// </summary>
public bool SignOutput { get; init; }
/// <summary>
/// Key ID for DSSE signing.
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// JSON normalization options.
/// </summary>
public NormalizationOptions Normalization { get; init; } = NormalizationOptions.Default;
/// <summary>
/// Lattice configuration for precedence rules.
/// </summary>
public LatticeConfiguration Lattice { get; init; } = LatticeConfiguration.Default;
/// <summary>
/// Whether to verify attestation signatures.
/// </summary>
public bool VerifySignatures { get; init; } = true;
/// <summary>
/// Whether to verify Rekor inclusion proofs.
/// </summary>
public bool VerifyRekorProofs { get; init; }
}
/// <summary>
/// Default implementation of the evidence reconciler.
/// Implements the 5-step algorithm from advisory §5.
/// </summary>
public sealed class EvidenceReconciler : IEvidenceReconciler
{
private readonly EvidenceDirectoryDiscovery _discovery;
private readonly SourcePrecedenceLattice _lattice;
private readonly EvidenceGraphSerializer _serializer;
public EvidenceReconciler(
EvidenceDirectoryDiscovery? discovery = null,
SourcePrecedenceLattice? lattice = null,
EvidenceGraphSerializer? serializer = null)
{
_discovery = discovery ?? new EvidenceDirectoryDiscovery();
_lattice = lattice ?? new SourcePrecedenceLattice();
_serializer = serializer ?? new EvidenceGraphSerializer();
}
public async Task<EvidenceGraph> ReconcileAsync(
string inputDirectory,
string outputDirectory,
ReconciliationOptions? options = null,
CancellationToken ct = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(inputDirectory);
ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
options ??= ReconciliationOptions.Default;
var stopwatch = Stopwatch.StartNew();
// ========================================
// Step 1: Index artifacts by immutable digest
// ========================================
var index = await IndexArtifactsAsync(inputDirectory, ct);
// ========================================
// Step 2: Collect evidence for each artifact
// ========================================
var collectedIndex = await CollectEvidenceAsync(index, inputDirectory, options, ct);
// ========================================
// Step 3: Normalize all documents
// ========================================
// Normalization is applied during evidence collection
// ========================================
// Step 4: Apply lattice precedence rules
// ========================================
var mergedStatements = ApplyLatticeRules(collectedIndex);
// ========================================
// Step 5: Emit evidence graph
// ========================================
var graph = BuildGraph(collectedIndex, mergedStatements, stopwatch.ElapsedMilliseconds);
// Write output files
await _serializer.WriteAsync(graph, outputDirectory, ct);
// Optionally sign with DSSE
if (options.SignOutput && !string.IsNullOrEmpty(options.SigningKeyId))
{
await SignOutputAsync(outputDirectory, options.SigningKeyId, ct);
}
stopwatch.Stop();
return graph;
}
private async Task<ArtifactIndex> IndexArtifactsAsync(string inputDirectory, CancellationToken ct)
{
// Use the discovery service to find all artifacts
var discoveredFiles = await _discovery.DiscoverAsync(inputDirectory, ct);
var index = new ArtifactIndex();
foreach (var file in discoveredFiles)
{
// Create entry for each discovered file
var entry = ArtifactEntry.Empty(file.ContentHash, file.Path);
index.AddOrUpdate(entry);
}
return index;
}
private async Task<ArtifactIndex> CollectEvidenceAsync(
ArtifactIndex index,
string inputDirectory,
ReconciliationOptions options,
CancellationToken ct)
{
// In a full implementation, this would:
// 1. Parse SBOM files (CycloneDX, SPDX)
// 2. Parse attestation files (DSSE envelopes)
// 3. Parse VEX files (OpenVEX)
// 4. Validate signatures if enabled
// 5. Verify Rekor proofs if enabled
// For now, return the index with discovered files
await Task.CompletedTask;
return index;
}
private Dictionary<string, VexStatement> ApplyLatticeRules(ArtifactIndex index)
{
var mergedStatements = new Dictionary<string, VexStatement>(StringComparer.Ordinal);
foreach (var (digest, entry) in index.GetAll())
{
// Group VEX statements by vulnerability ID
var groupedByVuln = entry.VexDocuments
.GroupBy(v => v.VulnerabilityId, StringComparer.OrdinalIgnoreCase);
foreach (var group in groupedByVuln)
{
// Convert VexReference to VexStatement
var statements = group.Select(v => new VexStatement
{
VulnerabilityId = v.VulnerabilityId,
ProductId = digest,
Status = ParseVexStatus(v.Status),
Source = ParseSourcePrecedence(v.Source),
Justification = v.Justification,
DocumentRef = v.Path
}).ToList();
if (statements.Count > 0)
{
// Merge using lattice rules
var merged = _lattice.Merge(statements);
var key = $"{digest}:{merged.VulnerabilityId}";
mergedStatements[key] = merged;
}
}
}
return mergedStatements;
}
private EvidenceGraph BuildGraph(
ArtifactIndex index,
Dictionary<string, VexStatement> mergedStatements,
long elapsedMs)
{
var nodes = new List<EvidenceNode>();
var edges = new List<EvidenceEdge>();
int sbomCount = 0, attestationCount = 0, vexCount = 0;
foreach (var (digest, entry) in index.GetAll())
{
// Create node for artifact
var node = new EvidenceNode
{
Id = digest,
Type = "artifact",
Digest = digest,
Name = entry.Name,
Sboms = entry.Sboms.Select(s => new SbomNodeRef
{
Format = s.Format,
Path = s.Path,
ContentHash = s.ContentHash
}).ToList(),
Attestations = entry.Attestations.Select(a => new AttestationNodeRef
{
PredicateType = a.PredicateType,
Path = a.Path,
SignatureValid = a.SignatureValid,
RekorVerified = a.RekorVerified
}).ToList(),
VexStatements = mergedStatements
.Where(kv => kv.Key.StartsWith(digest + ":", StringComparison.Ordinal))
.Select(kv => new VexStatementRef
{
VulnerabilityId = kv.Value.VulnerabilityId,
Status = kv.Value.Status.ToString(),
Source = kv.Value.Source.ToString(),
Justification = kv.Value.Justification
}).ToList()
};
nodes.Add(node);
sbomCount += entry.Sboms.Count;
attestationCount += entry.Attestations.Count;
vexCount += entry.VexDocuments.Count;
// Create edges from artifacts to SBOMs
foreach (var sbom in entry.Sboms)
{
edges.Add(new EvidenceEdge
{
Source = digest,
Target = sbom.ContentHash,
Relationship = "described-by"
});
}
// Create edges from artifacts to attestations
foreach (var att in entry.Attestations)
{
edges.Add(new EvidenceEdge
{
Source = digest,
Target = att.Path,
Relationship = "attested-by"
});
}
}
return new EvidenceGraph
{
GeneratedAt = DateTimeOffset.UtcNow.ToString("O"),
Nodes = nodes,
Edges = edges,
Metadata = new EvidenceGraphMetadata
{
ArtifactCount = nodes.Count,
SbomCount = sbomCount,
AttestationCount = attestationCount,
VexStatementCount = mergedStatements.Count,
ConflictCount = 0, // TODO: Track conflicts during merge
ReconciliationDurationMs = elapsedMs
}
};
}
private static async Task SignOutputAsync(string outputDirectory, string keyId, CancellationToken ct)
{
// Placeholder for DSSE signing integration
// Would use the Signer module to create a DSSE envelope
await Task.CompletedTask;
}
private static VexStatus ParseVexStatus(string status)
{
return status.ToLowerInvariant() switch
{
"affected" => VexStatus.Affected,
"not_affected" or "notaffected" => VexStatus.NotAffected,
"fixed" => VexStatus.Fixed,
"under_investigation" or "underinvestigation" => VexStatus.UnderInvestigation,
_ => VexStatus.Unknown
};
}
private static SourcePrecedence ParseSourcePrecedence(string source)
{
return source.ToLowerInvariant() switch
{
"vendor" => SourcePrecedence.Vendor,
"maintainer" => SourcePrecedence.Maintainer,
"third-party" or "thirdparty" => SourcePrecedence.ThirdParty,
_ => SourcePrecedence.Unknown
};
}
}
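
A sketch of how a caller might drive the reconciler defined above; the directory paths are illustrative, while the types and option names are the ones declared in this file:

// Sketch: run the 5-step reconciliation over an offline import directory.
var reconciler = new EvidenceReconciler();
var options = new ReconciliationOptions
{
    VerifySignatures = true,
    VerifyRekorProofs = false,
    Normalization = NormalizationOptions.Default,
    Lattice = LatticeConfiguration.Default
};
EvidenceGraph graph = await reconciler.ReconcileAsync(
    inputDirectory: "./import/evidence",       // illustrative path
    outputDirectory: "./import/reconciled",    // illustrative path
    options: options);
Console.WriteLine(
    $"{graph.Metadata.ArtifactCount} artifacts, " +
    $"{graph.Metadata.VexStatementCount} merged VEX statements, " +
    $"{graph.Metadata.ReconciliationDurationMs} ms");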


@@ -0,0 +1,270 @@
// =============================================================================
// JsonNormalizer.cs
// Deterministic JSON normalization for evidence reconciliation
// Part of Step 3: Normalization
// =============================================================================
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Provides deterministic JSON normalization for reproducible evidence graphs.
/// Implements stable sorting, timestamp stripping, and URI normalization.
/// </summary>
public static class JsonNormalizer
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
/// <summary>
/// Normalizes a JSON document for deterministic output.
/// </summary>
/// <param name="json">The JSON string to normalize.</param>
/// <param name="options">Normalization options.</param>
/// <returns>Normalized JSON string.</returns>
public static string Normalize(string json, NormalizationOptions? options = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(json);
options ??= NormalizationOptions.Default;
var node = JsonNode.Parse(json);
if (node is null)
{
return "null";
}
var normalized = NormalizeNode(node, options);
return normalized.ToJsonString(SerializerOptions);
}
/// <summary>
/// Normalizes a JSON node recursively.
/// </summary>
private static JsonNode? NormalizeNode(JsonNode? node, NormalizationOptions options)
{
return node switch
{
JsonObject obj => NormalizeObject(obj, options),
JsonArray arr => NormalizeArray(arr, options),
JsonValue val => NormalizeValue(val, options),
_ => node
};
}
/// <summary>
/// Normalizes a JSON object with stable key ordering.
/// </summary>
private static JsonObject NormalizeObject(JsonObject obj, NormalizationOptions options)
{
var normalized = new JsonObject();
// Sort keys using ordinal comparison for deterministic ordering
var sortedKeys = obj
.Select(kv => kv.Key)
.Where(key => !ShouldStripKey(key, options))
.OrderBy(k => k, StringComparer.Ordinal);
foreach (var key in sortedKeys)
{
var value = obj[key];
var normalizedKey = NormalizeKey(key, options);
var normalizedValue = NormalizeNode(value?.DeepClone(), options);
normalized[normalizedKey] = normalizedValue;
}
return normalized;
}
/// <summary>
/// Normalizes a JSON array with stable element ordering.
/// </summary>
private static JsonArray NormalizeArray(JsonArray arr, NormalizationOptions options)
{
var normalized = new JsonArray();
// For arrays of objects, sort by a deterministic key if possible
var elements = arr
.Select(n => NormalizeNode(n?.DeepClone(), options))
.ToList();
if (options.SortArrays && elements.All(e => e is JsonObject))
{
elements = elements
.Cast<JsonObject>()
.OrderBy(o => GetSortKey(o), StringComparer.Ordinal)
.Cast<JsonNode?>()
.ToList();
}
foreach (var element in elements)
{
normalized.Add(element);
}
return normalized;
}
/// <summary>
/// Normalizes a JSON value (strings, numbers, etc.).
/// </summary>
private static JsonValue NormalizeValue(JsonValue val, NormalizationOptions options)
{
// TryGetValue surfaces strings even when the node is backed by a JsonElement
// (the normal case after JsonNode.Parse); GetValue<object>() would box the JsonElement.
if (val.TryGetValue<string>(out var str))
{
// Normalize URIs to lowercase
if (options.LowercaseUris && IsUri(str))
{
str = str.ToLowerInvariant();
}
// Strip or normalize timestamps
if (options.StripTimestamps && IsTimestamp(str))
{
return JsonValue.Create("__TIMESTAMP_STRIPPED__")!;
}
return JsonValue.Create(str)!;
}
return val.DeepClone().AsValue();
}
/// <summary>
/// Determines if a key should be stripped from output.
/// </summary>
private static bool ShouldStripKey(string key, NormalizationOptions options)
{
if (!options.StripTimestamps)
{
return false;
}
// Common timestamp field names
var timestampFields = new[]
{
"timestamp", "created", "modified", "updated", "createdAt", "updatedAt",
"modifiedAt", "date", "time", "datetime", "lastModified", "generated"
};
return timestampFields.Any(f => key.Equals(f, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Normalizes a key (e.g., to camelCase).
/// </summary>
private static string NormalizeKey(string key, NormalizationOptions options)
{
if (!options.NormalizeKeys)
{
return key;
}
// Basic camelCase conversion
if (key.Length > 0 && char.IsUpper(key[0]))
{
return char.ToLowerInvariant(key[0]) + key[1..];
}
return key;
}
/// <summary>
/// Gets a deterministic sort key for a JSON object.
/// </summary>
private static string GetSortKey(JsonObject obj)
{
// Priority order for sort keys
var keyPriority = new[] { "id", "@id", "name", "digest", "uri", "ref" };
foreach (var key in keyPriority)
{
if (obj.TryGetPropertyValue(key, out var value) &&
value is JsonValue jv &&
jv.TryGetValue<string>(out var str))
{
return str;
}
}
// Fallback: use the serialized object itself as the sort key
return obj.ToJsonString();
}
/// <summary>
/// Checks if a string looks like a URI.
/// </summary>
private static bool IsUri(string value)
{
return value.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("https://", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("urn:", StringComparison.OrdinalIgnoreCase) ||
value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Checks if a string looks like a timestamp.
/// </summary>
private static bool IsTimestamp(string value)
{
// ISO 8601 pattern detection
if (value.Length >= 10 &&
char.IsDigit(value[0]) &&
char.IsDigit(value[1]) &&
char.IsDigit(value[2]) &&
char.IsDigit(value[3]) &&
value[4] == '-')
{
return DateTimeOffset.TryParse(value, out _);
}
return false;
}
}
/// <summary>
/// Options for JSON normalization.
/// </summary>
public sealed record NormalizationOptions
{
/// <summary>
/// Default normalization options for evidence reconciliation.
/// </summary>
public static readonly NormalizationOptions Default = new()
{
SortArrays = true,
LowercaseUris = true,
StripTimestamps = true,
NormalizeKeys = true
};
/// <summary>
/// Sort arrays of objects by deterministic key.
/// </summary>
public bool SortArrays { get; init; }
/// <summary>
/// Lowercase all URI values.
/// </summary>
public bool LowercaseUris { get; init; }
/// <summary>
/// Strip or normalize timestamp fields.
/// </summary>
public bool StripTimestamps { get; init; }
/// <summary>
/// Normalize JSON keys to camelCase.
/// </summary>
public bool NormalizeKeys { get; init; }
}
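
A small before-and-after sketch for the normalizer above; the input document is illustrative, and the expected output is approximate but follows from the default options (key sorting, camelCase keys, URI lowercasing, timestamp stripping, array sorting):

// Sketch: normalize a tiny document with the default options.
var input = """
{
  "Name": "demo",
  "purl": "pkg:Npm/Left-Pad@1.3.0",
  "created": "2025-12-16T11:11:57Z",
  "refs": [ { "id": "b" }, { "id": "a" } ]
}
""";
string normalized = JsonNormalizer.Normalize(input, NormalizationOptions.Default);
// Approximate result: {"name":"demo","purl":"pkg:npm/left-pad@1.3.0","refs":[{"id":"a"},{"id":"b"}]}
// "created" is dropped as a timestamp field, the purl is lowercased, and the refs array is sorted by id.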


@@ -0,0 +1,249 @@
// =============================================================================
// SourcePrecedenceLattice.cs
// Lattice-based precedence rules for VEX merge conflict resolution
// Part of Step 4: Lattice Rules
// =============================================================================
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Source precedence levels for VEX document authority.
/// Higher values indicate higher authority.
/// Precedence: Vendor > Maintainer > ThirdParty > Unknown
/// </summary>
public enum SourcePrecedence
{
/// <summary>Unknown or unspecified source.</summary>
Unknown = 0,
/// <summary>Third-party security researcher or tool.</summary>
ThirdParty = 10,
/// <summary>Package or project maintainer.</summary>
Maintainer = 20,
/// <summary>Software vendor (highest authority).</summary>
Vendor = 30
}
/// <summary>
/// VEX status values following OpenVEX specification.
/// </summary>
public enum VexStatus
{
/// <summary>Status not yet determined.</summary>
Unknown,
/// <summary>Component is affected by the vulnerability.</summary>
Affected,
/// <summary>Component is not affected by the vulnerability.</summary>
NotAffected,
/// <summary>A fix is available for the vulnerability.</summary>
Fixed,
/// <summary>Vulnerability status is under investigation.</summary>
UnderInvestigation
}
/// <summary>
/// Represents a VEX statement with source precedence for lattice merge.
/// </summary>
public sealed record VexStatement
{
public required string VulnerabilityId { get; init; }
public required string ProductId { get; init; }
public required VexStatus Status { get; init; }
public required SourcePrecedence Source { get; init; }
public string? Justification { get; init; }
public string? ActionStatement { get; init; }
public DateTimeOffset? Timestamp { get; init; }
public string? DocumentRef { get; init; }
}
/// <summary>
/// Implements lattice-based precedence rules for VEX document merging.
/// </summary>
public sealed class SourcePrecedenceLattice
{
private readonly LatticeConfiguration _config;
public SourcePrecedenceLattice(LatticeConfiguration? config = null)
{
_config = config ?? LatticeConfiguration.Default;
}
/// <summary>
/// Merges multiple VEX statements for the same vulnerability/product pair.
/// Higher precedence sources win; ties are resolved by timestamp (most recent wins).
/// </summary>
public VexStatement Merge(IEnumerable<VexStatement> statements)
{
ArgumentNullException.ThrowIfNull(statements);
var statementList = statements.ToList();
if (statementList.Count == 0)
{
throw new ArgumentException("At least one statement is required.", nameof(statements));
}
if (statementList.Count == 1)
{
return statementList[0];
}
// Validate all statements are for the same vuln/product
var vulnId = statementList[0].VulnerabilityId;
var productId = statementList[0].ProductId;
if (!statementList.All(s =>
s.VulnerabilityId.Equals(vulnId, StringComparison.OrdinalIgnoreCase) &&
s.ProductId.Equals(productId, StringComparison.OrdinalIgnoreCase)))
{
throw new ArgumentException(
"All statements must be for the same vulnerability/product pair.",
nameof(statements));
}
// Sort by precedence (descending), then by timestamp (descending)
var winner = statementList
.OrderByDescending(s => (int)s.Source)
.ThenByDescending(s => s.Timestamp ?? DateTimeOffset.MinValue)
.First();
return winner;
}
/// <summary>
/// Merges two VEX statements, returning the one with higher authority.
/// </summary>
public VexStatement Merge(VexStatement a, VexStatement b)
{
ArgumentNullException.ThrowIfNull(a);
ArgumentNullException.ThrowIfNull(b);
return Merge([a, b]);
}
/// <summary>
/// Compares two source precedence levels.
/// Returns positive if a > b, negative if a < b, 0 if equal.
/// </summary>
public static int Compare(SourcePrecedence a, SourcePrecedence b)
{
return ((int)a).CompareTo((int)b);
}
/// <summary>
/// Determines the join (supremum) of two precedence levels in the lattice.
/// </summary>
public static SourcePrecedence Join(SourcePrecedence a, SourcePrecedence b)
{
return (SourcePrecedence)Math.Max((int)a, (int)b);
}
/// <summary>
/// Determines the meet (infimum) of two precedence levels in the lattice.
/// </summary>
public static SourcePrecedence Meet(SourcePrecedence a, SourcePrecedence b)
{
return (SourcePrecedence)Math.Min((int)a, (int)b);
}
/// <summary>
/// Resolves conflicts between VEX statements with same precedence.
/// </summary>
public ConflictResolution ResolveConflict(VexStatement a, VexStatement b)
{
ArgumentNullException.ThrowIfNull(a);
ArgumentNullException.ThrowIfNull(b);
// Different precedence - no conflict
if (a.Source != b.Source)
{
var winner = Compare(a.Source, b.Source) > 0 ? a : b;
return new ConflictResolution(
HasConflict: false,
Winner: winner,
Reason: $"Higher precedence: {winner.Source}");
}
// Same precedence - use timestamp
var aTime = a.Timestamp ?? DateTimeOffset.MinValue;
var bTime = b.Timestamp ?? DateTimeOffset.MinValue;
if (aTime != bTime)
{
var winner = aTime > bTime ? a : b;
return new ConflictResolution(
HasConflict: false,
Winner: winner,
Reason: "More recent timestamp wins");
}
// Same precedence and timestamp - true conflict
// Use status priority: NotAffected > Fixed > UnderInvestigation > Affected > Unknown
var statusPriority = new Dictionary<VexStatus, int>
{
[VexStatus.NotAffected] = 5,
[VexStatus.Fixed] = 4,
[VexStatus.UnderInvestigation] = 3,
[VexStatus.Affected] = 2,
[VexStatus.Unknown] = 1
};
var aPriority = statusPriority.GetValueOrDefault(a.Status, 0);
var bPriority = statusPriority.GetValueOrDefault(b.Status, 0);
if (aPriority != bPriority)
{
var winner = aPriority > bPriority ? a : b;
return new ConflictResolution(
HasConflict: true,
Winner: winner,
Reason: $"Status priority: {winner.Status} > {(winner == a ? b : a).Status}");
}
// Absolute tie - deterministic fallback (alphabetical by document ref)
var docRefCompare = string.Compare(
a.DocumentRef ?? "",
b.DocumentRef ?? "",
StringComparison.Ordinal);
var fallbackWinner = docRefCompare <= 0 ? a : b;
return new ConflictResolution(
HasConflict: true,
Winner: fallbackWinner,
Reason: "Deterministic fallback (document ref ordering)");
}
}
/// <summary>
/// Result of conflict resolution between VEX statements.
/// </summary>
public sealed record ConflictResolution(
bool HasConflict,
VexStatement Winner,
string Reason);
/// <summary>
/// Configuration for the precedence lattice.
/// </summary>
public sealed record LatticeConfiguration
{
public static readonly LatticeConfiguration Default = new();
/// <summary>
/// Custom precedence mappings for specific sources.
/// </summary>
public IReadOnlyDictionary<string, SourcePrecedence> SourceMappings { get; init; }
= new Dictionary<string, SourcePrecedence>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Whether to prefer more restrictive statuses in conflicts (e.g., Affected over NotAffected).
/// Default is false (less restrictive wins).
/// </summary>
public bool PreferRestrictive { get; init; }
}
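
A sketch of the merge semantics above, using only the enums and records defined in this file; the CVE identifier, justification string, and document paths are illustrative:

// Sketch: a vendor statement outranks a third-party statement for the same vuln/product pair.
var lattice = new SourcePrecedenceLattice();
var vendor = new VexStatement
{
    VulnerabilityId = "CVE-2025-0001",
    ProductId = "sha256:abc",
    Status = VexStatus.NotAffected,
    Source = SourcePrecedence.Vendor,
    Justification = "vulnerable_code_not_present",
    DocumentRef = "vex/vendor.openvex.json"
};
var thirdParty = vendor with
{
    Status = VexStatus.Affected,
    Source = SourcePrecedence.ThirdParty,
    Justification = null,
    DocumentRef = "vex/research.openvex.json"
};
VexStatement merged = lattice.Merge(vendor, thirdParty);                 // Vendor wins: NotAffected
ConflictResolution resolution = lattice.ResolveConflict(vendor, thirdParty);
// resolution.HasConflict is false; Reason reports the higher-precedence source.
var join = SourcePrecedenceLattice.Join(SourcePrecedence.Maintainer, SourcePrecedence.ThirdParty); // Maintainer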