up
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-27 23:44:42 +02:00
parent ef6e4b2067
commit 3b96b2e3ea
298 changed files with 47516 additions and 1168 deletions

View File

@@ -33,6 +33,7 @@ internal static class CommandFactory
root.Add(BuildScannerCommand(services, verboseOption, cancellationToken));
root.Add(BuildScanCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildRubyCommand(services, verboseOption, cancellationToken));
root.Add(BuildPhpCommand(services, verboseOption, cancellationToken));
root.Add(BuildDatabaseCommand(services, verboseOption, cancellationToken));
root.Add(BuildSourcesCommand(services, verboseOption, cancellationToken));
root.Add(BuildAocCommand(services, verboseOption, cancellationToken));
@@ -252,6 +253,40 @@ internal static class CommandFactory
return ruby;
}
/// <summary>
/// Builds the "php" command group containing the "inspect" subcommand,
/// which analyzes a local PHP workspace via <see cref="CommandHandlers.HandlePhpInspectAsync"/>.
/// </summary>
private static Command BuildPhpCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
    var rootOption = new Option<string?>("--root")
    {
        Description = "Path to the PHP workspace (defaults to current directory)."
    };
    var formatOption = new Option<string?>("--format")
    {
        Description = "Output format (table or json)."
    };

    var inspectCommand = new Command("inspect", "Inspect a local PHP workspace.");
    inspectCommand.Add(rootOption);
    inspectCommand.Add(formatOption);
    inspectCommand.SetAction((parseResult, _) => CommandHandlers.HandlePhpInspectAsync(
        services,
        parseResult.GetValue(rootOption),
        parseResult.GetValue(formatOption) ?? "table",
        parseResult.GetValue(verboseOption),
        cancellationToken));

    var phpCommand = new Command("php", "Work with PHP analyzer outputs.");
    phpCommand.Add(inspectCommand);
    return phpCommand;
}
private static Command BuildKmsCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
var kms = new Command("kms", "Manage file-backed signing keys.");

View File

@@ -38,6 +38,7 @@ using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.Python;
using StellaOps.Scanner.Analyzers.Lang.Ruby;
using StellaOps.Scanner.Analyzers.Lang.Php;
using StellaOps.Policy;
using StellaOps.PolicyDsl;
@@ -7154,6 +7155,122 @@ internal static class CommandHandlers
}
}
/// <summary>
/// Handles <c>php inspect</c>: runs the PHP language analyzer over a workspace
/// and renders the discovered composer packages as a table or JSON.
/// </summary>
/// <param name="services">Root service provider; a scoped provider is created per invocation.</param>
/// <param name="rootPath">Workspace root to inspect; null/blank falls back to the current directory.</param>
/// <param name="format">Output format, "table" (default when blank) or "json", case-insensitive.</param>
/// <param name="verbose">When true, lowers the CLI verbosity floor to Debug for the duration of the command.</param>
/// <param name="cancellationToken">Cancels the analyzer run.</param>
public static async Task HandlePhpInspectAsync(
    IServiceProvider services,
    string? rootPath,
    string format,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("php-inspect");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.php.inspect", ActivityKind.Internal);
    activity?.SetTag("stellaops.cli.command", "php inspect");
    using var duration = CliMetrics.MeasureCommandDuration("php inspect");
    var outcome = "unknown";
    try
    {
        var normalizedFormat = string.IsNullOrWhiteSpace(format)
            ? "table"
            : format.Trim().ToLowerInvariant();
        if (normalizedFormat is not ("table" or "json"))
        {
            throw new InvalidOperationException("Format must be either 'table' or 'json'.");
        }
        var targetRoot = string.IsNullOrWhiteSpace(rootPath)
            ? Directory.GetCurrentDirectory()
            : Path.GetFullPath(rootPath);
        if (!Directory.Exists(targetRoot))
        {
            throw new DirectoryNotFoundException($"Directory '{targetRoot}' was not found.");
        }
        logger.LogInformation("Inspecting PHP workspace in {Root}.", targetRoot);
        activity?.SetTag("stellaops.cli.php.root", targetRoot);
        var engine = new LanguageAnalyzerEngine(new ILanguageAnalyzer[] { new PhpLanguageAnalyzer() });
        var context = new LanguageAnalyzerContext(targetRoot, TimeProvider.System);
        var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false);
        var report = PhpInspectReport.Create(result.ToSnapshots());
        activity?.SetTag("stellaops.cli.php.package_count", report.Packages.Count);
        if (string.Equals(normalizedFormat, "json", StringComparison.Ordinal))
        {
            var options = new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                WriteIndented = true
            };
            Console.WriteLine(JsonSerializer.Serialize(report, options));
        }
        else
        {
            RenderPhpInspectReport(report);
        }
        outcome = report.Packages.Count == 0 ? "empty" : "ok";
        Environment.ExitCode = 0;
    }
    catch (DirectoryNotFoundException ex)
    {
        outcome = "not_found";
        // Use a constant message template (CA2254): passing ex.Message as the
        // template throws FormatException when the path contains '{' or '}'.
        logger.LogError(ex, "{Message}", ex.Message);
        Environment.ExitCode = 71;
    }
    catch (InvalidOperationException ex)
    {
        outcome = "invalid";
        logger.LogError(ex, "{Message}", ex.Message);
        Environment.ExitCode = 64;
    }
    catch (Exception ex)
    {
        outcome = "error";
        logger.LogError(ex, "PHP inspect failed.");
        Environment.ExitCode = 70;
    }
    finally
    {
        // Always restore the verbosity floor and record the metric, even on failure.
        verbosity.MinimumLevel = previousLevel;
        CliMetrics.RecordPhpInspect(outcome);
    }
}
/// <summary>
/// Renders the PHP inspect report as a rounded table, or a yellow notice
/// when no packages were detected. Cell values are markup-escaped.
/// </summary>
private static void RenderPhpInspectReport(PhpInspectReport report)
{
    if (report.Packages.Count == 0)
    {
        AnsiConsole.MarkupLine("[yellow]No PHP packages detected.[/]");
        return;
    }

    var table = new Table().Border(TableBorder.Rounded);
    table.AddColumn("Package");
    table.AddColumn("Version");
    table.AddColumn("Type");
    table.AddColumn(new TableColumn("Lockfile").NoWrap());
    table.AddColumn("Dev");

    foreach (var package in report.Packages)
    {
        table.AddRow(
            Markup.Escape(package.Name),
            Markup.Escape(package.Version ?? "-"),
            Markup.Escape(package.Type ?? "-"),
            Markup.Escape(package.Lockfile ?? "-"),
            package.IsDev ? "[grey]yes[/]" : "-");
    }

    AnsiConsole.Write(table);
}
private static void RenderRubyInspectReport(RubyInspectReport report)
{
if (!report.Packages.Any())
@@ -7662,6 +7779,113 @@ internal static class CommandHandlers
}
}
/// <summary>
/// JSON-serializable report of composer packages discovered by the PHP analyzer.
/// </summary>
private sealed class PhpInspectReport
{
    private PhpInspectReport(IReadOnlyList<PhpInspectEntry> packages) => Packages = packages;

    [JsonPropertyName("packages")]
    public IReadOnlyList<PhpInspectEntry> Packages { get; }

    /// <summary>
    /// Filters snapshots to "composer" components (case-insensitive) and orders
    /// them by name, then version, for deterministic output.
    /// </summary>
    public static PhpInspectReport Create(IEnumerable<LanguageComponentSnapshot>? snapshots)
    {
        if (snapshots is null)
        {
            return new PhpInspectReport(Array.Empty<PhpInspectEntry>());
        }

        var entries = snapshots
            .Where(static snapshot => string.Equals(snapshot.Type, "composer", StringComparison.OrdinalIgnoreCase))
            .Select(PhpInspectEntry.FromSnapshot)
            .OrderBy(static entry => entry.Name, StringComparer.OrdinalIgnoreCase)
            .ThenBy(static entry => entry.Version ?? string.Empty, StringComparer.OrdinalIgnoreCase)
            .ToArray();
        return new PhpInspectReport(entries);
    }
}
/// <summary>
/// One composer package row in the PHP inspect report; metadata-derived
/// fields (type, lockfile, dev flag, source, dist SHA) may be null/false
/// when absent from the analyzer snapshot.
/// </summary>
private sealed record PhpInspectEntry(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string? Version,
    [property: JsonPropertyName("type")] string? Type,
    [property: JsonPropertyName("lockfile")] string? Lockfile,
    [property: JsonPropertyName("isDev")] bool IsDev,
    [property: JsonPropertyName("source")] string? Source,
    [property: JsonPropertyName("distSha")] string? DistSha)
{
    /// <summary>Projects an analyzer snapshot into an entry, reading optional fields case-insensitively from its metadata.</summary>
    public static PhpInspectEntry FromSnapshot(LanguageComponentSnapshot snapshot)
    {
        var metadata = PhpMetadataHelpers.Clone(snapshot.Metadata);
        return new PhpInspectEntry(
            Name: snapshot.Name,
            Version: snapshot.Version,
            Type: PhpMetadataHelpers.GetString(metadata, "type"),
            Lockfile: PhpMetadataHelpers.GetString(metadata, "lockfile"),
            IsDev: PhpMetadataHelpers.GetBool(metadata, "isDev") ?? false,
            Source: PhpMetadataHelpers.GetString(metadata, "source"),
            DistSha: PhpMetadataHelpers.GetString(metadata, "distSha"));
    }
}
/// <summary>
/// Case-insensitive helpers for reading string/bool values out of analyzer
/// metadata dictionaries.
/// </summary>
private static class PhpMetadataHelpers
{
    /// <summary>
    /// Copies metadata into a new dictionary keyed case-insensitively.
    /// Null or empty input yields an empty dictionary. Keys differing only
    /// by case collapse to one entry (last value wins).
    /// </summary>
    public static IDictionary<string, string?> Clone(IDictionary<string, string?>? metadata)
    {
        var copy = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase);
        if (metadata is not null)
        {
            foreach (var (key, value) in metadata)
            {
                copy[key] = value;
            }
        }
        return copy;
    }

    /// <summary>
    /// Looks up a value by key; falls back to a linear case-insensitive scan
    /// so case-sensitive dictionaries still match. Returns null when absent.
    /// </summary>
    public static string? GetString(IDictionary<string, string?> metadata, string key)
    {
        if (metadata.TryGetValue(key, out var direct))
        {
            return direct;
        }

        foreach (var (candidate, value) in metadata)
        {
            if (string.Equals(candidate, key, StringComparison.OrdinalIgnoreCase))
            {
                return value;
            }
        }

        return null;
    }

    /// <summary>
    /// Reads a boolean value; returns null when the key is absent, blank,
    /// or not parseable by <see cref="bool.TryParse(string?, out bool)"/>.
    /// </summary>
    public static bool? GetBool(IDictionary<string, string?> metadata, string key)
    {
        var raw = GetString(metadata, key);
        if (string.IsNullOrWhiteSpace(raw))
        {
            return null;
        }

        return bool.TryParse(raw, out var parsed) ? parsed : null;
    }
}
private sealed record LockValidationEntry(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("version")] string? Version,

View File

@@ -52,6 +52,7 @@
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Ruby/StellaOps.Scanner.Analyzers.Lang.Ruby.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Php/StellaOps.Scanner.Analyzers.Lang.Php.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
<ProjectReference Include="../../Policy/StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />

View File

@@ -1,12 +1,12 @@
using System;
using System.Diagnostics.Metrics;
namespace StellaOps.Cli.Telemetry;
internal static class CliMetrics
{
private static readonly Meter Meter = new("StellaOps.Cli", "1.0.0");
using System;
using System.Diagnostics.Metrics;
namespace StellaOps.Cli.Telemetry;
internal static class CliMetrics
{
private static readonly Meter Meter = new("StellaOps.Cli", "1.0.0");
private static readonly Counter<long> ScannerDownloadCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.download.count");
private static readonly Counter<long> ScannerInstallCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.install.count");
private static readonly Counter<long> ScanRunCounter = Meter.CreateCounter<long>("stellaops.cli.scan.run.count");
@@ -26,19 +26,20 @@ internal static class CliMetrics
private static readonly Counter<long> JavaLockValidateCounter = Meter.CreateCounter<long>("stellaops.cli.java.lock_validate.count");
private static readonly Counter<long> RubyInspectCounter = Meter.CreateCounter<long>("stellaops.cli.ruby.inspect.count");
private static readonly Counter<long> RubyResolveCounter = Meter.CreateCounter<long>("stellaops.cli.ruby.resolve.count");
private static readonly Counter<long> PhpInspectCounter = Meter.CreateCounter<long>("stellaops.cli.php.inspect.count");
private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");
/// <summary>Counts a scanner bundle download, tagged by channel and cache hit/miss.</summary>
public static void RecordScannerDownload(string channel, bool fromCache)
    => ScannerDownloadCounter.Add(1, new KeyValuePair<string, object?>[]
    {
        new("channel", channel),
        new("cache", fromCache ? "hit" : "miss")
    });
/// <summary>Counts a scanner install, tagged with the release channel.</summary>
public static void RecordScannerInstall(string channel)
    => ScannerInstallCounter.Add(1, new KeyValuePair<string, object?>[] { new("channel", channel) });
public static void RecordScanRun(string runner, int exitCode)
new("channel", channel),
new("cache", fromCache ? "hit" : "miss")
});
public static void RecordScannerInstall(string channel)
=> ScannerInstallCounter.Add(1, new KeyValuePair<string, object?>[] { new("channel", channel) });
public static void RecordScanRun(string runner, int exitCode)
=> ScanRunCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("runner", runner),
@@ -143,34 +144,40 @@ internal static class CliMetrics
new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
});
/// <summary>
/// Counts a `php inspect` invocation, tagged with its outcome
/// (the handler reports "ok", "empty", "invalid", "not_found", "error");
/// blank outcomes are normalized to "unknown".
/// </summary>
public static void RecordPhpInspect(string outcome)
    => PhpInspectCounter.Add(1, new KeyValuePair<string, object?>[]
    {
        new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
    });
/// <summary>
/// Starts timing a CLI command; disposing the returned scope records the
/// elapsed milliseconds into the command-duration histogram.
/// </summary>
public static IDisposable MeasureCommandDuration(string command)
{
    var start = DateTime.UtcNow;
    return new DurationScope(command, start);
}
/// <summary>
/// Disposable timing scope: records wall-clock elapsed time (DateTime.UtcNow
/// delta) into the duration histogram on first Dispose; subsequent disposes
/// are no-ops.
/// </summary>
private sealed class DurationScope : IDisposable
{
    private readonly string _command;
    private readonly DateTime _start;
    private bool _disposed; // guards against double-recording on repeated Dispose

    public DurationScope(string command, DateTime start)
    {
        _command = command;
        _start = start;
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        var elapsed = (DateTime.UtcNow - _start).TotalMilliseconds;
        CommandDurationHistogram.Record(elapsed, new KeyValuePair<string, object?>[] { new("command", _command) });
    }
}
}
}
/// <summary>
/// Disposable timing scope: records wall-clock elapsed time (DateTime.UtcNow
/// delta) into the duration histogram on first Dispose; subsequent disposes
/// are no-ops.
/// </summary>
private sealed class DurationScope : IDisposable
{
    private readonly string _command;
    private readonly DateTime _start;
    private bool _disposed; // guards against double-recording on repeated Dispose

    public DurationScope(string command, DateTime start)
    {
        _command = command;
        _start = start;
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        var elapsed = (DateTime.UtcNow - _start).TotalMilliseconds;
        CommandDurationHistogram.Record(elapsed, new KeyValuePair<string, object?>[] { new("command", _command) });
    }
}
}

View File

@@ -0,0 +1,88 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Excititor.WebService.Contracts;
/// <summary>
/// Response for /attestations/vex/{attestationId} endpoint.
/// </summary>
/// <remarks>
/// Aggregates DSSE verification state, builder identity, and chain-of-custody
/// links for a single attestation. All nested contracts live in this file.
/// </remarks>
public sealed record VexAttestationDetailResponse(
    [property: JsonPropertyName("attestationId")] string AttestationId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("predicateType")] string PredicateType,
    [property: JsonPropertyName("subject")] VexAttestationSubject Subject,
    [property: JsonPropertyName("builder")] VexAttestationBuilderIdentity Builder,
    [property: JsonPropertyName("verification")] VexAttestationVerificationState Verification,
    [property: JsonPropertyName("chainOfCustody")] IReadOnlyList<VexAttestationCustodyLink> ChainOfCustody,
    [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string> Metadata);

/// <summary>
/// Subject of the attestation (what was signed).
/// </summary>
public sealed record VexAttestationSubject(
    [property: JsonPropertyName("digest")] string Digest,
    [property: JsonPropertyName("digestAlgorithm")] string DigestAlgorithm,
    [property: JsonPropertyName("name")] string? Name,
    [property: JsonPropertyName("uri")] string? Uri);

/// <summary>
/// Builder identity for the attestation.
/// </summary>
public sealed record VexAttestationBuilderIdentity(
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("version")] string? Version,
    [property: JsonPropertyName("builderId")] string? BuilderId,
    [property: JsonPropertyName("invocationId")] string? InvocationId);

/// <summary>
/// DSSE verification state.
/// </summary>
/// <remarks>
/// The endpoint populates <c>Diagnostics</c> with the raw attestation metadata
/// dictionary; keys are free-form.
/// </remarks>
public sealed record VexAttestationVerificationState(
    [property: JsonPropertyName("valid")] bool Valid,
    [property: JsonPropertyName("verifiedAt")] DateTimeOffset? VerifiedAt,
    [property: JsonPropertyName("signatureType")] string? SignatureType,
    [property: JsonPropertyName("keyId")] string? KeyId,
    [property: JsonPropertyName("issuer")] string? Issuer,
    [property: JsonPropertyName("envelopeDigest")] string? EnvelopeDigest,
    [property: JsonPropertyName("diagnostics")] IReadOnlyDictionary<string, string> Diagnostics);

/// <summary>
/// Chain-of-custody link in the attestation provenance.
/// </summary>
public sealed record VexAttestationCustodyLink(
    [property: JsonPropertyName("step")] int Step,
    [property: JsonPropertyName("actor")] string Actor,
    [property: JsonPropertyName("action")] string Action,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("reference")] string? Reference);

/// <summary>
/// Response for /attestations/vex/list endpoint.
/// </summary>
/// <remarks>
/// NOTE(review): the list endpoint currently sets <c>Total</c> to the page
/// item count, not the full match count — confirm intended semantics.
/// </remarks>
public sealed record VexAttestationListResponse(
    [property: JsonPropertyName("items")] IReadOnlyList<VexAttestationListItem> Items,
    [property: JsonPropertyName("cursor")] string? Cursor,
    [property: JsonPropertyName("hasMore")] bool HasMore,
    [property: JsonPropertyName("total")] int Total);

/// <summary>
/// Summary item for attestation list.
/// </summary>
public sealed record VexAttestationListItem(
    [property: JsonPropertyName("attestationId")] string AttestationId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("predicateType")] string PredicateType,
    [property: JsonPropertyName("subjectDigest")] string SubjectDigest,
    [property: JsonPropertyName("valid")] bool Valid,
    [property: JsonPropertyName("builderId")] string? BuilderId);

/// <summary>
/// Response for /attestations/vex/lookup endpoint.
/// </summary>
/// <remarks>
/// <c>SubjectDigest</c> echoes the lookup key (linksetId or observationId)
/// supplied by the caller.
/// </remarks>
public sealed record VexAttestationLookupResponse(
    [property: JsonPropertyName("subjectDigest")] string SubjectDigest,
    [property: JsonPropertyName("attestations")] IReadOnlyList<VexAttestationListItem> Attestations,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt);

View File

@@ -0,0 +1,141 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Excititor.WebService.Contracts;
/// <summary>
/// Response for /evidence/vex/bundle/{bundleId} endpoint.
/// </summary>
public sealed record VexEvidenceBundleResponse(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("contentHash")] string ContentHash,
    [property: JsonPropertyName("format")] string Format,
    [property: JsonPropertyName("itemCount")] int ItemCount,
    [property: JsonPropertyName("verification")] VexEvidenceVerificationMetadata? Verification,
    [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string> Metadata);

/// <summary>
/// Verification metadata for evidence bundles.
/// </summary>
public sealed record VexEvidenceVerificationMetadata(
    [property: JsonPropertyName("verified")] bool Verified,
    [property: JsonPropertyName("verifiedAt")] DateTimeOffset? VerifiedAt,
    [property: JsonPropertyName("signatureType")] string? SignatureType,
    [property: JsonPropertyName("keyId")] string? KeyId,
    [property: JsonPropertyName("issuer")] string? Issuer,
    [property: JsonPropertyName("transparencyRef")] string? TransparencyRef);

/// <summary>
/// Response for /evidence/vex/list endpoint.
/// </summary>
public sealed record VexEvidenceListResponse(
    [property: JsonPropertyName("items")] IReadOnlyList<VexEvidenceListItem> Items,
    [property: JsonPropertyName("cursor")] string? Cursor,
    [property: JsonPropertyName("hasMore")] bool HasMore,
    [property: JsonPropertyName("total")] int Total);

/// <summary>
/// Summary item for evidence list.
/// </summary>
public sealed record VexEvidenceListItem(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("contentHash")] string ContentHash,
    [property: JsonPropertyName("format")] string Format,
    [property: JsonPropertyName("itemCount")] int ItemCount,
    [property: JsonPropertyName("verified")] bool Verified);

/// <summary>
/// Response for /evidence/vex/lookup endpoint.
/// </summary>
public sealed record VexEvidenceLookupResponse(
    [property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
    [property: JsonPropertyName("productKey")] string ProductKey,
    [property: JsonPropertyName("evidenceItems")] IReadOnlyList<VexEvidenceItem> EvidenceItems,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt);

/// <summary>
/// Individual evidence item for a vuln/product pair.
/// </summary>
public sealed record VexEvidenceItem(
    [property: JsonPropertyName("observationId")] string ObservationId,
    [property: JsonPropertyName("providerId")] string ProviderId,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("justification")] string? Justification,
    [property: JsonPropertyName("firstSeen")] DateTimeOffset FirstSeen,
    [property: JsonPropertyName("lastSeen")] DateTimeOffset LastSeen,
    [property: JsonPropertyName("documentDigest")] string DocumentDigest,
    [property: JsonPropertyName("verification")] VexEvidenceVerificationMetadata? Verification);

/// <summary>
/// Response for /vuln/evidence/vex/{advisory_key} endpoint.
/// Returns tenant-scoped raw statements for Vuln Explorer evidence tabs.
/// </summary>
public sealed record VexAdvisoryEvidenceResponse(
    [property: JsonPropertyName("advisoryKey")] string AdvisoryKey,
    [property: JsonPropertyName("canonicalKey")] string CanonicalKey,
    [property: JsonPropertyName("scope")] string Scope,
    [property: JsonPropertyName("aliases")] IReadOnlyList<VexAdvisoryLinkResponse> Aliases,
    [property: JsonPropertyName("statements")] IReadOnlyList<VexAdvisoryStatementResponse> Statements,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt,
    [property: JsonPropertyName("totalCount")] int TotalCount);

/// <summary>
/// Advisory link for traceability (CVE, GHSA, RHSA, etc.).
/// </summary>
public sealed record VexAdvisoryLinkResponse(
    [property: JsonPropertyName("identifier")] string Identifier,
    [property: JsonPropertyName("type")] string Type,
    [property: JsonPropertyName("isOriginal")] bool IsOriginal);

/// <summary>
/// Raw VEX statement for an advisory with provenance and attestation metadata.
/// </summary>
public sealed record VexAdvisoryStatementResponse(
    [property: JsonPropertyName("statementId")] string StatementId,
    [property: JsonPropertyName("providerId")] string ProviderId,
    [property: JsonPropertyName("product")] VexAdvisoryProductResponse Product,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("justification")] string? Justification,
    [property: JsonPropertyName("detail")] string? Detail,
    [property: JsonPropertyName("firstSeen")] DateTimeOffset FirstSeen,
    [property: JsonPropertyName("lastSeen")] DateTimeOffset LastSeen,
    [property: JsonPropertyName("provenance")] VexAdvisoryProvenanceResponse Provenance,
    [property: JsonPropertyName("attestation")] VexAdvisoryAttestationResponse? Attestation);

/// <summary>
/// Product information for an advisory statement.
/// </summary>
public sealed record VexAdvisoryProductResponse(
    [property: JsonPropertyName("key")] string Key,
    [property: JsonPropertyName("name")] string? Name,
    [property: JsonPropertyName("version")] string? Version,
    [property: JsonPropertyName("purl")] string? Purl,
    [property: JsonPropertyName("cpe")] string? Cpe);

/// <summary>
/// Provenance metadata for a VEX statement.
/// </summary>
public sealed record VexAdvisoryProvenanceResponse(
    [property: JsonPropertyName("documentDigest")] string DocumentDigest,
    [property: JsonPropertyName("documentFormat")] string DocumentFormat,
    [property: JsonPropertyName("sourceUri")] string SourceUri,
    [property: JsonPropertyName("revision")] string? Revision,
    [property: JsonPropertyName("insertedAt")] DateTimeOffset InsertedAt);

/// <summary>
/// Attestation metadata for signature verification.
/// </summary>
public sealed record VexAdvisoryAttestationResponse(
    [property: JsonPropertyName("signatureType")] string SignatureType,
    [property: JsonPropertyName("issuer")] string? Issuer,
    [property: JsonPropertyName("subject")] string? Subject,
    [property: JsonPropertyName("keyId")] string? KeyId,
    [property: JsonPropertyName("verifiedAt")] DateTimeOffset? VerifiedAt,
    [property: JsonPropertyName("transparencyLogRef")] string? TransparencyLogRef,
    [property: JsonPropertyName("trustWeight")] decimal? TrustWeight,
    [property: JsonPropertyName("trustTier")] string? TrustTier);

View File

@@ -0,0 +1,347 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Attestation API endpoints (WEB-OBS-54-001).
/// Exposes /attestations/vex/* endpoints returning DSSE verification state,
/// builder identity, and chain-of-custody links.
/// </summary>
public static class AttestationEndpoints
{
/// <summary>
/// Registers the three /attestations/vex/* minimal-API endpoints: list (cursor
/// paged), detail by id, and lookup by linkset/observation. All require the
/// "vex.read" scope and resolve the tenant from the X-Stella-Tenant header.
/// </summary>
public static void MapAttestationEndpoints(this WebApplication app)
{
    // GET /attestations/vex/list - List attestations
    app.MapGet("/attestations/vex/list", async (
        HttpContext context,
        IOptions<VexMongoStorageOptions> storageOptions,
        [FromServices] IMongoDatabase database,
        TimeProvider timeProvider,
        [FromQuery] int? limit,
        [FromQuery] string? cursor,
        [FromQuery] string? vulnerabilityId,
        [FromQuery] string? productKey,
        CancellationToken cancellationToken) =>
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
        {
            return tenantError;
        }
        // Page size defaults to 50, clamped to [1, 200].
        var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 200);
        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Attestations);
        var builder = Builders<BsonDocument>.Filter;
        var filters = new List<FilterDefinition<BsonDocument>>();
        // NOTE(review): filters assume documents store vulnerability ids
        // upper-cased and product keys lower-cased — confirm against the
        // writer's normalization.
        if (!string.IsNullOrWhiteSpace(vulnerabilityId))
        {
            filters.Add(builder.Eq("VulnerabilityId", vulnerabilityId.Trim().ToUpperInvariant()));
        }
        if (!string.IsNullOrWhiteSpace(productKey))
        {
            filters.Add(builder.Eq("ProductKey", productKey.Trim().ToLowerInvariant()));
        }
        // Parse cursor if provided. Keyset pagination over (IssuedAt desc, _id desc):
        // take documents strictly older, or same-time with a smaller _id.
        // Malformed cursors are silently ignored (first page returned).
        if (!string.IsNullOrWhiteSpace(cursor) && TryDecodeCursor(cursor, out var cursorTime, out var cursorId))
        {
            var ltTime = builder.Lt("IssuedAt", cursorTime);
            var eqTimeLtId = builder.And(
                builder.Eq("IssuedAt", cursorTime),
                builder.Lt("_id", cursorId));
            filters.Add(builder.Or(ltTime, eqTimeLtId));
        }
        var filter = filters.Count == 0 ? builder.Empty : builder.And(filters);
        var sort = Builders<BsonDocument>.Sort.Descending("IssuedAt").Descending("_id");
        var documents = await collection
            .Find(filter)
            .Sort(sort)
            .Limit(take)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        var items = documents.Select(doc => ToListItem(doc, tenant, timeProvider)).ToList();
        string? nextCursor = null;
        // NOTE(review): a full page is treated as "has more", which yields a
        // false positive when the result count is exactly `take`; the common
        // fix is fetching take + 1.
        var hasMore = documents.Count == take;
        if (hasMore && documents.Count > 0)
        {
            var last = documents[^1];
            // NOTE(review): ToUniversalTime()/AsString throw on BsonNull, so a
            // document with a missing IssuedAt or non-string _id fails the
            // request here — confirm the schema guarantees both fields.
            var lastTime = last.GetValue("IssuedAt", BsonNull.Value).ToUniversalTime();
            var lastId = last.GetValue("_id", BsonNull.Value).AsString;
            nextCursor = EncodeCursor(lastTime, lastId);
        }
        // Total is the page item count, not the overall match count.
        var response = new VexAttestationListResponse(items, nextCursor, hasMore, items.Count);
        return Results.Ok(response);
    }).WithName("ListVexAttestations");
    // GET /attestations/vex/{attestationId} - Get attestation details
    // (The literal /attestations/vex/list and /attestations/vex/lookup routes
    // take precedence over this parameterized template in ASP.NET Core routing.)
    app.MapGet("/attestations/vex/{attestationId}", async (
        HttpContext context,
        string attestationId,
        IOptions<VexMongoStorageOptions> storageOptions,
        [FromServices] IVexAttestationLinkStore attestationStore,
        TimeProvider timeProvider,
        CancellationToken cancellationToken) =>
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
        {
            return tenantError;
        }
        if (string.IsNullOrWhiteSpace(attestationId))
        {
            return Results.BadRequest(new { error = new { code = "ERR_ATTESTATION_ID", message = "attestationId is required" } });
        }
        var attestation = await attestationStore.FindAsync(attestationId.Trim(), cancellationToken).ConfigureAwait(false);
        if (attestation is null)
        {
            return Results.NotFound(new { error = new { code = "ERR_NOT_FOUND", message = $"Attestation '{attestationId}' not found" } });
        }
        // Build subject from observation context; fall back to the observation
        // id when no explicit digest is present in the metadata.
        var subjectDigest = attestation.Metadata.TryGetValue("digest", out var dig) ? dig : attestation.ObservationId;
        var subject = new VexAttestationSubject(
            Digest: subjectDigest,
            DigestAlgorithm: "sha256",
            Name: $"{attestation.VulnerabilityId}/{attestation.ProductKey}",
            Uri: null);
        var builder = new VexAttestationBuilderIdentity(
            Id: attestation.SupplierId,
            Version: null,
            BuilderId: attestation.SupplierId,
            InvocationId: attestation.ObservationId);
        // Get verification state from metadata ("verified" is a stringly-typed
        // flag; only the exact value "true" counts as valid).
        var isValid = attestation.Metadata.TryGetValue("verified", out var verified) && verified == "true";
        DateTimeOffset? verifiedAt = null;
        if (attestation.Metadata.TryGetValue("verifiedAt", out var verifiedAtStr) &&
            DateTimeOffset.TryParse(verifiedAtStr, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsedVerifiedAt))
        {
            verifiedAt = parsedVerifiedAt;
        }
        var verification = new VexAttestationVerificationState(
            Valid: isValid,
            VerifiedAt: verifiedAt,
            SignatureType: attestation.Metadata.GetValueOrDefault("signatureType", "dsse"),
            KeyId: attestation.Metadata.GetValueOrDefault("keyId"),
            Issuer: attestation.Metadata.GetValueOrDefault("issuer"),
            EnvelopeDigest: attestation.Metadata.GetValueOrDefault("envelopeDigest"),
            Diagnostics: attestation.Metadata);
        // Two-step custody chain: supplier creation, then Excititor linking.
        var custodyLinks = new List<VexAttestationCustodyLink>
        {
            new(
                Step: 1,
                Actor: attestation.SupplierId,
                Action: "created",
                Timestamp: attestation.IssuedAt,
                Reference: attestation.AttestationId)
        };
        // Add linkset link
        custodyLinks.Add(new VexAttestationCustodyLink(
            Step: 2,
            Actor: "excititor",
            Action: "linked_to_observation",
            Timestamp: attestation.IssuedAt,
            Reference: attestation.LinksetId));
        var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            ["observationId"] = attestation.ObservationId,
            ["linksetId"] = attestation.LinksetId,
            ["vulnerabilityId"] = attestation.VulnerabilityId,
            ["productKey"] = attestation.ProductKey
        };
        if (!string.IsNullOrWhiteSpace(attestation.JustificationSummary))
        {
            metadata["justificationSummary"] = attestation.JustificationSummary;
        }
        var response = new VexAttestationDetailResponse(
            AttestationId: attestation.AttestationId,
            Tenant: tenant,
            CreatedAt: attestation.IssuedAt,
            PredicateType: attestation.Metadata.GetValueOrDefault("predicateType", "https://in-toto.io/attestation/v1"),
            Subject: subject,
            Builder: builder,
            Verification: verification,
            ChainOfCustody: custodyLinks,
            Metadata: metadata);
        return Results.Ok(response);
    }).WithName("GetVexAttestation");
    // GET /attestations/vex/lookup - Lookup attestations by linkset or observation
    app.MapGet("/attestations/vex/lookup", async (
        HttpContext context,
        IOptions<VexMongoStorageOptions> storageOptions,
        [FromServices] IMongoDatabase database,
        TimeProvider timeProvider,
        [FromQuery] string? linksetId,
        [FromQuery] string? observationId,
        [FromQuery] int? limit,
        CancellationToken cancellationToken) =>
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
        {
            return tenantError;
        }
        if (string.IsNullOrWhiteSpace(linksetId) && string.IsNullOrWhiteSpace(observationId))
        {
            return Results.BadRequest(new { error = new { code = "ERR_PARAMS", message = "Either linksetId or observationId is required" } });
        }
        var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 100);
        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Attestations);
        var builder = Builders<BsonDocument>.Filter;
        // linksetId wins when both parameters are supplied.
        FilterDefinition<BsonDocument> filter;
        if (!string.IsNullOrWhiteSpace(linksetId))
        {
            filter = builder.Eq("LinksetId", linksetId.Trim());
        }
        else
        {
            filter = builder.Eq("ObservationId", observationId!.Trim());
        }
        var sort = Builders<BsonDocument>.Sort.Descending("IssuedAt");
        var documents = await collection
            .Find(filter)
            .Sort(sort)
            .Limit(take)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        var items = documents.Select(doc => ToListItem(doc, tenant, timeProvider)).ToList();
        // SubjectDigest echoes the raw (untrimmed) lookup key back to the caller.
        var response = new VexAttestationLookupResponse(
            SubjectDigest: linksetId ?? observationId ?? string.Empty,
            Attestations: items,
            QueriedAt: timeProvider.GetUtcNow());
        return Results.Ok(response);
    }).WithName("LookupVexAttestations");
}
/// <summary>
/// Projects a raw attestation document into the list-item DTO.
/// </summary>
/// <remarks>
/// Field access is defensive: <c>BsonValue.AsString</c> throws
/// <c>InvalidCastException</c> on <c>BsonNull</c>, so the previous
/// <c>GetValue(..., BsonNull.Value).AsString ?? fallback</c> pattern threw on any
/// document missing the field instead of ever reaching the fallback.
/// </remarks>
private static VexAttestationListItem ToListItem(BsonDocument doc, string tenant, TimeProvider timeProvider)
{
    // Safe string extraction: null when the field is absent or not a BSON string.
    static string? GetString(BsonDocument document, string name)
        => document.TryGetValue(name, out var value) && value.IsString ? value.AsString : null;

    // Valid == true only when Metadata.verified is the string "true" (as stored).
    var metadataVerified = false;
    if (doc.TryGetValue("Metadata", out var metadata) && metadata.IsBsonDocument)
    {
        metadataVerified = metadata.AsBsonDocument.TryGetValue("verified", out var verified)
            && verified.IsString
            && verified.AsString == "true";
    }

    return new VexAttestationListItem(
        AttestationId: GetString(doc, "_id") ?? string.Empty,
        Tenant: tenant,
        CreatedAt: doc.TryGetValue("IssuedAt", out var issuedAt) && issuedAt.IsBsonDateTime
            ? new DateTimeOffset(issuedAt.ToUniversalTime(), TimeSpan.Zero)
            : timeProvider.GetUtcNow(),  // fallback "now" when IssuedAt is missing/not a date
        PredicateType: "https://in-toto.io/attestation/v1",
        SubjectDigest: GetString(doc, "ObservationId") ?? string.Empty,
        Valid: metadataVerified,
        BuilderId: GetString(doc, "SupplierId"));
}
/// <summary>
/// Resolves the effective tenant for the request. The configured default tenant is
/// used unless the caller sends X-Stella-Tenant; an empty header is a 400 and a
/// header naming any other tenant is a 403 (case-insensitive comparison).
/// </summary>
private static bool TryResolveTenant(HttpContext context, VexMongoStorageOptions options, out string tenant, out IResult? problem)
{
    tenant = options.DefaultTenant;
    problem = null;

    // No header at all: silently fall back to the configured default tenant.
    if (!context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerValues) || headerValues.Count == 0)
    {
        return true;
    }

    var requested = headerValues[0]?.Trim();
    if (string.IsNullOrEmpty(requested))
    {
        problem = Results.BadRequest(new { error = new { code = "ERR_TENANT", message = "X-Stella-Tenant header must not be empty" } });
        return false;
    }

    // Only the configured tenant may be requested explicitly.
    if (!string.Equals(requested, options.DefaultTenant, StringComparison.OrdinalIgnoreCase))
    {
        problem = Results.Json(
            new { error = new { code = "ERR_TENANT_FORBIDDEN", message = $"Tenant '{requested}' is not allowed" } },
            statusCode: StatusCodes.Status403Forbidden);
        return false;
    }

    tenant = requested;
    return true;
}
/// <summary>
/// Decodes a pagination cursor of the form base64("&lt;ISO-8601 timestamp&gt;|&lt;id&gt;").
/// Returns false (with default outputs) for any malformed input.
/// </summary>
private static bool TryDecodeCursor(string cursor, out DateTime timestamp, out string id)
{
    timestamp = default;
    id = string.Empty;

    byte[] decoded;
    try
    {
        decoded = Convert.FromBase64String(cursor);
    }
    catch (FormatException)
    {
        return false;
    }

    var payload = System.Text.Encoding.UTF8.GetString(decoded);

    // Exactly one '|' separator is required: "timestamp|id".
    var separator = payload.IndexOf('|');
    if (separator < 0 || payload.IndexOf('|', separator + 1) >= 0)
    {
        return false;
    }

    if (!DateTimeOffset.TryParse(payload[..separator], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
    {
        return false;
    }

    timestamp = parsed.UtcDateTime;
    id = payload[(separator + 1)..];
    return true;
}
/// <summary>
/// Encodes a pagination cursor as base64("timestamp|id"), using the round-trip ("O")
/// timestamp format so <c>TryDecodeCursor</c> can parse it back exactly.
/// </summary>
private static string EncodeCursor(DateTime timestamp, string id)
{
    var payload = timestamp.ToString("O", CultureInfo.InvariantCulture) + "|" + id;
    return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload));
}
}

View File

@@ -0,0 +1,311 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Canonicalization;
using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Evidence API endpoints (WEB-OBS-53-001).
/// Exposes /evidence/vex/* endpoints that fetch locker bundles, enforce scopes,
/// and surface verification metadata without synthesizing verdicts.
/// </summary>
public static class EvidenceEndpoints
{
    public static void MapEvidenceEndpoints(this WebApplication app)
    {
        // GET /evidence/vex/list - List evidence exports (newest first, cursor-paged).
        app.MapGet("/evidence/vex/list", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IMongoDatabase database,
            TimeProvider timeProvider,
            [FromQuery] int? limit,
            [FromQuery] string? cursor,
            [FromQuery] string? format,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }

            var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 200);
            var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Exports);
            var builder = Builders<BsonDocument>.Filter;
            var filters = new List<FilterDefinition<BsonDocument>>();
            if (!string.IsNullOrWhiteSpace(format))
            {
                filters.Add(builder.Eq("Format", format.Trim().ToLowerInvariant()));
            }

            // Cursor is base64("timestamp|id"); resume strictly after that position in the
            // (CreatedAt desc, _id desc) sort order. Malformed cursors are silently ignored.
            if (!string.IsNullOrWhiteSpace(cursor) && TryDecodeCursor(cursor, out var cursorTime, out var cursorId))
            {
                var ltTime = builder.Lt("CreatedAt", cursorTime);
                var eqTimeLtId = builder.And(
                    builder.Eq("CreatedAt", cursorTime),
                    builder.Lt("_id", cursorId));
                filters.Add(builder.Or(ltTime, eqTimeLtId));
            }

            var filter = filters.Count == 0 ? builder.Empty : builder.And(filters);
            var sort = Builders<BsonDocument>.Sort.Descending("CreatedAt").Descending("_id");
            var documents = await collection
                .Find(filter)
                .Sort(sort)
                .Limit(take)
                .ToListAsync(cancellationToken)
                .ConfigureAwait(false);

            // Field access goes through the safe helpers below: BsonValue.AsString throws
            // InvalidCastException on BsonNull, so the previous
            // "GetValue(..., BsonNull.Value).AsString ?? fallback" pattern threw on any
            // document missing the field instead of using the fallback.
            var items = documents.Select(doc => new VexEvidenceListItem(
                BundleId: GetStringOrNull(doc, "ExportId") ?? GetStringOrNull(doc, "_id") ?? string.Empty,
                Tenant: tenant,
                CreatedAt: GetDateTimeOffsetOrDefault(doc, "CreatedAt", timeProvider),
                ContentHash: GetStringOrNull(doc, "ArtifactDigest") ?? string.Empty,
                Format: GetStringOrNull(doc, "Format") ?? "json",
                ItemCount: doc.TryGetValue("ClaimCount", out var claimCount) && claimCount.IsInt32 ? claimCount.AsInt32 : 0,
                Verified: doc.Contains("Attestation") && !doc["Attestation"].IsBsonNull)).ToList();

            // A full page suggests more data; emit a resume cursor anchored on the last row.
            // The cursor is omitted (rather than throwing, as before) when the anchor
            // fields are missing or not of the expected BSON types.
            string? nextCursor = null;
            var hasMore = documents.Count == take;
            if (hasMore)
            {
                var last = documents[^1];
                if (last.TryGetValue("CreatedAt", out var lastCreated) && lastCreated.IsBsonDateTime
                    && GetStringOrNull(last, "_id") is { } lastId)
                {
                    nextCursor = EncodeCursor(lastCreated.ToUniversalTime(), lastId);
                }
            }

            var response = new VexEvidenceListResponse(items, nextCursor, hasMore, items.Count);
            return Results.Ok(response);
        }).WithName("ListVexEvidence");

        // GET /evidence/vex/bundle/{bundleId} - Get evidence bundle details.
        app.MapGet("/evidence/vex/bundle/{bundleId}", async (
            HttpContext context,
            string bundleId,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IMongoDatabase database,
            TimeProvider timeProvider,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            if (string.IsNullOrWhiteSpace(bundleId))
            {
                return Results.BadRequest(new { error = new { code = "ERR_BUNDLE_ID", message = "bundleId is required" } });
            }

            // A bundle may be addressed by its Mongo _id or its logical ExportId.
            var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Exports);
            var filter = Builders<BsonDocument>.Filter.Or(
                Builders<BsonDocument>.Filter.Eq("_id", bundleId.Trim()),
                Builders<BsonDocument>.Filter.Eq("ExportId", bundleId.Trim()));
            var doc = await collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
            if (doc is null)
            {
                return Results.NotFound(new { error = new { code = "ERR_NOT_FOUND", message = $"Evidence bundle '{bundleId}' not found" } });
            }

            // Surface attestation metadata exactly as stored; the presence of an
            // Attestation subdocument is what marks the bundle as verified.
            VexEvidenceVerificationMetadata? verification = null;
            if (doc.Contains("Attestation") && !doc["Attestation"].IsBsonNull)
            {
                var att = doc["Attestation"].AsBsonDocument;
                verification = new VexEvidenceVerificationMetadata(
                    Verified: true,
                    VerifiedAt: att.TryGetValue("SignedAt", out var signedAt) && signedAt.IsBsonDateTime
                        ? new DateTimeOffset(signedAt.ToUniversalTime(), TimeSpan.Zero)
                        : null,
                    SignatureType: "dsse",
                    KeyId: GetStringOrNull(att, "KeyId"),
                    Issuer: GetStringOrNull(att, "Issuer"),
                    TransparencyRef: att.TryGetValue("Rekor", out var rekor) && rekor.IsBsonDocument
                        ? GetStringOrNull(rekor.AsBsonDocument, "Location")
                        : null);
            }

            var metadata = new Dictionary<string, string>(StringComparer.Ordinal);
            if (doc.TryGetValue("SourceProviders", out var providers) && providers.IsBsonArray)
            {
                // Non-string array entries are skipped rather than throwing.
                metadata["sourceProviders"] = string.Join(",", providers.AsBsonArray.Where(v => v.IsString).Select(v => v.AsString));
            }
            if (GetStringOrNull(doc, "PolicyRevisionId") is { } policyRevisionId)
            {
                metadata["policyRevisionId"] = policyRevisionId;
            }

            var response = new VexEvidenceBundleResponse(
                BundleId: GetStringOrNull(doc, "ExportId") ?? bundleId.Trim(),
                Tenant: tenant,
                CreatedAt: GetDateTimeOffsetOrDefault(doc, "CreatedAt", timeProvider),
                ContentHash: GetStringOrNull(doc, "ArtifactDigest") ?? string.Empty,
                Format: GetStringOrNull(doc, "Format") ?? "json",
                ItemCount: doc.TryGetValue("ClaimCount", out var claimCount) && claimCount.IsInt32 ? claimCount.AsInt32 : 0,
                Verification: verification,
                Metadata: metadata);
            return Results.Ok(response);
        }).WithName("GetVexEvidenceBundle");

        // GET /evidence/vex/lookup - Lookup evidence for a vulnerability/product pair.
        app.MapGet("/evidence/vex/lookup", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexObservationProjectionService projectionService,
            TimeProvider timeProvider,
            [FromQuery] string vulnerabilityId,
            [FromQuery] string productKey,
            [FromQuery] int? limit,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            if (string.IsNullOrWhiteSpace(vulnerabilityId) || string.IsNullOrWhiteSpace(productKey))
            {
                return Results.BadRequest(new { error = new { code = "ERR_PARAMS", message = "vulnerabilityId and productKey are required" } });
            }

            var take = Math.Clamp(limit.GetValueOrDefault(100), 1, 500);
            var request = new VexObservationProjectionRequest(
                tenant,
                vulnerabilityId.Trim(),
                productKey.Trim(),
                ImmutableHashSet<string>.Empty,
                ImmutableHashSet<VexClaimStatus>.Empty,
                null,
                take);
            var result = await projectionService.QueryAsync(request, cancellationToken).ConfigureAwait(false);

            // Project statements verbatim; verification metadata is surfaced only when the
            // upstream statement carried a signature (no verdicts are synthesized here).
            var items = result.Statements.Select(s => new VexEvidenceItem(
                ObservationId: s.ObservationId,
                ProviderId: s.ProviderId,
                Status: s.Status.ToString().ToLowerInvariant(),
                Justification: s.Justification?.ToString().ToLowerInvariant(),
                FirstSeen: s.FirstSeen,
                LastSeen: s.LastSeen,
                DocumentDigest: s.Document.Digest,
                Verification: s.Signature is null ? null : new VexEvidenceVerificationMetadata(
                    Verified: s.Signature.VerifiedAt.HasValue,
                    VerifiedAt: s.Signature.VerifiedAt,
                    SignatureType: s.Signature.Type,
                    KeyId: s.Signature.KeyId,
                    Issuer: s.Signature.Issuer,
                    TransparencyRef: null))).ToList();

            var response = new VexEvidenceLookupResponse(
                VulnerabilityId: vulnerabilityId.Trim(),
                ProductKey: productKey.Trim(),
                EvidenceItems: items,
                QueriedAt: timeProvider.GetUtcNow());
            return Results.Ok(response);
        }).WithName("LookupVexEvidence");
    }

    /// <summary>Returns the named field as a string, or null when absent or not a BSON string.</summary>
    private static string? GetStringOrNull(BsonDocument doc, string name)
        => doc.TryGetValue(name, out var value) && value.IsString ? value.AsString : null;

    /// <summary>Returns the named field as a UTC offset, falling back to "now" when absent or not a BSON date.</summary>
    private static DateTimeOffset GetDateTimeOffsetOrDefault(BsonDocument doc, string name, TimeProvider timeProvider)
        => doc.TryGetValue(name, out var value) && value.IsBsonDateTime
            ? new DateTimeOffset(value.ToUniversalTime(), TimeSpan.Zero)
            : timeProvider.GetUtcNow();

    /// <summary>
    /// Resolves the effective tenant: the configured default unless the caller sends
    /// X-Stella-Tenant, which must be non-empty (400) and match the default (else 403).
    /// </summary>
    private static bool TryResolveTenant(HttpContext context, VexMongoStorageOptions options, out string tenant, out IResult? problem)
    {
        tenant = options.DefaultTenant;
        problem = null;
        if (context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerValues) && headerValues.Count > 0)
        {
            var requestedTenant = headerValues[0]?.Trim();
            if (string.IsNullOrEmpty(requestedTenant))
            {
                problem = Results.BadRequest(new { error = new { code = "ERR_TENANT", message = "X-Stella-Tenant header must not be empty" } });
                return false;
            }
            if (!string.Equals(requestedTenant, options.DefaultTenant, StringComparison.OrdinalIgnoreCase))
            {
                problem = Results.Json(
                    new { error = new { code = "ERR_TENANT_FORBIDDEN", message = $"Tenant '{requestedTenant}' is not allowed" } },
                    statusCode: StatusCodes.Status403Forbidden);
                return false;
            }
            tenant = requestedTenant;
        }
        return true;
    }

    /// <summary>Decodes a base64("timestamp|id") pagination cursor; false on any malformed input.</summary>
    private static bool TryDecodeCursor(string cursor, out DateTime timestamp, out string id)
    {
        timestamp = default;
        id = string.Empty;
        try
        {
            var payload = System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(cursor));
            var parts = payload.Split('|');
            if (parts.Length != 2)
            {
                return false;
            }
            if (!DateTimeOffset.TryParse(parts[0], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
            {
                return false;
            }
            timestamp = parsed.UtcDateTime;
            id = parts[1];
            return true;
        }
        catch
        {
            return false;
        }
    }

    /// <summary>Encodes a pagination cursor as base64("timestamp|id") using the round-trip ("O") format.</summary>
    private static string EncodeCursor(DateTime timestamp, string id)
    {
        var payload = FormattableString.Invariant($"{timestamp:O}|{id}");
        return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload));
    }
}

View File

@@ -0,0 +1,366 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
using StellaOps.Excititor.WebService.Telemetry;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Linkset API endpoints (EXCITITOR-LNM-21-202).
/// Exposes /vex/linksets/* endpoints that surface alias mappings, conflict markers,
/// and provenance proofs exactly as stored. Errors map to ERR_AGG_* codes.
/// </summary>
public static class LinksetEndpoints
{
    public static void MapLinksetEndpoints(this WebApplication app)
    {
        var group = app.MapGroup("/vex/linksets");

        // GET /vex/linksets - List linksets with filters.
        group.MapGet("", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexLinksetStore linksetStore,
            [FromQuery] int? limit,
            [FromQuery] string? cursor,
            [FromQuery] string? vulnerabilityId,
            [FromQuery] string? productKey,
            [FromQuery] string? providerId,
            [FromQuery] bool? hasConflicts,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }

            // TODO(review): `cursor` is accepted but the IVexLinksetStore queries expose no
            // resume position, so listing always restarts at the top - confirm against the
            // store contract before relying on nextCursor for deep pagination.
            var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 100);

            // Fetch one extra row so we can tell whether another page exists. Previously
            // the store was asked for exactly `take` rows, which made the
            // `linksets.Count > take` check below unreachable and nextCursor always null.
            var fetch = take + 1;
            IReadOnlyList<VexLinkset> linksets;

            // Route to the appropriate query method based on filters.
            if (hasConflicts == true)
            {
                linksets = await linksetStore
                    .FindWithConflictsAsync(tenant, fetch, cancellationToken)
                    .ConfigureAwait(false);
            }
            else if (!string.IsNullOrWhiteSpace(vulnerabilityId))
            {
                linksets = await linksetStore
                    .FindByVulnerabilityAsync(tenant, vulnerabilityId.Trim(), fetch, cancellationToken)
                    .ConfigureAwait(false);
            }
            else if (!string.IsNullOrWhiteSpace(productKey))
            {
                linksets = await linksetStore
                    .FindByProductKeyAsync(tenant, productKey.Trim(), fetch, cancellationToken)
                    .ConfigureAwait(false);
            }
            else if (!string.IsNullOrWhiteSpace(providerId))
            {
                linksets = await linksetStore
                    .FindByProviderAsync(tenant, providerId.Trim(), fetch, cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                return Results.BadRequest(new
                {
                    error = new
                    {
                        code = "ERR_AGG_PARAMS",
                        message = "At least one filter is required: vulnerabilityId, productKey, providerId, or hasConflicts=true"
                    }
                });
            }

            var items = linksets
                .Take(take)
                .Select(ToListItem)
                .ToList();

            // Record conflict metrics (EXCITITOR-OBS-51-001) for the rows actually emitted.
            foreach (var linkset in linksets.Take(take))
            {
                if (linkset.HasConflicts)
                {
                    LinksetTelemetry.RecordLinksetDisagreements(tenant, linkset);
                }
            }

            var hasMore = linksets.Count > take;
            string? nextCursor = null;
            if (hasMore && items.Count > 0)
            {
                var last = linksets[items.Count - 1];
                nextCursor = EncodeCursor(last.UpdatedAt.UtcDateTime, last.LinksetId);
            }

            var response = new VexLinksetListResponse(items, nextCursor);
            return Results.Ok(response);
        }).WithName("ListVexLinksets");

        // GET /vex/linksets/{linksetId} - Get linkset by ID. ASP.NET Core route precedence
        // favors the literal templates (/lookup, /count, /conflicts) over this parameter
        // template, so they are not shadowed by it.
        group.MapGet("/{linksetId}", async (
            HttpContext context,
            string linksetId,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexLinksetStore linksetStore,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            if (string.IsNullOrWhiteSpace(linksetId))
            {
                return Results.BadRequest(new
                {
                    error = new { code = "ERR_AGG_PARAMS", message = "linksetId is required" }
                });
            }
            var linkset = await linksetStore
                .GetByIdAsync(tenant, linksetId.Trim(), cancellationToken)
                .ConfigureAwait(false);
            if (linkset is null)
            {
                return Results.NotFound(new
                {
                    error = new { code = "ERR_AGG_NOT_FOUND", message = $"Linkset '{linksetId}' not found" }
                });
            }
            var response = ToDetailResponse(linkset);
            return Results.Ok(response);
        }).WithName("GetVexLinkset");

        // GET /vex/linksets/lookup - Lookup a linkset by vulnerability and product.
        group.MapGet("/lookup", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexLinksetStore linksetStore,
            [FromQuery] string? vulnerabilityId,
            [FromQuery] string? productKey,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            if (string.IsNullOrWhiteSpace(vulnerabilityId) || string.IsNullOrWhiteSpace(productKey))
            {
                return Results.BadRequest(new
                {
                    error = new { code = "ERR_AGG_PARAMS", message = "vulnerabilityId and productKey are required" }
                });
            }

            // Linkset IDs are deterministic per (tenant, vuln, product), so lookup is a
            // direct GetById rather than a query.
            var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId.Trim(), productKey.Trim());
            var linkset = await linksetStore
                .GetByIdAsync(tenant, linksetId, cancellationToken)
                .ConfigureAwait(false);
            if (linkset is null)
            {
                return Results.NotFound(new
                {
                    error = new { code = "ERR_AGG_NOT_FOUND", message = "No linkset found for the specified vulnerability and product" }
                });
            }
            var response = ToDetailResponse(linkset);
            return Results.Ok(response);
        }).WithName("LookupVexLinkset");

        // GET /vex/linksets/count - Get linkset counts for the tenant.
        group.MapGet("/count", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexLinksetStore linksetStore,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            var total = await linksetStore
                .CountAsync(tenant, cancellationToken)
                .ConfigureAwait(false);
            var withConflicts = await linksetStore
                .CountWithConflictsAsync(tenant, cancellationToken)
                .ConfigureAwait(false);
            return Results.Ok(new LinksetCountResponse(total, withConflicts));
        }).WithName("CountVexLinksets");

        // GET /vex/linksets/conflicts - List linksets with conflicts (shorthand, unpaged).
        group.MapGet("/conflicts", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexLinksetStore linksetStore,
            [FromQuery] int? limit,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 100);
            var linksets = await linksetStore
                .FindWithConflictsAsync(tenant, take, cancellationToken)
                .ConfigureAwait(false);
            var items = linksets.Select(ToListItem).ToList();
            var response = new VexLinksetListResponse(items, null);
            return Results.Ok(response);
        }).WithName("ListVexLinksetConflicts");
    }

    /// <summary>
    /// Projects a linkset into the list-item DTO. Alias/purl/cpe/reference data lives on
    /// observations, not linksets, so those collections are emitted empty here.
    /// </summary>
    private static VexLinksetListItem ToListItem(VexLinkset linkset)
    {
        return new VexLinksetListItem(
            LinksetId: linkset.LinksetId,
            Tenant: linkset.Tenant,
            VulnerabilityId: linkset.VulnerabilityId,
            ProductKey: linkset.ProductKey,
            ProviderIds: linkset.ProviderIds.ToList(),
            Statuses: linkset.Statuses.ToList(),
            Aliases: Array.Empty<string>(), // Aliases are in observations, not linksets
            Purls: Array.Empty<string>(),
            Cpes: Array.Empty<string>(),
            References: Array.Empty<VexLinksetReference>(),
            Disagreements: linkset.Disagreements
                .Select(d => new VexLinksetDisagreement(d.ProviderId, d.Status, d.Justification, d.Confidence))
                .ToList(),
            Observations: linkset.Observations
                .Select(o => new VexLinksetObservationRef(o.ObservationId, o.ProviderId, o.Status, o.Confidence))
                .ToList(),
            CreatedAt: linkset.CreatedAt);
    }

    /// <summary>Projects a linkset into the full detail DTO, stored values passed through verbatim.</summary>
    private static VexLinksetDetailResponse ToDetailResponse(VexLinkset linkset)
    {
        return new VexLinksetDetailResponse(
            LinksetId: linkset.LinksetId,
            Tenant: linkset.Tenant,
            VulnerabilityId: linkset.VulnerabilityId,
            ProductKey: linkset.ProductKey,
            ProviderIds: linkset.ProviderIds.ToList(),
            Statuses: linkset.Statuses.ToList(),
            Confidence: linkset.Confidence.ToString().ToLowerInvariant(),
            HasConflicts: linkset.HasConflicts,
            Disagreements: linkset.Disagreements
                .Select(d => new VexLinksetDisagreement(d.ProviderId, d.Status, d.Justification, d.Confidence))
                .ToList(),
            Observations: linkset.Observations
                .Select(o => new VexLinksetObservationRef(o.ObservationId, o.ProviderId, o.Status, o.Confidence))
                .ToList(),
            CreatedAt: linkset.CreatedAt,
            UpdatedAt: linkset.UpdatedAt);
    }

    /// <summary>
    /// Resolves the tenant from X-Stella-Tenant (lower-cased), falling back to the
    /// configured default; 400 when neither is available. Note: unlike the attestation
    /// endpoints, any header value is accepted here - no match against the default is enforced.
    /// </summary>
    private static bool TryResolveTenant(
        HttpContext context,
        VexMongoStorageOptions options,
        out string tenant,
        out IResult? problem)
    {
        problem = null;
        tenant = string.Empty;
        var headerTenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
        if (!string.IsNullOrWhiteSpace(headerTenant))
        {
            tenant = headerTenant.Trim().ToLowerInvariant();
        }
        else if (!string.IsNullOrWhiteSpace(options.DefaultTenant))
        {
            tenant = options.DefaultTenant.Trim().ToLowerInvariant();
        }
        else
        {
            problem = Results.BadRequest(new
            {
                error = new { code = "ERR_TENANT", message = "X-Stella-Tenant header is required" }
            });
            return false;
        }
        return true;
    }

    /// <summary>Encodes a pagination cursor as base64("timestamp|id") using the round-trip ("O") format.</summary>
    private static string EncodeCursor(DateTime timestamp, string id)
    {
        var raw = $"{timestamp:O}|{id}";
        return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(raw));
    }
}
// Detail response for single linkset
/// <summary>
/// Full linkset projection returned by GET /vex/linksets/{linksetId} and /lookup:
/// provider coverage, aggregate confidence, conflict markers, and observation references.
/// </summary>
public sealed record VexLinksetDetailResponse(
[property: JsonPropertyName("linksetId")] string LinksetId,
[property: JsonPropertyName("tenant")] string Tenant,
[property: JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
[property: JsonPropertyName("productKey")] string ProductKey,
[property: JsonPropertyName("providerIds")] IReadOnlyList<string> ProviderIds,
[property: JsonPropertyName("statuses")] IReadOnlyList<string> Statuses,
[property: JsonPropertyName("confidence")] string Confidence,
[property: JsonPropertyName("hasConflicts")] bool HasConflicts,
[property: JsonPropertyName("disagreements")] IReadOnlyList<VexLinksetDisagreement> Disagreements,
[property: JsonPropertyName("observations")] IReadOnlyList<VexLinksetObservationRef> Observations,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("updatedAt")] DateTimeOffset UpdatedAt);
// Count response
/// <summary>Aggregate linkset counts for a tenant, returned by GET /vex/linksets/count.</summary>
public sealed record LinksetCountResponse(
[property: JsonPropertyName("total")] long Total,
[property: JsonPropertyName("withConflicts")] long WithConflicts);

View File

@@ -0,0 +1,310 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Observation API endpoints (EXCITITOR-LNM-21-201).
/// Exposes /vex/observations/* endpoints with filters for advisory/product/provider,
/// strict RBAC, and deterministic pagination (no derived verdict fields).
/// </summary>
public static class ObservationEndpoints
{
    public static void MapObservationEndpoints(this WebApplication app)
    {
        var group = app.MapGroup("/vex/observations");

        // GET /vex/observations - List observations with filters.
        group.MapGet("", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexObservationStore observationStore,
            TimeProvider timeProvider,
            [FromQuery] int? limit,
            [FromQuery] string? cursor,
            [FromQuery] string? vulnerabilityId,
            [FromQuery] string? productKey,
            [FromQuery] string? providerId,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }

            // TODO(review): `cursor` is accepted but the store queries expose no resume
            // position, so listing always restarts at the top - confirm against
            // IVexObservationStore before relying on nextCursor for deep pagination.
            var take = Math.Clamp(limit.GetValueOrDefault(50), 1, 100);
            IReadOnlyList<VexObservation> observations;

            // Route to the appropriate query method based on filters.
            if (!string.IsNullOrWhiteSpace(vulnerabilityId) && !string.IsNullOrWhiteSpace(productKey))
            {
                // Store returns all matches for the pair; paging is applied in-process below.
                observations = await observationStore
                    .FindByVulnerabilityAndProductAsync(tenant, vulnerabilityId.Trim(), productKey.Trim(), cancellationToken)
                    .ConfigureAwait(false);
            }
            else if (!string.IsNullOrWhiteSpace(providerId))
            {
                // Fetch one extra row so the has-more check below can succeed; asking the
                // store for exactly `take` made `observations.Count > take` unreachable
                // and nextCursor always null on this path.
                observations = await observationStore
                    .FindByProviderAsync(tenant, providerId.Trim(), take + 1, cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                // No filter - full list would require pagination infrastructure.
                return Results.BadRequest(new
                {
                    error = new
                    {
                        code = "ERR_PARAMS",
                        message = "At least one filter is required: vulnerabilityId+productKey or providerId"
                    }
                });
            }

            var items = observations
                .Take(take)
                .Select(obs => ToListItem(obs))
                .ToList();

            var hasMore = observations.Count > take;
            string? nextCursor = null;
            if (hasMore && items.Count > 0)
            {
                var last = observations[items.Count - 1];
                nextCursor = EncodeCursor(last.CreatedAt.UtcDateTime, last.ObservationId);
            }

            var response = new VexObservationListResponse(items, nextCursor);
            return Results.Ok(response);
        }).WithName("ListVexObservations");

        // GET /vex/observations/{observationId} - Get observation by ID. The literal
        // /count template takes precedence over this parameter template in routing.
        group.MapGet("/{observationId}", async (
            HttpContext context,
            string observationId,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexObservationStore observationStore,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            if (string.IsNullOrWhiteSpace(observationId))
            {
                return Results.BadRequest(new
                {
                    error = new { code = "ERR_PARAMS", message = "observationId is required" }
                });
            }
            var observation = await observationStore
                .GetByIdAsync(tenant, observationId.Trim(), cancellationToken)
                .ConfigureAwait(false);
            if (observation is null)
            {
                return Results.NotFound(new
                {
                    error = new { code = "ERR_NOT_FOUND", message = $"Observation '{observationId}' not found" }
                });
            }
            var response = ToDetailResponse(observation);
            return Results.Ok(response);
        }).WithName("GetVexObservation");

        // GET /vex/observations/count - Get observation count for the tenant.
        group.MapGet("/count", async (
            HttpContext context,
            IOptions<VexMongoStorageOptions> storageOptions,
            [FromServices] IVexObservationStore observationStore,
            CancellationToken cancellationToken) =>
        {
            var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
            if (scopeResult is not null)
            {
                return scopeResult;
            }
            if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
            {
                return tenantError;
            }
            var count = await observationStore
                .CountAsync(tenant, cancellationToken)
                .ConfigureAwait(false);
            return Results.Ok(new { count });
        }).WithName("CountVexObservations");
    }

    /// <summary>
    /// Projects an observation into the list-item DTO. Only the FIRST statement's
    /// vulnerability/product/status is surfaced; multi-statement observations show a
    /// single summary row here (the detail endpoint returns all statements).
    /// </summary>
    private static VexObservationListItem ToListItem(VexObservation obs)
    {
        var firstStatement = obs.Statements.FirstOrDefault();
        return new VexObservationListItem(
            ObservationId: obs.ObservationId,
            Tenant: obs.Tenant,
            ProviderId: obs.ProviderId,
            VulnerabilityId: firstStatement?.VulnerabilityId ?? string.Empty,
            ProductKey: firstStatement?.ProductKey ?? string.Empty,
            Status: firstStatement?.Status.ToString().ToLowerInvariant() ?? "unknown",
            CreatedAt: obs.CreatedAt,
            LastObserved: firstStatement?.LastObserved,
            Purls: obs.Linkset.Purls.ToList());
    }

    /// <summary>
    /// Projects an observation into the full detail DTO: upstream provenance, content
    /// descriptor, every statement, and the linkset (aliases/purls/cpes/references).
    /// </summary>
    private static VexObservationDetailResponse ToDetailResponse(VexObservation obs)
    {
        var upstream = new VexObservationUpstreamResponse(
            obs.Upstream.UpstreamId,
            obs.Upstream.DocumentVersion,
            obs.Upstream.FetchedAt,
            obs.Upstream.ReceivedAt,
            obs.Upstream.ContentHash,
            obs.Upstream.Signature.Present
                ? new VexObservationSignatureResponse(
                    obs.Upstream.Signature.Format ?? "dsse",
                    obs.Upstream.Signature.KeyId,
                    Issuer: null,
                    VerifiedAtUtc: null)
                : null);
        var content = new VexObservationContentResponse(
            obs.Content.Format,
            obs.Content.SpecVersion);
        var statements = obs.Statements
            .Select(stmt => new VexObservationStatementItem(
                stmt.VulnerabilityId,
                stmt.ProductKey,
                stmt.Status.ToString().ToLowerInvariant(),
                stmt.LastObserved,
                stmt.Locator,
                stmt.Justification?.ToString().ToLowerInvariant(),
                stmt.IntroducedVersion,
                stmt.FixedVersion))
            .ToList();
        var linkset = new VexObservationLinksetResponse(
            obs.Linkset.Aliases.ToList(),
            obs.Linkset.Purls.ToList(),
            obs.Linkset.Cpes.ToList(),
            obs.Linkset.References.Select(r => new VexObservationReferenceItem(r.Type, r.Url)).ToList());
        return new VexObservationDetailResponse(
            obs.ObservationId,
            obs.Tenant,
            obs.ProviderId,
            obs.StreamId,
            upstream,
            content,
            statements,
            linkset,
            obs.CreatedAt);
    }

    /// <summary>
    /// Resolves the tenant from X-Stella-Tenant (lower-cased), falling back to the
    /// configured default; 400 when neither is available. Note: any header value is
    /// accepted here - no match against the default is enforced on these endpoints.
    /// </summary>
    private static bool TryResolveTenant(
        HttpContext context,
        VexMongoStorageOptions options,
        out string tenant,
        out IResult? problem)
    {
        problem = null;
        tenant = string.Empty;
        var headerTenant = context.Request.Headers["X-Stella-Tenant"].FirstOrDefault();
        if (!string.IsNullOrWhiteSpace(headerTenant))
        {
            tenant = headerTenant.Trim().ToLowerInvariant();
        }
        else if (!string.IsNullOrWhiteSpace(options.DefaultTenant))
        {
            tenant = options.DefaultTenant.Trim().ToLowerInvariant();
        }
        else
        {
            problem = Results.BadRequest(new
            {
                error = new { code = "ERR_TENANT", message = "X-Stella-Tenant header is required" }
            });
            return false;
        }
        return true;
    }

    /// <summary>Encodes a pagination cursor as base64("timestamp|id") using the round-trip ("O") format.</summary>
    private static string EncodeCursor(DateTime timestamp, string id)
    {
        var raw = $"{timestamp:O}|{id}";
        return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(raw));
    }
}
// Additional response DTOs for observation detail
/// <summary>
/// Upstream provenance for an observation: source document identity/version,
/// fetch/receive timestamps, content hash, and the signature envelope when present.
/// </summary>
public sealed record VexObservationUpstreamResponse(
[property: System.Text.Json.Serialization.JsonPropertyName("upstreamId")] string UpstreamId,
[property: System.Text.Json.Serialization.JsonPropertyName("documentVersion")] string? DocumentVersion,
[property: System.Text.Json.Serialization.JsonPropertyName("fetchedAt")] DateTimeOffset FetchedAt,
[property: System.Text.Json.Serialization.JsonPropertyName("receivedAt")] DateTimeOffset ReceivedAt,
[property: System.Text.Json.Serialization.JsonPropertyName("contentHash")] string ContentHash,
[property: System.Text.Json.Serialization.JsonPropertyName("signature")] VexObservationSignatureResponse? Signature);
/// <summary>
/// Content descriptor for a VEX observation: source document format (e.g. "csaf", "openvex")
/// and, when known, its specification version.
/// </summary>
public sealed record VexObservationContentResponse(
[property: System.Text.Json.Serialization.JsonPropertyName("format")] string Format,
[property: System.Text.Json.Serialization.JsonPropertyName("specVersion")] string? SpecVersion);
/// <summary>
/// A single VEX statement within an observation: the (vulnerability, product) pair, its
/// status, and optional justification/version-range details.
/// </summary>
public sealed record VexObservationStatementItem(
[property: System.Text.Json.Serialization.JsonPropertyName("vulnerabilityId")] string VulnerabilityId,
[property: System.Text.Json.Serialization.JsonPropertyName("productKey")] string ProductKey,
[property: System.Text.Json.Serialization.JsonPropertyName("status")] string Status,
[property: System.Text.Json.Serialization.JsonPropertyName("lastObserved")] DateTimeOffset? LastObserved,
[property: System.Text.Json.Serialization.JsonPropertyName("locator")] string? Locator,
[property: System.Text.Json.Serialization.JsonPropertyName("justification")] string? Justification,
[property: System.Text.Json.Serialization.JsonPropertyName("introducedVersion")] string? IntroducedVersion,
[property: System.Text.Json.Serialization.JsonPropertyName("fixedVersion")] string? FixedVersion);
/// <summary>
/// Correlation linkset for an observation: vulnerability aliases, affected package URLs,
/// CPEs, and external references extracted from the source document.
/// </summary>
public sealed record VexObservationLinksetResponse(
[property: System.Text.Json.Serialization.JsonPropertyName("aliases")] IReadOnlyList<string> Aliases,
[property: System.Text.Json.Serialization.JsonPropertyName("purls")] IReadOnlyList<string> Purls,
[property: System.Text.Json.Serialization.JsonPropertyName("cpes")] IReadOnlyList<string> Cpes,
[property: System.Text.Json.Serialization.JsonPropertyName("references")] IReadOnlyList<VexObservationReferenceItem> References);
/// <summary>
/// External reference attached to an observation linkset (e.g. advisory or fix URL),
/// tagged with its reference type.
/// </summary>
public sealed record VexObservationReferenceItem(
[property: System.Text.Json.Serialization.JsonPropertyName("type")] string Type,
[property: System.Text.Json.Serialization.JsonPropertyName("url")] string Url);
/// <summary>
/// Full detail view of a VEX observation returned by the observation detail endpoint:
/// identity and tenancy, upstream provenance, content descriptor, extracted statements,
/// correlation linkset, and creation timestamp.
/// </summary>
public sealed record VexObservationDetailResponse(
[property: System.Text.Json.Serialization.JsonPropertyName("observationId")] string ObservationId,
[property: System.Text.Json.Serialization.JsonPropertyName("tenant")] string Tenant,
[property: System.Text.Json.Serialization.JsonPropertyName("providerId")] string ProviderId,
[property: System.Text.Json.Serialization.JsonPropertyName("streamId")] string StreamId,
[property: System.Text.Json.Serialization.JsonPropertyName("upstream")] VexObservationUpstreamResponse Upstream,
[property: System.Text.Json.Serialization.JsonPropertyName("content")] VexObservationContentResponse Content,
[property: System.Text.Json.Serialization.JsonPropertyName("statements")] IReadOnlyList<VexObservationStatementItem> Statements,
[property: System.Text.Json.Serialization.JsonPropertyName("linkset")] VexObservationLinksetResponse Linkset,
[property: System.Text.Json.Serialization.JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);

View File

@@ -66,6 +66,7 @@ internal static class TelemetryExtensions
metrics
.AddMeter(IngestionTelemetry.MeterName)
.AddMeter(EvidenceTelemetry.MeterName)
.AddMeter(LinksetTelemetry.MeterName)
.AddAspNetCoreInstrumentation()
.AddHttpClientInstrumentation()
.AddRuntimeInstrumentation();

View File

@@ -76,6 +76,14 @@ services.AddRedHatCsafConnector();
services.Configure<MirrorDistributionOptions>(configuration.GetSection(MirrorDistributionOptions.SectionName));
services.AddSingleton<MirrorRateLimiter>();
services.TryAddSingleton(TimeProvider.System);
// CRYPTO-90-001: Crypto provider abstraction for pluggable hashing algorithms (GOST/SM support)
services.AddSingleton<IVexHashingService>(sp =>
{
// When ICryptoProviderRegistry is available, use it for pluggable algorithms
var registry = sp.GetService<StellaOps.Cryptography.ICryptoProviderRegistry>();
return new VexHashingService(registry);
});
services.AddSingleton<IVexObservationProjectionService, VexObservationProjectionService>();
services.AddScoped<IVexObservationQueryService, VexObservationQueryService>();
@@ -387,6 +395,471 @@ app.MapGet("/openapi/excititor.json", () =>
}
}
}
},
// WEB-OBS-53-001: Evidence API endpoints
["/evidence/vex/list"] = new
{
get = new
{
summary = "List VEX evidence exports",
parameters = new object[]
{
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false },
new { name = "limit", @in = "query", schema = new { type = "integer", minimum = 1, maximum = 100 }, required = false },
new { name = "cursor", @in = "query", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Evidence list response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["evidence-list"] = new
{
value = new
{
items = new[] {
new {
bundleId = "vex-bundle-2025-11-24-001",
tenant = "acme",
format = "openvex",
createdAt = "2025-11-24T00:00:00Z",
itemCount = 42,
merkleRoot = "sha256:abc123...",
sealed_ = false
}
},
nextCursor = (string?)null
}
}
}
}
}
}
}
}
},
["/evidence/vex/bundle/{bundleId}"] = new
{
get = new
{
summary = "Get VEX evidence bundle details",
parameters = new object[]
{
new { name = "bundleId", @in = "path", schema = new { type = "string" }, required = true },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Bundle detail response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["bundle-detail"] = new
{
value = new
{
bundleId = "vex-bundle-2025-11-24-001",
tenant = "acme",
format = "openvex",
specVersion = "0.2.0",
createdAt = "2025-11-24T00:00:00Z",
itemCount = 42,
merkleRoot = "sha256:abc123...",
sealed_ = false,
metadata = new { source = "excititor" }
}
}
}
}
}
},
["404"] = new
{
description = "Bundle not found",
content = new Dictionary<string, object>
{
["application/json"] = new
{
schema = new { @ref = "#/components/schemas/Error" }
}
}
}
}
}
},
["/evidence/vex/lookup"] = new
{
get = new
{
summary = "Lookup evidence for vulnerability/product pair",
parameters = new object[]
{
new { name = "vulnerabilityId", @in = "query", schema = new { type = "string" }, required = true, example = "CVE-2024-12345" },
new { name = "productKey", @in = "query", schema = new { type = "string" }, required = true, example = "pkg:npm/lodash@4.17.21" },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Evidence lookup response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["lookup-result"] = new
{
value = new
{
vulnerabilityId = "CVE-2024-12345",
productKey = "pkg:npm/lodash@4.17.21",
evidence = new[] {
new { bundleId = "vex-bundle-001", observationId = "obs-001" }
},
queriedAt = "2025-11-24T12:00:00Z"
}
}
}
}
}
}
}
}
},
// WEB-OBS-54-001: Attestation API endpoints
["/attestations/vex/list"] = new
{
get = new
{
summary = "List VEX attestations",
parameters = new object[]
{
new { name = "limit", @in = "query", schema = new { type = "integer", minimum = 1, maximum = 200 }, required = false },
new { name = "cursor", @in = "query", schema = new { type = "string" }, required = false },
new { name = "vulnerabilityId", @in = "query", schema = new { type = "string" }, required = false },
new { name = "productKey", @in = "query", schema = new { type = "string" }, required = false },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Attestation list response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["attestation-list"] = new
{
value = new
{
items = new[] {
new {
attestationId = "att-2025-001",
tenant = "acme",
createdAt = "2025-11-24T00:00:00Z",
predicateType = "https://in-toto.io/attestation/v1",
subjectDigest = "sha256:abc123...",
valid = true,
builderId = "excititor:redhat"
}
},
nextCursor = (string?)null,
hasMore = false,
count = 1
}
}
}
}
}
}
}
}
},
["/attestations/vex/{attestationId}"] = new
{
get = new
{
summary = "Get VEX attestation details with DSSE verification state",
parameters = new object[]
{
new { name = "attestationId", @in = "path", schema = new { type = "string" }, required = true },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Attestation detail response with chain-of-custody",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["attestation-detail"] = new
{
value = new
{
attestationId = "att-2025-001",
tenant = "acme",
createdAt = "2025-11-24T00:00:00Z",
predicateType = "https://in-toto.io/attestation/v1",
subject = new { digest = "sha256:abc123...", name = "CVE-2024-12345/pkg:npm/lodash@4.17.21" },
builder = new { id = "excititor:redhat", builderId = "excititor:redhat" },
verification = new { valid = true, verifiedAt = "2025-11-24T00:00:00Z", signatureType = "dsse" },
chainOfCustody = new[] {
new { step = 1, actor = "excititor:redhat", action = "created", timestamp = "2025-11-24T00:00:00Z" }
}
}
}
}
}
}
},
["404"] = new
{
description = "Attestation not found",
content = new Dictionary<string, object>
{
["application/json"] = new
{
schema = new { @ref = "#/components/schemas/Error" }
}
}
}
}
}
},
["/attestations/vex/lookup"] = new
{
get = new
{
summary = "Lookup attestations by linkset or observation",
parameters = new object[]
{
new { name = "linksetId", @in = "query", schema = new { type = "string" }, required = false },
new { name = "observationId", @in = "query", schema = new { type = "string" }, required = false },
new { name = "limit", @in = "query", schema = new { type = "integer", minimum = 1, maximum = 100 }, required = false },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Attestation lookup response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["lookup-result"] = new
{
value = new
{
subjectDigest = "linkset-001",
attestations = new[] {
new { attestationId = "att-001", valid = true }
},
queriedAt = "2025-11-24T12:00:00Z"
}
}
}
}
}
},
["400"] = new
{
description = "Missing required parameter",
content = new Dictionary<string, object>
{
["application/json"] = new
{
schema = new { @ref = "#/components/schemas/Error" }
}
}
}
}
}
},
// EXCITITOR-LNM-21-201: Observation API endpoints
["/vex/observations"] = new
{
get = new
{
summary = "List VEX observations with filters",
parameters = new object[]
{
new { name = "limit", @in = "query", schema = new { type = "integer", minimum = 1, maximum = 100 }, required = false },
new { name = "cursor", @in = "query", schema = new { type = "string" }, required = false },
new { name = "vulnerabilityId", @in = "query", schema = new { type = "string" }, required = false, example = "CVE-2024-12345" },
new { name = "productKey", @in = "query", schema = new { type = "string" }, required = false, example = "pkg:npm/lodash@4.17.21" },
new { name = "providerId", @in = "query", schema = new { type = "string" }, required = false, example = "excititor:redhat" },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Observation list response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["observation-list"] = new
{
value = new
{
items = new[] {
new {
observationId = "obs-2025-001",
tenant = "acme",
providerId = "excititor:redhat",
vulnerabilityId = "CVE-2024-12345",
productKey = "pkg:npm/lodash@4.17.21",
status = "not_affected",
createdAt = "2025-11-24T00:00:00Z"
}
},
nextCursor = (string?)null
}
}
}
}
}
},
["400"] = new
{
description = "Missing required filter",
content = new Dictionary<string, object>
{
["application/json"] = new
{
schema = new { @ref = "#/components/schemas/Error" },
examples = new Dictionary<string, object>
{
["missing-filter"] = new
{
value = new
{
error = new
{
code = "ERR_PARAMS",
message = "At least one filter is required: vulnerabilityId+productKey or providerId"
}
}
}
}
}
}
}
}
}
},
["/vex/observations/{observationId}"] = new
{
get = new
{
summary = "Get VEX observation by ID",
parameters = new object[]
{
new { name = "observationId", @in = "path", schema = new { type = "string" }, required = true },
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Observation detail response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["observation-detail"] = new
{
value = new
{
observationId = "obs-2025-001",
tenant = "acme",
providerId = "excititor:redhat",
streamId = "stream-001",
upstream = new { upstreamId = "RHSA-2024:001", fetchedAt = "2025-11-24T00:00:00Z" },
content = new { format = "csaf", specVersion = "2.0" },
statements = new[] {
new { vulnerabilityId = "CVE-2024-12345", productKey = "pkg:npm/lodash@4.17.21", status = "not_affected" }
},
linkset = new { aliases = new[] { "CVE-2024-12345" }, purls = new[] { "pkg:npm/lodash@4.17.21" } },
createdAt = "2025-11-24T00:00:00Z"
}
}
}
}
}
},
["404"] = new
{
description = "Observation not found",
content = new Dictionary<string, object>
{
["application/json"] = new
{
schema = new { @ref = "#/components/schemas/Error" }
}
}
}
}
}
},
["/vex/observations/count"] = new
{
get = new
{
summary = "Get observation count for tenant",
parameters = new object[]
{
new { name = "X-Stella-Tenant", @in = "header", schema = new { type = "string" }, required = false }
},
responses = new Dictionary<string, object>
{
["200"] = new
{
description = "Count response",
content = new Dictionary<string, object>
{
["application/json"] = new
{
examples = new Dictionary<string, object>
{
["count"] = new
{
value = new { count = 1234 }
}
}
}
}
}
}
}
}
},
components = new
@@ -451,6 +924,8 @@ app.MapPost("/airgap/v1/vex/import", async (
[FromServices] AirgapSignerTrustService trustService,
[FromServices] AirgapModeEnforcer modeEnforcer,
[FromServices] IAirgapImportStore store,
[FromServices] IVexTimelineEventEmitter timelineEmitter,
[FromServices] IVexHashingService hashingService,
[FromServices] ILoggerFactory loggerFactory,
[FromServices] TimeProvider timeProvider,
[FromBody] AirgapImportRequest request,
@@ -465,6 +940,7 @@ app.MapPost("/airgap/v1/vex/import", async (
? (int?)null
: (int)Math.Round((nowUtc - request.SignedAt.Value).TotalSeconds);
var traceId = Activity.Current?.TraceId.ToString();
var timeline = new List<AirgapTimelineEntry>();
void RecordEvent(string eventType, string? code = null, string? message = null)
{
@@ -481,6 +957,54 @@ app.MapPost("/airgap/v1/vex/import", async (
};
timeline.Add(entry);
logger.LogInformation("Airgap timeline event {EventType} bundle={BundleId} gen={Gen} tenant={Tenant} code={Code}", eventType, entry.BundleId, entry.MirrorGeneration, tenantId, code);
// WEB-AIRGAP-58-001: Emit timeline event to persistent store for SSE streaming
_ = EmitTimelineEventAsync(eventType, code, message);
}
async Task EmitTimelineEventAsync(string eventType, string? code, string? message)
{
try
{
var attributes = new Dictionary<string, string>(StringComparer.Ordinal)
{
["bundle_id"] = request.BundleId ?? string.Empty,
["mirror_generation"] = request.MirrorGeneration ?? string.Empty
};
if (stalenessSeconds.HasValue)
{
attributes["staleness_seconds"] = stalenessSeconds.Value.ToString(CultureInfo.InvariantCulture);
}
if (!string.IsNullOrEmpty(code))
{
attributes["error_code"] = code;
}
if (!string.IsNullOrEmpty(message))
{
attributes["message"] = message;
}
var eventId = $"airgap-{request.BundleId}-{request.MirrorGeneration}-{nowUtc:yyyyMMddHHmmssfff}";
var streamId = $"airgap:{request.BundleId}:{request.MirrorGeneration}";
var evt = new TimelineEvent(
eventId,
tenantId,
"airgap-import",
streamId,
eventType,
traceId ?? Guid.NewGuid().ToString("N"),
justificationSummary: message ?? string.Empty,
nowUtc,
evidenceHash: null,
payloadHash: request.PayloadHash,
attributes.ToImmutableDictionary());
await timelineEmitter.EmitAsync(evt, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
logger.LogWarning(ex, "Failed to emit timeline event {EventType} for bundle {BundleId}", eventType, request.BundleId);
}
}
RecordEvent("airgap.import.started");
@@ -528,7 +1052,8 @@ app.MapPost("/airgap/v1/vex/import", async (
var manifestPath = $"mirror/{request.BundleId}/{request.MirrorGeneration}/manifest.json";
var evidenceLockerPath = $"evidence/{request.BundleId}/{request.MirrorGeneration}/bundle.ndjson";
var manifestHash = ComputeSha256($"{request.BundleId}:{request.MirrorGeneration}:{request.PayloadHash}");
// CRYPTO-90-001: Use IVexHashingService for pluggable crypto algorithms
var manifestHash = hashingService.ComputeHash($"{request.BundleId}:{request.MirrorGeneration}:{request.PayloadHash}");
RecordEvent("airgap.import.completed");
@@ -578,12 +1103,7 @@ app.MapPost("/airgap/v1/vex/import", async (
});
});
static string ComputeSha256(string value)
{
var bytes = Encoding.UTF8.GetBytes(value);
var hash = System.Security.Cryptography.SHA256.HashData(bytes);
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
// CRYPTO-90-001: ComputeSha256 removed - now using IVexHashingService for pluggable crypto
app.MapPost("/v1/attestations/verify", async (
[FromServices] IVexAttestationClient attestationClient,
@@ -1666,10 +2186,13 @@ app.MapGet("/obs/excititor/health", async (
app.MapGet("/obs/excititor/timeline", async (
HttpContext context,
IOptions<VexMongoStorageOptions> storageOptions,
[FromServices] IVexTimelineEventStore timelineStore,
TimeProvider timeProvider,
ILoggerFactory loggerFactory,
[FromQuery] string? cursor,
[FromQuery] int? limit,
[FromQuery] string? eventType,
[FromQuery] string? providerId,
CancellationToken cancellationToken) =>
{
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: true, out var tenant, out var tenantError))
@@ -1680,44 +2203,71 @@ app.MapGet("/obs/excititor/timeline", async (
var logger = loggerFactory.CreateLogger("ExcititorTimeline");
var take = Math.Clamp(limit.GetValueOrDefault(10), 1, 100);
var startId = 0;
// Parse cursor as ISO-8601 timestamp or Last-Event-ID header
DateTimeOffset? cursorTimestamp = null;
var candidateCursor = cursor ?? context.Request.Headers["Last-Event-ID"].FirstOrDefault();
if (!string.IsNullOrWhiteSpace(candidateCursor) && !int.TryParse(candidateCursor, NumberStyles.Integer, CultureInfo.InvariantCulture, out startId))
if (!string.IsNullOrWhiteSpace(candidateCursor))
{
return Results.BadRequest(new { error = "cursor must be integer" });
if (DateTimeOffset.TryParse(candidateCursor, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
{
cursorTimestamp = parsed;
}
else
{
return Results.BadRequest(new { error = new { code = "ERR_CURSOR", message = "cursor must be ISO-8601 timestamp" } });
}
}
context.Response.Headers.CacheControl = "no-store";
context.Response.Headers["X-Accel-Buffering"] = "no";
context.Response.Headers["Link"] = "</openapi/excititor.json>; rel=\"describedby\"; type=\"application/json\"";
context.Response.ContentType = "text/event-stream";
await context.Response.WriteAsync("retry: 5000\n\n", cancellationToken).ConfigureAwait(false);
// Fetch real timeline events from the store
IReadOnlyList<TimelineEvent> events;
var now = timeProvider.GetUtcNow();
var events = Enumerable.Range(startId, take)
.Select(id => new ExcititorTimelineEvent(
Type: "evidence.update",
Tenant: tenant,
Source: "vex-runtime",
Count: 0,
Errors: 0,
TraceId: null,
OccurredAt: now.ToString("O", CultureInfo.InvariantCulture)))
.ToList();
foreach (var (evt, idx) in events.Select((e, i) => (e, i)))
if (!string.IsNullOrWhiteSpace(eventType))
{
events = await timelineStore.FindByEventTypeAsync(tenant, eventType, take, cancellationToken).ConfigureAwait(false);
}
else if (!string.IsNullOrWhiteSpace(providerId))
{
events = await timelineStore.FindByProviderAsync(tenant, providerId, take, cancellationToken).ConfigureAwait(false);
}
else if (cursorTimestamp.HasValue)
{
// Get events after the cursor timestamp
events = await timelineStore.FindByTimeRangeAsync(tenant, cursorTimestamp.Value, now, take, cancellationToken).ConfigureAwait(false);
}
else
{
events = await timelineStore.GetRecentAsync(tenant, take, cancellationToken).ConfigureAwait(false);
}
foreach (var evt in events)
{
cancellationToken.ThrowIfCancellationRequested();
var id = startId + idx;
await context.Response.WriteAsync($"id: {id}\n", cancellationToken).ConfigureAwait(false);
await context.Response.WriteAsync($"event: {evt.Type}\n", cancellationToken).ConfigureAwait(false);
await context.Response.WriteAsync($"data: {JsonSerializer.Serialize(evt)}\n\n", cancellationToken).ConfigureAwait(false);
var sseEvent = new ExcititorTimelineEvent(
Type: evt.EventType,
Tenant: evt.Tenant,
Source: evt.ProviderId,
Count: evt.Attributes.TryGetValue("observation_count", out var countStr) && int.TryParse(countStr, out var count) ? count : 1,
Errors: evt.Attributes.TryGetValue("error_count", out var errStr) && int.TryParse(errStr, out var errCount) ? errCount : 0,
TraceId: evt.TraceId,
OccurredAt: evt.CreatedAt.ToString("O", CultureInfo.InvariantCulture));
await context.Response.WriteAsync($"id: {evt.CreatedAt:O}\n", cancellationToken).ConfigureAwait(false);
await context.Response.WriteAsync($"event: {evt.EventType}\n", cancellationToken).ConfigureAwait(false);
await context.Response.WriteAsync($"data: {JsonSerializer.Serialize(sseEvent)}\n\n", cancellationToken).ConfigureAwait(false);
}
await context.Response.Body.FlushAsync(cancellationToken).ConfigureAwait(false);
var nextCursor = startId + events.Count;
context.Response.Headers["X-Next-Cursor"] = nextCursor.ToString(CultureInfo.InvariantCulture);
logger.LogInformation("obs excititor timeline emitted {Count} events for tenant {Tenant} start {Start} next {Next}", events.Count, tenant, startId, nextCursor);
var nextCursor = events.Count > 0 ? events[^1].CreatedAt.ToString("O", CultureInfo.InvariantCulture) : now.ToString("O", CultureInfo.InvariantCulture);
context.Response.Headers["X-Next-Cursor"] = nextCursor;
logger.LogInformation("obs excititor timeline emitted {Count} events for tenant {Tenant} cursor {Cursor} next {Next}", events.Count, tenant, candidateCursor, nextCursor);
return Results.Empty;
}).WithName("GetExcititorTimeline");
@@ -1726,11 +2276,13 @@ IngestEndpoints.MapIngestEndpoints(app);
ResolveEndpoint.MapResolveEndpoint(app);
MirrorEndpoints.MapMirrorEndpoints(app);
app.MapGet("/v1/vex/observations", async (HttpContext _, CancellationToken __) =>
Results.StatusCode(StatusCodes.Status501NotImplemented));
// Evidence and Attestation APIs (WEB-OBS-53-001, WEB-OBS-54-001)
EvidenceEndpoints.MapEvidenceEndpoints(app);
AttestationEndpoints.MapAttestationEndpoints(app);
app.MapGet("/v1/vex/linksets", async (HttpContext _, CancellationToken __) =>
Results.StatusCode(StatusCodes.Status501NotImplemented));
// Observation and Linkset APIs (EXCITITOR-LNM-21-201, EXCITITOR-LNM-21-202)
ObservationEndpoints.MapObservationEndpoints(app);
LinksetEndpoints.MapLinksetEndpoints(app);
app.Run();

View File

@@ -0,0 +1,112 @@
using System;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Cryptography;
namespace StellaOps.Excititor.WebService.Services;
/// <summary>
/// Service interface for hashing operations in Excititor (CRYPTO-90-001).
/// Abstracts hashing implementation to support GOST/SM algorithms via ICryptoProviderRegistry.
/// Digests are rendered as "&lt;algorithm&gt;:&lt;lowercase-hex&gt;" (see <see cref="FormatDigest"/>).
/// </summary>
public interface IVexHashingService
{
/// <summary>
/// Compute hash of a UTF-8 encoded string.
/// </summary>
/// <param name="value">Text to hash; encoded as UTF-8 before hashing.</param>
/// <param name="algorithm">Algorithm name, e.g. "sha256" (default), "sha384", "sha512".</param>
/// <returns>Formatted digest string ("algorithm:hex").</returns>
string ComputeHash(string value, string algorithm = "sha256");
/// <summary>
/// Compute hash of raw bytes.
/// </summary>
/// <param name="data">Bytes to hash.</param>
/// <param name="algorithm">Algorithm name; defaults to "sha256".</param>
/// <returns>Formatted digest string ("algorithm:hex").</returns>
string ComputeHash(ReadOnlySpan<byte> data, string algorithm = "sha256");
/// <summary>
/// Try to compute hash of raw bytes with stack-allocated buffer optimization.
/// </summary>
/// <param name="data">Bytes to hash.</param>
/// <param name="destination">Buffer receiving the raw digest; must be large enough for the algorithm's output.</param>
/// <param name="bytesWritten">Number of digest bytes written on success; 0 on failure.</param>
/// <param name="algorithm">Algorithm name; defaults to "sha256".</param>
/// <returns>True when the digest was computed and fit in <paramref name="destination"/>.</returns>
bool TryComputeHash(ReadOnlySpan<byte> data, Span<byte> destination, out int bytesWritten, string algorithm = "sha256");
/// <summary>
/// Format a hash digest with algorithm prefix.
/// </summary>
/// <param name="algorithm">Algorithm name; normalized to lowercase without dashes.</param>
/// <param name="digest">Raw digest bytes.</param>
/// <returns>"&lt;normalized-algorithm&gt;:&lt;lowercase-hex&gt;".</returns>
string FormatDigest(string algorithm, ReadOnlySpan<byte> digest);
}
/// <summary>
/// Default implementation of <see cref="IVexHashingService"/> that uses ICryptoProviderRegistry
/// when available, falling back to System.Security.Cryptography for SHA-256/384/512.
/// </summary>
public sealed class VexHashingService : IVexHashingService
{
    private readonly ICryptoProviderRegistry? _registry;

    public VexHashingService(ICryptoProviderRegistry? registry = null)
    {
        _registry = registry;
    }

    /// <inheritdoc />
    public string ComputeHash(string value, string algorithm = "sha256")
    {
        // Deliberate sentinel: null/empty input yields an empty digest string rather
        // than the digest of zero bytes. NOTE(review): confirm callers depend on this.
        if (string.IsNullOrEmpty(value))
        {
            return string.Empty;
        }

        var bytes = Encoding.UTF8.GetBytes(value);
        return ComputeHash(bytes, algorithm);
    }

    /// <inheritdoc />
    public string ComputeHash(ReadOnlySpan<byte> data, string algorithm = "sha256")
    {
        // 64 bytes covers SHA-512 and the GOST/SM digest sizes currently in scope.
        Span<byte> buffer = stackalloc byte[64];
        if (!TryComputeHash(data, buffer, out var written, algorithm))
        {
            throw new InvalidOperationException($"Failed to compute {algorithm} hash.");
        }

        return FormatDigest(algorithm, buffer[..written]);
    }

    /// <inheritdoc />
    public bool TryComputeHash(ReadOnlySpan<byte> data, Span<byte> destination, out int bytesWritten, string algorithm = "sha256")
    {
        bytesWritten = 0;

        // Prefer the pluggable registry so GOST/SM providers can supply the hasher.
        if (_registry is not null)
        {
            try
            {
                var resolution = _registry.ResolveHasher(algorithm);
                var hasher = resolution.Hasher;
                var result = hasher.ComputeHash(data);
                if (result.Length <= destination.Length)
                {
                    result.CopyTo(destination);
                    bytesWritten = result.Length;
                    return true;
                }
                // Digest larger than caller's buffer: fall through to the built-ins,
                // which honor TryHashData semantics (return false when it doesn't fit).
            }
            catch
            {
                // Registry resolution/hashing failed; fall through to the built-in implementation.
            }
        }

        // Fall back to System.Security.Cryptography for standard algorithms.
        var normalizedAlgorithm = algorithm.ToLowerInvariant().Replace("-", string.Empty);
        // Try-pattern contract fix: report an unsupported algorithm via `false`
        // instead of throwing (the original threw NotSupportedException here,
        // which violates the TryXxx convention and surprises callers probing support).
        return normalizedAlgorithm switch
        {
            "sha256" => SHA256.TryHashData(data, destination, out bytesWritten),
            "sha384" => SHA384.TryHashData(data, destination, out bytesWritten),
            "sha512" => SHA512.TryHashData(data, destination, out bytesWritten),
            _ => false
        };
    }

    /// <inheritdoc />
    public string FormatDigest(string algorithm, ReadOnlySpan<byte> digest)
    {
        var normalizedAlgorithm = algorithm.ToLowerInvariant().Replace("-", string.Empty);
        var hexDigest = Convert.ToHexString(digest).ToLowerInvariant();
        return $"{normalizedAlgorithm}:{hexDigest}";
    }
}

View File

@@ -0,0 +1,250 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.WebService.Telemetry;
/// <summary>
/// Telemetry metrics for VEX linkset and observation store operations (EXCITITOR-OBS-51-001).
/// Tracks ingest latency, scope resolution success, conflict rate, and signature verification
/// to support SLO burn alerts for AOC "evidence freshness" mission.
/// </summary>
internal static class LinksetTelemetry
{
public const string MeterName = "StellaOps.Excititor.WebService.Linksets";
private static readonly Meter Meter = new(MeterName);
// Ingest latency metrics
private static readonly Histogram<double> IngestLatencyHistogram =
Meter.CreateHistogram<double>(
"excititor.vex.ingest.latency_seconds",
unit: "s",
description: "Latency distribution for VEX observation and linkset store operations.");
private static readonly Counter<long> IngestOperationCounter =
Meter.CreateCounter<long>(
"excititor.vex.ingest.operations_total",
unit: "operations",
description: "Total count of VEX ingest operations by outcome.");
// Scope resolution metrics
private static readonly Counter<long> ScopeResolutionCounter =
Meter.CreateCounter<long>(
"excititor.vex.scope.resolution_total",
unit: "resolutions",
description: "Count of scope resolution attempts by outcome (success/failure).");
private static readonly Histogram<int> ScopeMatchCountHistogram =
Meter.CreateHistogram<int>(
"excititor.vex.scope.match_count",
unit: "matches",
description: "Distribution of matched scopes per resolution request.");
// Conflict/disagreement metrics
private static readonly Counter<long> LinksetConflictCounter =
Meter.CreateCounter<long>(
"excititor.vex.linkset.conflicts_total",
unit: "conflicts",
description: "Total count of linksets with provider disagreements detected.");
private static readonly Histogram<int> DisagreementCountHistogram =
Meter.CreateHistogram<int>(
"excititor.vex.linkset.disagreement_count",
unit: "disagreements",
description: "Distribution of disagreement count per linkset.");
private static readonly Counter<long> DisagreementByStatusCounter =
Meter.CreateCounter<long>(
"excititor.vex.linkset.disagreement_by_status",
unit: "disagreements",
description: "Disagreement counts broken down by conflicting status values.");
// Observation store metrics
private static readonly Counter<long> ObservationStoreCounter =
Meter.CreateCounter<long>(
"excititor.vex.observation.store_operations_total",
unit: "operations",
description: "Total observation store operations by type and outcome.");
private static readonly Histogram<int> ObservationBatchSizeHistogram =
Meter.CreateHistogram<int>(
"excititor.vex.observation.batch_size",
unit: "observations",
description: "Distribution of observation batch sizes for store operations.");
// Linkset store metrics
private static readonly Counter<long> LinksetStoreCounter =
Meter.CreateCounter<long>(
"excititor.vex.linkset.store_operations_total",
unit: "operations",
description: "Total linkset store operations by type and outcome.");
// Confidence metrics
private static readonly Histogram<double> LinksetConfidenceHistogram =
Meter.CreateHistogram<double>(
"excititor.vex.linkset.confidence_score",
unit: "score",
description: "Distribution of linkset confidence scores (0.0-1.0).");
/// <summary>
/// Records latency for a VEX ingest operation and bumps the matching operation counter.
/// </summary>
/// <param name="tenant">Tenant identifier; may be null (normalized inside BuildBaseTags).</param>
/// <param name="operation">Logical operation name.</param>
/// <param name="outcome">Operation outcome tag (e.g. success/failure).</param>
/// <param name="latencySeconds">Observed latency in seconds.</param>
public static void RecordIngestLatency(string? tenant, string operation, string outcome, double latencySeconds)
{
    // One tag set feeds both instruments so the two series stay correlated.
    var metricTags = BuildBaseTags(tenant, operation, outcome);
    IngestOperationCounter.Add(1, metricTags);
    IngestLatencyHistogram.Record(latencySeconds, metricTags);
}
/// <summary>
/// Records a scope resolution attempt and its outcome; on success with matches,
/// also records the match-count distribution.
/// </summary>
/// <param name="tenant">Tenant identifier; may be null.</param>
/// <param name="outcome">Resolution outcome ("success"/"failure").</param>
/// <param name="matchCount">Number of scopes matched; only recorded when positive on success.</param>
public static void RecordScopeResolution(string? tenant, string outcome, int matchCount = 0)
{
    var tags = new KeyValuePair<string, object?>[]
    {
        new("tenant", NormalizeTenant(tenant)),
        new("outcome", outcome),
    };

    ScopeResolutionCounter.Add(1, tags);

    var succeeded = string.Equals(outcome, "success", StringComparison.OrdinalIgnoreCase);
    if (succeeded && matchCount > 0)
    {
        ScopeMatchCountHistogram.Record(matchCount, tags);
    }
}
/// <summary>
/// Records conflict detection for a linkset. No-op when no conflicts were detected;
/// otherwise increments the conflict counter and, when known, records the
/// disagreement-count distribution.
/// </summary>
/// <param name="tenant">Tenant identifier; may be null.</param>
/// <param name="hasConflicts">Whether provider disagreements were detected.</param>
/// <param name="disagreementCount">Number of disagreements; only recorded when positive.</param>
public static void RecordLinksetConflict(string? tenant, bool hasConflicts, int disagreementCount = 0)
{
    if (!hasConflicts)
    {
        return;
    }

    var conflictTags = new KeyValuePair<string, object?>[]
    {
        new("tenant", NormalizeTenant(tenant)),
    };
    LinksetConflictCounter.Add(1, conflictTags);

    if (disagreementCount > 0)
    {
        DisagreementCountHistogram.Record(disagreementCount, conflictTags);
    }
}
/// <summary>
/// Records a linkset with detailed disagreement breakdown: overall conflict metrics,
/// per-status/provider disagreement counts, and a confidence-score observation.
/// No-op for null linksets or linksets without conflicts.
/// </summary>
/// <param name="tenant">Tenant identifier; may be null.</param>
/// <param name="linkset">Linkset whose disagreements are recorded.</param>
public static void RecordLinksetDisagreements(string? tenant, VexLinkset linkset)
{
    if (linkset is null || !linkset.HasConflicts)
    {
        return;
    }

    // NOTE(review): the already-normalized tenant is passed into RecordLinksetConflict,
    // which normalizes again — assumed idempotent; confirm NormalizeTenant semantics.
    var normalizedTenant = NormalizeTenant(tenant);
    RecordLinksetConflict(normalizedTenant, true, linkset.Disagreements.Length);

    // Per-disagreement breakdown by status and provider.
    foreach (var disagreement in linkset.Disagreements)
    {
        DisagreementByStatusCounter.Add(1, new KeyValuePair<string, object?>[]
        {
            new("tenant", normalizedTenant),
            new("status", disagreement.Status.ToLowerInvariant()),
            new("provider", disagreement.ProviderId),
        });
    }

    // Map the categorical confidence onto a numeric score for the histogram.
    var confidenceScore = linkset.Confidence switch
    {
        VexLinksetConfidence.High => 0.9,
        VexLinksetConfidence.Medium => 0.7,
        VexLinksetConfidence.Low => 0.4,
        _ => 0.5
    };
    LinksetConfidenceHistogram.Record(confidenceScore, new KeyValuePair<string, object?>[]
    {
        new("tenant", normalizedTenant),
        new("has_conflicts", linkset.HasConflicts),
    });
}
/// <summary>
/// Records an observation store operation. Successful operations with a
/// positive batch size also contribute to the batch-size distribution.
/// </summary>
public static void RecordObservationStoreOperation(
    string? tenant,
    string operation,
    string outcome,
    int batchSize = 1)
{
    var tags = BuildBaseTags(tenant, operation, outcome);
    ObservationStoreCounter.Add(1, tags);

    var isSuccess = string.Equals(outcome, "success", StringComparison.OrdinalIgnoreCase);
    if (!isSuccess || batchSize <= 0)
    {
        return;
    }

    var batchTags = new KeyValuePair<string, object?>[]
    {
        new("tenant", NormalizeTenant(tenant)),
        new("operation", operation),
    };
    ObservationBatchSizeHistogram.Record(batchSize, batchTags);
}
/// <summary>
/// Records a linkset store operation with the standard
/// tenant/operation/outcome tag set.
/// </summary>
public static void RecordLinksetStoreOperation(string? tenant, string operation, string outcome)
    => LinksetStoreCounter.Add(1, BuildBaseTags(tenant, operation, outcome));
/// <summary>
/// Records linkset confidence score distribution, translating the confidence
/// level into a numeric score (High=0.9, Medium=0.7, Low=0.4, otherwise 0.5).
/// </summary>
public static void RecordLinksetConfidence(string? tenant, VexLinksetConfidence confidence, bool hasConflicts)
{
    double score;
    switch (confidence)
    {
        case VexLinksetConfidence.High:
            score = 0.9;
            break;
        case VexLinksetConfidence.Medium:
            score = 0.7;
            break;
        case VexLinksetConfidence.Low:
            score = 0.4;
            break;
        default:
            score = 0.5;
            break;
    }

    var tags = new KeyValuePair<string, object?>[]
    {
        new("tenant", NormalizeTenant(tenant)),
        new("has_conflicts", hasConflicts),
        new("confidence_level", confidence.ToString().ToLowerInvariant()),
    };
    LinksetConfidenceHistogram.Record(score, tags);
}
// Maps null/blank tenant identifiers to the "default" tenant tag value so
// every metric carries a non-empty tenant dimension.
private static string NormalizeTenant(string? tenant)
{
    if (string.IsNullOrWhiteSpace(tenant))
    {
        return "default";
    }

    return tenant;
}
// Builds the common tenant/operation/outcome tag triple shared by most
// counters and histograms in this class.
private static KeyValuePair<string, object?>[] BuildBaseTags(string? tenant, string operation, string outcome)
{
    return new KeyValuePair<string, object?>[]
    {
        new("tenant", NormalizeTenant(tenant)),
        new("operation", operation),
        new("outcome", outcome),
    };
}
}

View File

@@ -0,0 +1,44 @@
using System;
namespace StellaOps.Excititor.Worker.Options;
/// <summary>
/// Settings controlling the orchestrator worker SDK integration
/// (heartbeat cadence, logging verbosity, and artifact retention).
/// </summary>
public sealed class VexWorkerOrchestratorOptions
{
    /// <summary>
    /// Toggles orchestrator integration on or off. Enabled by default.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Interval between heartbeat emissions while a job is running.
    /// </summary>
    public TimeSpan HeartbeatInterval { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Lower bound applied to <see cref="HeartbeatInterval"/> (safety floor).
    /// </summary>
    public TimeSpan MinHeartbeatInterval { get; set; } = TimeSpan.FromSeconds(5);

    /// <summary>
    /// Upper bound applied to <see cref="HeartbeatInterval"/> (safety cap).
    /// </summary>
    public TimeSpan MaxHeartbeatInterval { get; set; } = TimeSpan.FromMinutes(2);

    /// <summary>
    /// When true, heartbeat/artifact events are logged at debug verbosity.
    /// </summary>
    public bool EnableVerboseLogging { get; set; }

    /// <summary>
    /// Upper bound on the number of artifact hashes retained in state.
    /// </summary>
    public int MaxArtifactHashes { get; set; } = 1000;

    /// <summary>
    /// Tenant assigned to worker jobs that do not specify one.
    /// </summary>
    public string DefaultTenant { get; set; } = "default";
}

View File

@@ -0,0 +1,152 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Worker.Options;
namespace StellaOps.Excititor.Worker.Orchestration;
/// <summary>
/// Emits periodic heartbeats while a job executes. This is not a hosted
/// background service: callers start <see cref="RunAsync"/> in a background
/// task for the lifetime of the job and cancel it when the job finishes.
/// </summary>
internal sealed class VexWorkerHeartbeatService
{
    private readonly IVexWorkerOrchestratorClient _orchestratorClient;
    private readonly IOptions<VexWorkerOrchestratorOptions> _options;
    // NOTE(review): _timeProvider is injected but not referenced by any method
    // in this class — presumably reserved for future timestamping; confirm
    // before removing (dropping the ctor parameter would break DI callers).
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<VexWorkerHeartbeatService> _logger;

    public VexWorkerHeartbeatService(
        IVexWorkerOrchestratorClient orchestratorClient,
        IOptions<VexWorkerOrchestratorOptions> options,
        TimeProvider timeProvider,
        ILogger<VexWorkerHeartbeatService> logger)
    {
        _orchestratorClient = orchestratorClient ?? throw new ArgumentNullException(nameof(orchestratorClient));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs the heartbeat loop for the given job context until cancellation.
    /// Call this in a background task while the job is running.
    /// </summary>
    /// <param name="context">Job context identifying the run.</param>
    /// <param name="statusProvider">Supplies the current job status for each heartbeat.</param>
    /// <param name="progressProvider">Supplies an optional progress value per heartbeat.</param>
    /// <param name="lastArtifactHashProvider">Supplies the most recent artifact hash, if any.</param>
    /// <param name="lastArtifactKindProvider">Supplies the most recent artifact kind, if any.</param>
    /// <param name="cancellationToken">Stops the loop when signalled.</param>
    public async Task RunAsync(
        VexWorkerJobContext context,
        Func<VexWorkerHeartbeatStatus> statusProvider,
        Func<int?> progressProvider,
        Func<string?> lastArtifactHashProvider,
        Func<string?> lastArtifactKindProvider,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(statusProvider);
        // Previously only statusProvider was validated while the other
        // (non-nullable) delegates were invoked with "?.": validate all of
        // them up front so a null argument fails fast instead of silently
        // omitting heartbeat fields.
        ArgumentNullException.ThrowIfNull(progressProvider);
        ArgumentNullException.ThrowIfNull(lastArtifactHashProvider);
        ArgumentNullException.ThrowIfNull(lastArtifactKindProvider);

        if (!_options.Value.Enabled)
        {
            _logger.LogDebug("Orchestrator heartbeat service disabled; skipping heartbeat loop.");
            return;
        }

        var interval = ComputeInterval();
        _logger.LogDebug(
            "Starting heartbeat loop for job {RunId} with interval {Interval}",
            context.RunId,
            interval);

        // Yield so the caller's thread is not blocked by the first heartbeat.
        await Task.Yield();

        try
        {
            using var timer = new PeriodicTimer(interval);

            // Send an initial heartbeat immediately so the orchestrator sees
            // the job as alive before the first timer tick elapses.
            await SendHeartbeatAsync(
                context,
                statusProvider(),
                progressProvider(),
                lastArtifactHashProvider(),
                lastArtifactKindProvider(),
                cancellationToken).ConfigureAwait(false);

            // WaitForNextTickAsync throws OperationCanceledException when the
            // token fires, so no explicit in-loop cancellation check is needed.
            while (await timer.WaitForNextTickAsync(cancellationToken).ConfigureAwait(false))
            {
                await SendHeartbeatAsync(
                    context,
                    statusProvider(),
                    progressProvider(),
                    lastArtifactHashProvider(),
                    lastArtifactKindProvider(),
                    cancellationToken).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            _logger.LogDebug("Heartbeat loop cancelled for job {RunId}", context.RunId);
        }
        catch (Exception ex)
        {
            // The heartbeat loop must never take down the job itself; log and exit.
            _logger.LogWarning(
                ex,
                "Heartbeat loop error for job {RunId}: {Message}",
                context.RunId,
                ex.Message);
        }
    }

    // Sends a single heartbeat. Failures are logged and swallowed so that a
    // transient orchestrator error does not abort the loop.
    private async Task SendHeartbeatAsync(
        VexWorkerJobContext context,
        VexWorkerHeartbeatStatus status,
        int? progress,
        string? lastArtifactHash,
        string? lastArtifactKind,
        CancellationToken cancellationToken)
    {
        try
        {
            var heartbeat = new VexWorkerHeartbeat(
                status,
                progress,
                QueueDepth: null,
                lastArtifactHash,
                lastArtifactKind,
                ErrorCode: null,
                RetryAfterSeconds: null);

            await _orchestratorClient.SendHeartbeatAsync(context, heartbeat, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Failed to send heartbeat for job {RunId}: {Message}",
                context.RunId,
                ex.Message);
        }
    }

    // Clamps the configured heartbeat interval into [MinHeartbeatInterval, MaxHeartbeatInterval].
    private TimeSpan ComputeInterval()
    {
        var opts = _options.Value;
        var interval = opts.HeartbeatInterval;
        if (interval < opts.MinHeartbeatInterval)
        {
            interval = opts.MinHeartbeatInterval;
        }
        else if (interval > opts.MaxHeartbeatInterval)
        {
            interval = opts.MaxHeartbeatInterval;
        }

        return interval;
    }
}

View File

@@ -0,0 +1,328 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Worker.Options;
namespace StellaOps.Excititor.Worker.Orchestration;
/// <summary>
/// Default implementation of <see cref="IVexWorkerOrchestratorClient"/>.
/// Stores heartbeats and artifacts locally (in connector state) and emits
/// them to the orchestrator registry when configured.
/// </summary>
internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
{
    private readonly IVexConnectorStateRepository _stateRepository;
    private readonly TimeProvider _timeProvider;
    private readonly IOptions<VexWorkerOrchestratorOptions> _options;
    private readonly ILogger<VexWorkerOrchestratorClient> _logger;

    public VexWorkerOrchestratorClient(
        IVexConnectorStateRepository stateRepository,
        TimeProvider timeProvider,
        IOptions<VexWorkerOrchestratorOptions> options,
        ILogger<VexWorkerOrchestratorClient> logger)
    {
        _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Creates a new job context with a fresh run id and the current UTC time.
    /// </summary>
    public ValueTask<VexWorkerJobContext> StartJobAsync(
        string tenant,
        string connectorId,
        string? checkpoint,
        CancellationToken cancellationToken = default)
    {
        var runId = Guid.NewGuid();
        var startedAt = _timeProvider.GetUtcNow();
        var context = new VexWorkerJobContext(tenant, connectorId, runId, checkpoint, startedAt);

        _logger.LogInformation(
            "Orchestrator job started: tenant={Tenant} connector={ConnectorId} runId={RunId} checkpoint={Checkpoint}",
            tenant,
            connectorId,
            runId,
            checkpoint ?? "(none)");

        return ValueTask.FromResult(context);
    }

    /// <summary>
    /// Persists heartbeat timestamp/status into connector state and optionally
    /// logs the heartbeat when verbose logging is enabled.
    /// </summary>
    public async ValueTask SendHeartbeatAsync(
        VexWorkerJobContext context,
        VexWorkerHeartbeat heartbeat,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(heartbeat);

        var sequence = context.NextSequence();
        var timestamp = _timeProvider.GetUtcNow();

        // Update state with heartbeat info.
        var state = await _stateRepository.GetAsync(context.ConnectorId, cancellationToken).ConfigureAwait(false)
            ?? new VexConnectorState(context.ConnectorId, null, ImmutableArray<string>.Empty);

        var updated = state with
        {
            LastHeartbeatAt = timestamp,
            LastHeartbeatStatus = heartbeat.Status.ToString()
        };

        await _stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false);

        if (_options.Value.EnableVerboseLogging)
        {
            _logger.LogDebug(
                "Orchestrator heartbeat: runId={RunId} seq={Sequence} status={Status} progress={Progress} artifact={ArtifactHash}",
                context.RunId,
                sequence,
                heartbeat.Status,
                heartbeat.Progress,
                heartbeat.LastArtifactHash);
        }
    }

    /// <summary>
    /// Tracks an artifact hash in connector state for determinism verification,
    /// capping the retained hash list at the configured maximum.
    /// </summary>
    public async ValueTask RecordArtifactAsync(
        VexWorkerJobContext context,
        VexWorkerArtifact artifact,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(artifact);

        var state = await _stateRepository.GetAsync(context.ConnectorId, cancellationToken).ConfigureAwait(false)
            ?? new VexConnectorState(context.ConnectorId, null, ImmutableArray<string>.Empty);

        var digests = state.DocumentDigests.IsDefault
            ? ImmutableArray<string>.Empty
            : state.DocumentDigests;

        // Cap retained hashes to avoid unbounded state growth. Previously this
        // was a hard-coded `const int maxDigests = 1000`, ignoring the
        // configured VexWorkerOrchestratorOptions.MaxArtifactHashes.
        var maxDigests = Math.Max(1, _options.Value.MaxArtifactHashes);
        if (!digests.Contains(artifact.Hash))
        {
            digests = digests.Length >= maxDigests
                ? digests.RemoveAt(0).Add(artifact.Hash)
                : digests.Add(artifact.Hash);
        }

        var updated = state with
        {
            DocumentDigests = digests,
            LastArtifactHash = artifact.Hash,
            LastArtifactKind = artifact.Kind
        };

        await _stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "Orchestrator artifact recorded: runId={RunId} hash={Hash} kind={Kind} provider={Provider}",
            context.RunId,
            artifact.Hash,
            artifact.Kind,
            artifact.ProviderId);
    }

    /// <summary>
    /// Marks the job as succeeded: clears failure bookkeeping and records the
    /// final checkpoint/artifact hash in connector state.
    /// </summary>
    public async ValueTask CompleteJobAsync(
        VexWorkerJobContext context,
        VexWorkerJobResult result,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(result);

        var state = await _stateRepository.GetAsync(context.ConnectorId, cancellationToken).ConfigureAwait(false)
            ?? new VexConnectorState(context.ConnectorId, null, ImmutableArray<string>.Empty);

        var updated = state with
        {
            LastUpdated = result.CompletedAt,
            LastSuccessAt = result.CompletedAt,
            LastHeartbeatAt = result.CompletedAt,
            LastHeartbeatStatus = VexWorkerHeartbeatStatus.Succeeded.ToString(),
            LastArtifactHash = result.LastArtifactHash,
            LastCheckpoint = result.LastCheckpoint,
            FailureCount = 0,
            NextEligibleRun = null,
            LastFailureReason = null
        };

        await _stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false);

        var duration = result.CompletedAt - context.StartedAt;
        _logger.LogInformation(
            "Orchestrator job completed: runId={RunId} connector={ConnectorId} documents={Documents} claims={Claims} duration={Duration}",
            context.RunId,
            context.ConnectorId,
            result.DocumentsProcessed,
            result.ClaimsGenerated,
            duration);
    }

    /// <summary>
    /// Marks the job as failed: bumps the failure count, records a truncated
    /// failure reason, and (when supplied) schedules the next eligible run.
    /// </summary>
    public async ValueTask FailJobAsync(
        VexWorkerJobContext context,
        string errorCode,
        string? errorMessage,
        int? retryAfterSeconds,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);

        var now = _timeProvider.GetUtcNow();
        var state = await _stateRepository.GetAsync(context.ConnectorId, cancellationToken).ConfigureAwait(false)
            ?? new VexConnectorState(context.ConnectorId, null, ImmutableArray<string>.Empty);

        var failureCount = state.FailureCount + 1;
        var nextEligible = retryAfterSeconds.HasValue
            ? now.AddSeconds(retryAfterSeconds.Value)
            : (DateTimeOffset?)null;

        var updated = state with
        {
            LastHeartbeatAt = now,
            LastHeartbeatStatus = VexWorkerHeartbeatStatus.Failed.ToString(),
            FailureCount = failureCount,
            NextEligibleRun = nextEligible,
            // Cap the stored reason so a pathological message cannot bloat state.
            LastFailureReason = Truncate($"{errorCode}: {errorMessage}", 512)
        };

        await _stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false);

        _logger.LogWarning(
            "Orchestrator job failed: runId={RunId} connector={ConnectorId} error={ErrorCode} retryAfter={RetryAfter}s",
            context.RunId,
            context.ConnectorId,
            errorCode,
            retryAfterSeconds);
    }

    /// <summary>
    /// Fails the job using a pre-classified error; retry scheduling is only
    /// propagated when the error is retryable.
    /// </summary>
    public ValueTask FailJobAsync(
        VexWorkerJobContext context,
        VexWorkerError error,
        CancellationToken cancellationToken = default)
    {
        // Validate context here too: the log line below dereferences it, and
        // previously a null context would surface as a NullReferenceException
        // instead of an ArgumentNullException.
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(error);

        _logger.LogDebug(
            "Orchestrator job failed with classified error: runId={RunId} code={Code} category={Category} retryable={Retryable}",
            context.RunId,
            error.Code,
            error.Category,
            error.Retryable);

        return FailJobAsync(
            context,
            error.Code,
            error.Message,
            error.Retryable ? error.RetryAfterSeconds : null,
            cancellationToken);
    }

    /// <summary>
    /// Local implementation has no external command queue; always reports no
    /// pending command. A full orchestrator integration would poll for
    /// throttle/pause/abort commands here.
    /// </summary>
    public ValueTask<VexWorkerCommand?> GetPendingCommandAsync(
        VexWorkerJobContext context,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);

        // Both the enabled and disabled paths previously returned null; the
        // dead branch on _options.Value.Enabled has been collapsed.
        return ValueTask.FromResult<VexWorkerCommand?>(null);
    }

    /// <summary>
    /// Acknowledges a command. In local mode this is a logging no-op.
    /// </summary>
    public ValueTask AcknowledgeCommandAsync(
        VexWorkerJobContext context,
        long commandSequence,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);

        _logger.LogDebug(
            "Orchestrator command acknowledged: runId={RunId} sequence={Sequence}",
            context.RunId,
            commandSequence);

        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Persists a checkpoint (cursor, processed digests, resume tokens) into
    /// connector state.
    /// </summary>
    public async ValueTask SaveCheckpointAsync(
        VexWorkerJobContext context,
        VexWorkerCheckpoint checkpoint,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(checkpoint);

        var now = _timeProvider.GetUtcNow();
        var state = await _stateRepository.GetAsync(context.ConnectorId, cancellationToken).ConfigureAwait(false)
            ?? new VexConnectorState(context.ConnectorId, null, ImmutableArray<string>.Empty);

        var updated = state with
        {
            LastCheckpoint = checkpoint.Cursor,
            LastUpdated = checkpoint.LastProcessedAt ?? now,
            DocumentDigests = checkpoint.ProcessedDigests.IsDefault
                ? ImmutableArray<string>.Empty
                : checkpoint.ProcessedDigests,
            ResumeTokens = checkpoint.ResumeTokens.IsEmpty
                ? ImmutableDictionary<string, string>.Empty
                : checkpoint.ResumeTokens
        };

        await _stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "Orchestrator checkpoint saved: runId={RunId} connector={ConnectorId} cursor={Cursor} digests={DigestCount}",
            context.RunId,
            context.ConnectorId,
            checkpoint.Cursor ?? "(none)",
            checkpoint.ProcessedDigests.Length);
    }

    /// <summary>
    /// Loads the last saved checkpoint for a connector, or null when no state
    /// exists yet.
    /// </summary>
    public async ValueTask<VexWorkerCheckpoint?> LoadCheckpointAsync(
        string connectorId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);

        var state = await _stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false);
        if (state is null)
        {
            return null;
        }

        return new VexWorkerCheckpoint(
            connectorId,
            state.LastCheckpoint,
            state.LastUpdated,
            state.DocumentDigests.IsDefault ? ImmutableArray<string>.Empty : state.DocumentDigests,
            state.ResumeTokens.IsEmpty ? ImmutableDictionary<string, string>.Empty : state.ResumeTokens);
    }

    // Returns the value truncated to maxLength characters; null/empty maps to "".
    private static string Truncate(string? value, int maxLength)
    {
        if (string.IsNullOrEmpty(value))
        {
            return string.Empty;
        }

        return value.Length <= maxLength
            ? value
            : value[..maxLength];
    }
}

View File

@@ -1,25 +1,27 @@
using System.IO;
using System.Linq;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using System.IO;
using System.Linq;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Plugin;
using StellaOps.Excititor.Connectors.RedHat.CSAF.DependencyInjection;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Aoc;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Formats.CSAF;
using StellaOps.Excititor.Formats.CycloneDX;
using StellaOps.Excititor.Formats.OpenVEX;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Worker.Auth;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Orchestration;
using StellaOps.Excititor.Worker.Scheduling;
using StellaOps.Excititor.Worker.Signature;
using StellaOps.Excititor.Attestation.Extensions;
using StellaOps.Excititor.Attestation.Verification;
using StellaOps.IssuerDirectory.Client;
var builder = Host.CreateApplicationBuilder(args);
var services = builder.Services;
var configuration = builder.Configuration;
@@ -40,11 +42,11 @@ services.PostConfigure<VexWorkerOptions>(options =>
}
});
services.AddRedHatCsafConnector();
services.AddOptions<VexMongoStorageOptions>()
.Bind(configuration.GetSection("Excititor:Storage:Mongo"))
.ValidateOnStart();
services.AddOptions<VexMongoStorageOptions>()
.Bind(configuration.GetSection("Excititor:Storage:Mongo"))
.ValidateOnStart();
services.AddExcititorMongoStorage();
services.AddCsafNormalizer();
services.AddCycloneDxNormalizer();
@@ -71,38 +73,45 @@ services.PostConfigure<VexAttestationVerificationOptions>(options =>
}
});
services.AddExcititorAocGuards();
services.AddSingleton<IValidateOptions<VexWorkerOptions>, VexWorkerOptionsValidator>();
services.AddSingleton(TimeProvider.System);
services.PostConfigure<VexWorkerOptions>(options =>
{
if (!options.Providers.Any(provider => string.Equals(provider.ProviderId, "excititor:redhat", StringComparison.OrdinalIgnoreCase)))
{
options.Providers.Add(new VexWorkerProviderOptions
{
ProviderId = "excititor:redhat",
});
}
});
services.AddSingleton<IValidateOptions<VexWorkerOptions>, VexWorkerOptionsValidator>();
services.AddSingleton(TimeProvider.System);
services.PostConfigure<VexWorkerOptions>(options =>
{
if (!options.Providers.Any(provider => string.Equals(provider.ProviderId, "excititor:redhat", StringComparison.OrdinalIgnoreCase)))
{
options.Providers.Add(new VexWorkerProviderOptions
{
ProviderId = "excititor:redhat",
});
}
});
services.AddSingleton<PluginCatalog>(provider =>
{
var pluginOptions = provider.GetRequiredService<IOptions<VexWorkerPluginOptions>>().Value;
var catalog = new PluginCatalog();
var directory = pluginOptions.ResolveDirectory();
if (Directory.Exists(directory))
{
catalog.AddFromDirectory(directory, pluginOptions.ResolveSearchPattern());
}
else
{
var logger = provider.GetRequiredService<ILogger<Program>>();
logger.LogWarning("Excititor worker plugin directory '{Directory}' does not exist; proceeding without external connectors.", directory);
}
return catalog;
var directory = pluginOptions.ResolveDirectory();
if (Directory.Exists(directory))
{
catalog.AddFromDirectory(directory, pluginOptions.ResolveSearchPattern());
}
else
{
var logger = provider.GetRequiredService<ILogger<Program>>();
logger.LogWarning("Excititor worker plugin directory '{Directory}' does not exist; proceeding without external connectors.", directory);
}
return catalog;
});
// Orchestrator worker SDK integration
services.AddOptions<VexWorkerOrchestratorOptions>()
.Bind(configuration.GetSection("Excititor:Worker:Orchestrator"))
.ValidateOnStart();
services.AddSingleton<IVexWorkerOrchestratorClient, VexWorkerOrchestratorClient>();
services.AddSingleton<VexWorkerHeartbeatService>();
services.AddSingleton<IVexProviderRunner, DefaultVexProviderRunner>();
services.AddHostedService<VexWorkerHostedService>();
if (!workerConfigSnapshot.DisableConsensus)
@@ -115,5 +124,5 @@ services.AddSingleton<ITenantAuthorityClientFactory, TenantAuthorityClientFactor
var host = builder.Build();
await host.RunAsync();
public partial class Program;
public partial class Program;

View File

@@ -9,8 +9,10 @@ using MongoDB.Driver;
using StellaOps.Plugin;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Orchestration;
using StellaOps.Excititor.Worker.Signature;
namespace StellaOps.Excititor.Worker.Scheduling;
@@ -19,19 +21,27 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
{
private readonly IServiceProvider _serviceProvider;
private readonly PluginCatalog _pluginCatalog;
private readonly IVexWorkerOrchestratorClient _orchestratorClient;
private readonly VexWorkerHeartbeatService _heartbeatService;
private readonly ILogger<DefaultVexProviderRunner> _logger;
private readonly TimeProvider _timeProvider;
private readonly VexWorkerRetryOptions _retryOptions;
private readonly VexWorkerOrchestratorOptions _orchestratorOptions;
public DefaultVexProviderRunner(
IServiceProvider serviceProvider,
PluginCatalog pluginCatalog,
IVexWorkerOrchestratorClient orchestratorClient,
VexWorkerHeartbeatService heartbeatService,
ILogger<DefaultVexProviderRunner> logger,
TimeProvider timeProvider,
IOptions<VexWorkerOptions> workerOptions)
IOptions<VexWorkerOptions> workerOptions,
IOptions<VexWorkerOrchestratorOptions> orchestratorOptions)
{
_serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
_pluginCatalog = pluginCatalog ?? throw new ArgumentNullException(nameof(pluginCatalog));
_orchestratorClient = orchestratorClient ?? throw new ArgumentNullException(nameof(orchestratorClient));
_heartbeatService = heartbeatService ?? throw new ArgumentNullException(nameof(heartbeatService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
if (workerOptions is null)
@@ -40,6 +50,7 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
}
_retryOptions = workerOptions.Value?.Retry ?? throw new InvalidOperationException("VexWorkerOptions.Retry must be configured.");
_orchestratorOptions = orchestratorOptions?.Value ?? new VexWorkerOrchestratorOptions();
}
public async ValueTask RunAsync(VexWorkerSchedule schedule, CancellationToken cancellationToken)
@@ -118,7 +129,7 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
var verifyingSink = new VerifyingVexRawDocumentSink(rawStore, signatureVerifier);
var context = new VexConnectorContext(
var connectorContext = new VexConnectorContext(
Since: stateBeforeRun?.LastUpdated,
Settings: effectiveSettings,
RawSink: verifyingSink,
@@ -127,33 +138,128 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
Services: scopeProvider,
ResumeTokens: stateBeforeRun?.ResumeTokens ?? ImmutableDictionary<string, string>.Empty);
// Start orchestrator job for heartbeat/progress tracking
var jobContext = await _orchestratorClient.StartJobAsync(
_orchestratorOptions.DefaultTenant,
connector.Id,
stateBeforeRun?.LastCheckpoint,
cancellationToken).ConfigureAwait(false);
var documentCount = 0;
string? lastArtifactHash = null;
string? lastArtifactKind = null;
var currentStatus = VexWorkerHeartbeatStatus.Running;
// Start heartbeat loop in background
using var heartbeatCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
var heartbeatTask = _heartbeatService.RunAsync(
jobContext,
() => currentStatus,
() => null, // Progress not tracked at document level
() => lastArtifactHash,
() => lastArtifactKind,
heartbeatCts.Token);
try
{
await foreach (var document in connector.FetchAsync(context, cancellationToken).ConfigureAwait(false))
await foreach (var document in connector.FetchAsync(connectorContext, cancellationToken).ConfigureAwait(false))
{
documentCount++;
lastArtifactHash = document.Digest;
lastArtifactKind = "vex-raw-document";
// Record artifact for determinism tracking
if (_orchestratorOptions.Enabled)
{
var artifact = new VexWorkerArtifact(
document.Digest,
"vex-raw-document",
connector.Id,
document.Digest,
_timeProvider.GetUtcNow());
await _orchestratorClient.RecordArtifactAsync(jobContext, artifact, cancellationToken).ConfigureAwait(false);
}
}
// Stop heartbeat loop
currentStatus = VexWorkerHeartbeatStatus.Succeeded;
await heartbeatCts.CancelAsync().ConfigureAwait(false);
await SafeWaitForTaskAsync(heartbeatTask).ConfigureAwait(false);
_logger.LogInformation(
"Connector {ConnectorId} persisted {DocumentCount} raw document(s) this run.",
connector.Id,
documentCount);
await UpdateSuccessStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
// Complete orchestrator job
var completedAt = _timeProvider.GetUtcNow();
var result = new VexWorkerJobResult(
documentCount,
ClaimsGenerated: 0, // Claims generated in separate normalization pass
lastArtifactHash,
lastArtifactHash,
completedAt);
await _orchestratorClient.CompleteJobAsync(jobContext, result, cancellationToken).ConfigureAwait(false);
await UpdateSuccessStateAsync(stateRepository, descriptor.Id, completedAt, cancellationToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
currentStatus = VexWorkerHeartbeatStatus.Failed;
await heartbeatCts.CancelAsync().ConfigureAwait(false);
await SafeWaitForTaskAsync(heartbeatTask).ConfigureAwait(false);
var error = VexWorkerError.Cancelled("Operation cancelled by host");
await _orchestratorClient.FailJobAsync(jobContext, error, CancellationToken.None).ConfigureAwait(false);
throw;
}
catch (Exception ex)
{
await UpdateFailureStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), ex, cancellationToken).ConfigureAwait(false);
currentStatus = VexWorkerHeartbeatStatus.Failed;
await heartbeatCts.CancelAsync().ConfigureAwait(false);
await SafeWaitForTaskAsync(heartbeatTask).ConfigureAwait(false);
// Classify the error for appropriate retry handling
var classifiedError = VexWorkerError.FromException(ex, stage: "fetch");
// Apply backoff delay for retryable errors
var retryDelay = classifiedError.Retryable
? (int)CalculateDelayWithJitter(1).TotalSeconds
: (int?)null;
var errorWithRetry = classifiedError.Retryable && retryDelay.HasValue
? new VexWorkerError(
classifiedError.Code,
classifiedError.Category,
classifiedError.Message,
classifiedError.Retryable,
retryDelay,
classifiedError.Stage,
classifiedError.Details)
: classifiedError;
await _orchestratorClient.FailJobAsync(jobContext, errorWithRetry, CancellationToken.None).ConfigureAwait(false);
await UpdateFailureStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), ex, classifiedError.Retryable, cancellationToken).ConfigureAwait(false);
throw;
}
}
private static async Task SafeWaitForTaskAsync(Task task)
{
try
{
await task.ConfigureAwait(false);
}
catch (OperationCanceledException)
{
// Expected when cancellation is requested
}
}
private async Task UpdateSuccessStateAsync(
IVexConnectorStateRepository stateRepository,
string connectorId,
@@ -179,33 +285,45 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
string connectorId,
DateTimeOffset failureTime,
Exception exception,
bool retryable,
CancellationToken cancellationToken)
{
var current = await stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false)
?? new VexConnectorState(connectorId, null, ImmutableArray<string>.Empty);
var failureCount = current.FailureCount + 1;
var delay = CalculateDelayWithJitter(failureCount);
var nextEligible = failureTime + delay;
DateTimeOffset? nextEligible;
if (failureCount >= _retryOptions.FailureThreshold)
if (retryable)
{
var quarantineUntil = failureTime + _retryOptions.QuarantineDuration;
if (quarantineUntil > nextEligible)
// Apply exponential backoff for retryable errors
var delay = CalculateDelayWithJitter(failureCount);
nextEligible = failureTime + delay;
if (failureCount >= _retryOptions.FailureThreshold)
{
nextEligible = quarantineUntil;
var quarantineUntil = failureTime + _retryOptions.QuarantineDuration;
if (quarantineUntil > nextEligible)
{
nextEligible = quarantineUntil;
}
}
var retryCap = failureTime + _retryOptions.RetryCap;
if (nextEligible > retryCap)
{
nextEligible = retryCap;
}
if (nextEligible < failureTime)
{
nextEligible = failureTime;
}
}
var retryCap = failureTime + _retryOptions.RetryCap;
if (nextEligible > retryCap)
else
{
nextEligible = retryCap;
}
if (nextEligible < failureTime)
{
nextEligible = failureTime;
// Non-retryable errors: apply quarantine immediately
nextEligible = failureTime + _retryOptions.QuarantineDuration;
}
var updated = current with
@@ -219,9 +337,10 @@ internal sealed class DefaultVexProviderRunner : IVexProviderRunner
_logger.LogWarning(
exception,
"Connector {ConnectorId} failed (attempt {Attempt}). Next eligible run at {NextEligible:O}.",
"Connector {ConnectorId} failed (attempt {Attempt}, retryable={Retryable}). Next eligible run at {NextEligible:O}.",
connectorId,
failureCount,
retryable,
nextEligible);
}

View File

@@ -0,0 +1,247 @@
using System;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Attestation.Dsse;
using StellaOps.Excititor.Attestation.Signing;
using StellaOps.Excititor.Core.Evidence;
namespace StellaOps.Excititor.Attestation.Evidence;
/// <summary>
/// Default implementation of <see cref="IVexEvidenceAttestor"/> that creates DSSE attestations for evidence manifests.
/// </summary>
public sealed class VexEvidenceAttestor : IVexEvidenceAttestor
{
    // Standard DSSE payload type for in-toto statements.
    internal const string PayloadType = "application/vnd.in-toto+json";
    private readonly IVexSigner _signer;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<VexEvidenceAttestor> _logger;
    // Configured once in the constructor and reused for both the in-toto payload and the DSSE
    // envelope, so both are serialized with identical settings (camelCase, nulls dropped, compact).
    private readonly JsonSerializerOptions _serializerOptions;
    /// <summary>
    /// Creates the attestor.
    /// </summary>
    /// <param name="signer">Signer that produces the DSSE signature over the statement payload.</param>
    /// <param name="logger">Logger for diagnostic output.</param>
    /// <param name="timeProvider">Clock source; defaults to <see cref="TimeProvider.System"/>. Inject a fake in tests for determinism.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="signer"/> or <paramref name="logger"/> is null.</exception>
    public VexEvidenceAttestor(
        IVexSigner signer,
        ILogger<VexEvidenceAttestor> logger,
        TimeProvider? timeProvider = null)
    {
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _serializerOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false,
        };
        _serializerOptions.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase));
    }
    /// <summary>
    /// Builds an in-toto statement for <paramref name="manifest"/>, signs the serialized payload,
    /// wraps it in a DSSE envelope, and returns the signed manifest plus envelope metadata.
    /// </summary>
    /// <param name="manifest">The evidence locker manifest to attest.</param>
    /// <param name="cancellationToken">Flows into the signer call.</param>
    /// <returns>The attestation result: signed manifest, envelope JSON, envelope hash, id, and timestamp.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="manifest"/> is null.</exception>
    public async ValueTask<VexEvidenceAttestationResult> AttestManifestAsync(
        VexLockerManifest manifest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        var attestedAt = _timeProvider.GetUtcNow();
        var attestationId = CreateAttestationId(manifest, attestedAt);
        // Build in-toto statement. The subject digest strips the expected "sha256:" prefix from the
        // Merkle root; note Replace removes the token wherever it appears (not anchored to the start),
        // and a root without the prefix passes through unchanged — assumes roots are "sha256:"-prefixed.
        var predicate = VexEvidenceAttestationPredicate.FromManifest(manifest);
        var subject = new VexEvidenceInTotoSubject(
            manifest.ManifestId,
            ImmutableDictionary<string, string>.Empty.Add("sha256", manifest.MerkleRoot.Replace("sha256:", "")));
        var statement = new InTotoStatementDto
        {
            Type = VexEvidenceInTotoStatement.InTotoStatementType,
            PredicateType = VexEvidenceInTotoStatement.EvidenceLockerPredicateType,
            Subject = new[] { new InTotoSubjectDto { Name = subject.Name, Digest = subject.Digest } },
            Predicate = new InTotoPredicateDto
            {
                ManifestId = predicate.ManifestId,
                Tenant = predicate.Tenant,
                MerkleRoot = predicate.MerkleRoot,
                ItemCount = predicate.ItemCount,
                CreatedAt = predicate.CreatedAt,
                // Omit the metadata property entirely when empty (WhenWritingNull drops nulls).
                Metadata = predicate.Metadata.Count > 0 ? predicate.Metadata : null
            }
        };
        // Serialize and sign. The signature covers exactly these payload bytes; the same bytes are
        // base64-encoded into the envelope below, so verification can round-trip them.
        var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, _serializerOptions);
        var signatureResult = await _signer.SignAsync(payloadBytes, cancellationToken).ConfigureAwait(false);
        // Build DSSE envelope
        var envelope = new DsseEnvelope(
            Convert.ToBase64String(payloadBytes),
            PayloadType,
            new[] { new DsseSignature(signatureResult.Signature, signatureResult.KeyId) });
        var envelopeJson = JsonSerializer.Serialize(envelope, _serializerOptions);
        var envelopeHash = ComputeHash(envelopeJson);
        // Create signed manifest (attaches the raw signature string to the manifest record).
        var signedManifest = manifest.WithSignature(signatureResult.Signature);
        _logger.LogDebug(
            "Evidence attestation created for manifest {ManifestId}: attestation={AttestationId} hash={Hash}",
            manifest.ManifestId,
            attestationId,
            envelopeHash);
        return new VexEvidenceAttestationResult(
            signedManifest,
            envelopeJson,
            envelopeHash,
            attestationId,
            attestedAt);
    }
    /// <summary>
    /// Checks that a serialized DSSE envelope's payload describes <paramref name="manifest"/>:
    /// statement/predicate types, manifest id, Merkle root, and item count must all match.
    /// </summary>
    /// <param name="manifest">The manifest the envelope is expected to describe.</param>
    /// <param name="dsseEnvelopeJson">The DSSE envelope serialized as JSON.</param>
    /// <param name="cancellationToken">Unused; kept for interface symmetry.</param>
    /// <returns>Success with diagnostics (envelope hash, verification time) or a failure with a reason.</returns>
    /// <remarks>
    /// NOTE(review): this verifies payload *content* only — it never checks the envelope's
    /// signatures cryptographically. Confirm that signature verification happens elsewhere
    /// (e.g. an attestation verifier) before relying on this result for trust decisions.
    /// </remarks>
    public ValueTask<VexEvidenceVerificationResult> VerifyAttestationAsync(
        VexLockerManifest manifest,
        string dsseEnvelopeJson,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        if (string.IsNullOrWhiteSpace(dsseEnvelopeJson))
        {
            return ValueTask.FromResult(VexEvidenceVerificationResult.Failure("DSSE envelope is required."));
        }
        try
        {
            var envelope = JsonSerializer.Deserialize<DsseEnvelope>(dsseEnvelopeJson, _serializerOptions);
            if (envelope is null)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure("Invalid DSSE envelope format."));
            }
            // Decode payload and verify it matches the manifest
            var payloadBytes = Convert.FromBase64String(envelope.Payload);
            var statement = JsonSerializer.Deserialize<InTotoStatementDto>(payloadBytes, _serializerOptions);
            if (statement is null)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure("Invalid in-toto statement format."));
            }
            // Verify statement type
            if (statement.Type != VexEvidenceInTotoStatement.InTotoStatementType)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure(
                    $"Invalid statement type: expected {VexEvidenceInTotoStatement.InTotoStatementType}, got {statement.Type}"));
            }
            // Verify predicate type
            if (statement.PredicateType != VexEvidenceInTotoStatement.EvidenceLockerPredicateType)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure(
                    $"Invalid predicate type: expected {VexEvidenceInTotoStatement.EvidenceLockerPredicateType}, got {statement.PredicateType}"));
            }
            // Verify manifest ID matches
            if (statement.Predicate?.ManifestId != manifest.ManifestId)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure(
                    $"Manifest ID mismatch: expected {manifest.ManifestId}, got {statement.Predicate?.ManifestId}"));
            }
            // Verify Merkle root matches
            if (statement.Predicate?.MerkleRoot != manifest.MerkleRoot)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure(
                    $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {statement.Predicate?.MerkleRoot}"));
            }
            // Verify item count matches
            if (statement.Predicate?.ItemCount != manifest.Items.Length)
            {
                return ValueTask.FromResult(VexEvidenceVerificationResult.Failure(
                    $"Item count mismatch: expected {manifest.Items.Length}, got {statement.Predicate?.ItemCount}"));
            }
            var diagnostics = ImmutableDictionary.CreateBuilder<string, string>();
            diagnostics.Add("envelope_hash", ComputeHash(dsseEnvelopeJson));
            diagnostics.Add("verified_at", _timeProvider.GetUtcNow().ToString("O"));
            _logger.LogDebug("Evidence attestation verified for manifest {ManifestId}", manifest.ManifestId);
            return ValueTask.FromResult(VexEvidenceVerificationResult.Success(diagnostics.ToImmutable()));
        }
        catch (JsonException ex)
        {
            _logger.LogWarning(ex, "Failed to parse DSSE envelope for manifest {ManifestId}", manifest.ManifestId);
            return ValueTask.FromResult(VexEvidenceVerificationResult.Failure($"JSON parse error: {ex.Message}"));
        }
        catch (FormatException ex)
        {
            _logger.LogWarning(ex, "Failed to decode base64 payload for manifest {ManifestId}", manifest.ManifestId);
            return ValueTask.FromResult(VexEvidenceVerificationResult.Failure($"Base64 decode error: {ex.Message}"));
        }
    }
    // Builds a deterministic-looking id from tenant + millisecond timestamp. Two attestations for
    // the same tenant within the same millisecond would collide — acceptable only if callers
    // serialize attestation creation; TODO confirm.
    private static string CreateAttestationId(VexLockerManifest manifest, DateTimeOffset timestamp)
    {
        var normalized = manifest.Tenant.ToLowerInvariant();
        var date = timestamp.ToString("yyyyMMddHHmmssfff");
        return $"attest:evidence:{normalized}:{date}";
    }
    // SHA-256 of the UTF-8 bytes, formatted as "sha256:<lowercase hex>".
    private static string ComputeHash(string content)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        var hash = SHA256.HashData(bytes);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
    // DTOs for JSON serialization. Kept private: they mirror the in-toto statement wire shape and
    // use explicit JsonPropertyName attributes so the payload layout is independent of refactors.
    private sealed record InTotoStatementDto
    {
        [JsonPropertyName("_type")]
        public string? Type { get; init; }
        [JsonPropertyName("predicateType")]
        public string? PredicateType { get; init; }
        [JsonPropertyName("subject")]
        public InTotoSubjectDto[]? Subject { get; init; }
        [JsonPropertyName("predicate")]
        public InTotoPredicateDto? Predicate { get; init; }
    }
    private sealed record InTotoSubjectDto
    {
        [JsonPropertyName("name")]
        public string? Name { get; init; }
        [JsonPropertyName("digest")]
        public ImmutableDictionary<string, string>? Digest { get; init; }
    }
    private sealed record InTotoPredicateDto
    {
        [JsonPropertyName("manifestId")]
        public string? ManifestId { get; init; }
        [JsonPropertyName("tenant")]
        public string? Tenant { get; init; }
        [JsonPropertyName("merkleRoot")]
        public string? MerkleRoot { get; init; }
        [JsonPropertyName("itemCount")]
        public int? ItemCount { get; init; }
        [JsonPropertyName("createdAt")]
        public DateTimeOffset? CreatedAt { get; init; }
        [JsonPropertyName("metadata")]
        public ImmutableDictionary<string, string>? Metadata { get; init; }
    }
}

View File

@@ -1,8 +1,10 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Excititor.Attestation.Dsse;
using StellaOps.Excititor.Attestation.Evidence;
using StellaOps.Excititor.Attestation.Transparency;
using StellaOps.Excititor.Attestation.Verification;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Evidence;
namespace StellaOps.Excititor.Attestation.Extensions;
@@ -14,14 +16,15 @@ public static class VexAttestationServiceCollectionExtensions
services.AddSingleton<VexAttestationMetrics>();
services.AddSingleton<IVexAttestationVerifier, VexAttestationVerifier>();
services.AddSingleton<IVexAttestationClient, VexAttestationClient>();
services.AddSingleton<IVexEvidenceAttestor, VexEvidenceAttestor>();
return services;
}
    /// <summary>
    /// Registers the Rekor transparency-log HTTP client and its options with the container.
    /// </summary>
    /// <param name="services">The service collection to register into.</param>
    /// <param name="configure">Callback that configures <see cref="RekorHttpClientOptions"/>; must not be null.</param>
    /// <returns>The same <paramref name="services"/> instance for chaining.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="configure"/> is null.</exception>
    public static IServiceCollection AddVexRekorClient(this IServiceCollection services, Action<RekorHttpClientOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(configure);
        services.Configure(configure);
        // Typed client: ITransparencyLogClient resolves to RekorHttpClient with an
        // IHttpClientFactory-managed HttpClient (handler pooling, no socket exhaustion).
        services.AddHttpClient<ITransparencyLogClient, RekorHttpClient>();
        return services;
    }
}
public static IServiceCollection AddVexRekorClient(this IServiceCollection services, Action<RekorHttpClientOptions> configure)
{
ArgumentNullException.ThrowIfNull(configure);
services.Configure(configure);
services.AddHttpClient<ITransparencyLogClient, RekorHttpClient>();
return services;
}
}

View File

@@ -0,0 +1,314 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.Excititor.Core.Canonicalization;
/// <summary>
/// Canonicalizes advisory and vulnerability identifiers to a stable <see cref="VexCanonicalAdvisoryKey"/>.
/// Preserves original identifiers in the Links collection for traceability.
/// </summary>
public sealed class VexAdvisoryKeyCanonicalizer
{
    private static readonly Regex CvePattern = new(
        @"^CVE-\d{4}-\d{4,}$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    private static readonly Regex GhsaPattern = new(
        @"^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    private static readonly Regex RhsaPattern = new(
        @"^RH[A-Z]{2}-\d{4}:\d+$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    private static readonly Regex DsaPattern = new(
        @"^DSA-\d+(-\d+)?$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    private static readonly Regex UsnPattern = new(
        @"^USN-\d+(-\d+)?$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    private static readonly Regex MsrcPattern = new(
        @"^(ADV|CVE)-\d{4}-\d+$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);

    /// <summary>
    /// Produces a stable canonical key for an advisory identifier, keeping the original
    /// identifier (and any aliases) as traceability links.
    /// </summary>
    /// <param name="originalId">The advisory/vulnerability identifier as supplied at ingest time.</param>
    /// <param name="aliases">Optional alias identifiers; duplicates (case-insensitive) are dropped.</param>
    /// <returns>The canonical key, its scope, and the preserved link list (original link first).</returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="originalId"/> is null or whitespace.</exception>
    public VexCanonicalAdvisoryKey Canonicalize(string originalId, IEnumerable<string>? aliases = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(originalId);

        var trimmed = originalId.Trim();
        var upper = trimmed.ToUpperInvariant();
        var scope = DetermineScope(upper);

        var links = ImmutableArray.CreateBuilder<VexAdvisoryLink>();
        // The original identifier always leads the link list.
        links.Add(new VexAdvisoryLink(trimmed, DetermineIdType(upper), isOriginal: true));

        if (aliases is not null)
        {
            // Track seen identifiers case-insensitively so an alias equal to the original
            // (or to an earlier alias) is emitted only once.
            var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { upper };
            foreach (var alias in aliases)
            {
                if (string.IsNullOrWhiteSpace(alias))
                {
                    continue;
                }

                var candidate = alias.Trim();
                if (seen.Add(candidate.ToUpperInvariant()))
                {
                    links.Add(new VexAdvisoryLink(
                        candidate,
                        DetermineIdType(candidate.ToUpperInvariant()),
                        isOriginal: false));
                }
            }
        }

        return new VexCanonicalAdvisoryKey(BuildCanonicalKey(upper, scope), scope, links.ToImmutable());
    }

    /// <summary>
    /// Returns the first alias that matches the CVE pattern (uppercased), or null when none does.
    /// </summary>
    public string? ExtractCveFromAliases(IEnumerable<string>? aliases)
    {
        if (aliases is null)
        {
            return null;
        }

        foreach (var alias in aliases)
        {
            if (string.IsNullOrWhiteSpace(alias))
            {
                continue;
            }

            var candidate = alias.Trim().ToUpperInvariant();
            if (CvePattern.IsMatch(candidate))
            {
                return candidate;
            }
        }

        return null;
    }

    // Scope classification; pattern order mirrors authority (CVE > GHSA > vendor > distro).
    private static VexAdvisoryScope DetermineScope(string normalizedId) => normalizedId switch
    {
        _ when CvePattern.IsMatch(normalizedId) => VexAdvisoryScope.Global,
        _ when GhsaPattern.IsMatch(normalizedId) => VexAdvisoryScope.Ecosystem,
        _ when RhsaPattern.IsMatch(normalizedId) => VexAdvisoryScope.Vendor,
        _ when DsaPattern.IsMatch(normalizedId) || UsnPattern.IsMatch(normalizedId) => VexAdvisoryScope.Distribution,
        _ when MsrcPattern.IsMatch(normalizedId) => VexAdvisoryScope.Vendor,
        _ => VexAdvisoryScope.Unknown,
    };

    private static string BuildCanonicalKey(string normalizedId, VexAdvisoryScope scope)
    {
        // CVE is globally unique and needs no disambiguating prefix.
        if (CvePattern.IsMatch(normalizedId))
        {
            return normalizedId;
        }

        // Everything else is prefixed by scope so keys from different authorities cannot collide.
        var prefix = scope switch
        {
            VexAdvisoryScope.Ecosystem => "ECO",
            VexAdvisoryScope.Vendor => "VND",
            VexAdvisoryScope.Distribution => "DST",
            _ => "UNK",
        };
        return $"{prefix}:{normalizedId}";
    }

    // Maps an (uppercased) identifier to its lowercase type tag; first matching pattern wins.
    private static string DetermineIdType(string normalizedId) => normalizedId switch
    {
        _ when CvePattern.IsMatch(normalizedId) => "cve",
        _ when GhsaPattern.IsMatch(normalizedId) => "ghsa",
        _ when RhsaPattern.IsMatch(normalizedId) => "rhsa",
        _ when DsaPattern.IsMatch(normalizedId) => "dsa",
        _ when UsnPattern.IsMatch(normalizedId) => "usn",
        _ when MsrcPattern.IsMatch(normalizedId) => "msrc",
        _ => "other",
    };
}
/// <summary>
/// Represents a canonicalized advisory key with preserved original identifiers.
/// </summary>
public sealed record VexCanonicalAdvisoryKey
{
    public VexCanonicalAdvisoryKey(
        string advisoryKey,
        VexAdvisoryScope scope,
        ImmutableArray<VexAdvisoryLink> links)
    {
        if (string.IsNullOrWhiteSpace(advisoryKey))
        {
            throw new ArgumentException("Advisory key must be provided.", nameof(advisoryKey));
        }

        AdvisoryKey = advisoryKey.Trim();
        Scope = scope;
        // A default-constructed ImmutableArray would throw on enumeration; normalize to empty.
        Links = links.IsDefault ? ImmutableArray<VexAdvisoryLink>.Empty : links;
    }

    /// <summary>Canonical advisory key used for correlation and storage.</summary>
    public string AdvisoryKey { get; }

    /// <summary>Scope/authority level of the advisory.</summary>
    public VexAdvisoryScope Scope { get; }

    /// <summary>Original and alias identifiers, preserved for traceability.</summary>
    public ImmutableArray<VexAdvisoryLink> Links { get; }

    /// <summary>First link flagged as the ingest-time original, or null when none is.</summary>
    public string? OriginalId
    {
        get
        {
            foreach (var link in Links)
            {
                if (link.IsOriginal)
                {
                    return link.Identifier;
                }
            }

            return null;
        }
    }

    /// <summary>All identifiers that are not the ingest-time original (lazily enumerated).</summary>
    public IEnumerable<string> Aliases
    {
        get
        {
            foreach (var link in Links)
            {
                if (!link.IsOriginal)
                {
                    yield return link.Identifier;
                }
            }
        }
    }
}
/// <summary>
/// Represents a link to an original or alias advisory identifier.
/// </summary>
public sealed record VexAdvisoryLink
{
    public VexAdvisoryLink(string identifier, string type, bool isOriginal)
    {
        Identifier = Require(identifier, "Identifier must be provided.", nameof(identifier)).Trim();
        Type = Require(type, "Type must be provided.", nameof(type)).Trim().ToLowerInvariant();
        IsOriginal = isOriginal;
    }

    /// <summary>The advisory identifier value (trimmed, original casing kept).</summary>
    public string Identifier { get; }

    /// <summary>Lowercase identifier type tag (cve, ghsa, rhsa, dsa, usn, msrc, other).</summary>
    public string Type { get; }

    /// <summary>True when this link is the identifier provided at ingest time.</summary>
    public bool IsOriginal { get; }

    // Shared guard: rejects null/whitespace with the caller-supplied message and parameter name.
    private static string Require(string value, string message, string paramName)
        => string.IsNullOrWhiteSpace(value) ? throw new ArgumentException(message, paramName) : value;
}
/// <summary>
/// The scope/authority level of an advisory.
/// </summary>
public enum VexAdvisoryScope
{
    /// <summary>
    /// Unknown or unclassified scope. Also the prefix fallback ("UNK") in canonical keys.
    /// </summary>
    Unknown = 0,
    /// <summary>
    /// Global identifiers (e.g., CVE). Canonical keys for this scope carry no prefix.
    /// </summary>
    Global = 1,
    /// <summary>
    /// Ecosystem-specific identifiers (e.g., GHSA). Canonical-key prefix "ECO".
    /// </summary>
    Ecosystem = 2,
    /// <summary>
    /// Vendor-specific identifiers (e.g., RHSA, MSRC). Canonical-key prefix "VND".
    /// </summary>
    Vendor = 3,
    /// <summary>
    /// Distribution-specific identifiers (e.g., DSA, USN). Canonical-key prefix "DST".
    /// </summary>
    Distribution = 4,
}

View File

@@ -0,0 +1,479 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.Excititor.Core.Canonicalization;
/// <summary>
/// Canonicalizes product identifiers (PURL, CPE, OS package names) to a stable <see cref="VexCanonicalProductKey"/>.
/// Preserves original identifiers in the Links collection for traceability.
/// </summary>
public sealed class VexProductKeyCanonicalizer
{
    // Matches the scheme+type prefix of a package URL, e.g. "pkg:npm/...".
    private static readonly Regex PurlPattern = new(
        @"^pkg:[a-z0-9]+/",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    // Matches both CPE 2.3 ("cpe:2.3:...") and legacy URI form ("cpe:/...").
    private static readonly Regex CpePattern = new(
        @"^cpe:(2\.3:|/)",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    // RPM NEVRA format: name-[epoch:]version-release.arch
    // Release can contain dots (e.g., 1.el9), so we match until the last dot before arch.
    // Note the arch alternation is a fixed allow-list; uncommon arches fall through to Other.
    private static readonly Regex RpmNevraPattern = new(
        @"^(?<name>[a-zA-Z0-9_+-]+)-(?<epoch>\d+:)?(?<version>[^-]+)-(?<release>.+)\.(?<arch>x86_64|i686|noarch|aarch64|s390x|ppc64le|armv7hl|src)$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    // Debian packages use underscores as separators: name_version_arch or name_version
    // Must have at least one underscore to be considered a Debian package
    private static readonly Regex DebianPackagePattern = new(
        @"^(?<name>[a-z0-9][a-z0-9.+-]+)_(?<version>[^_]+)(_(?<arch>[a-z0-9-]+))?$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
    /// <summary>
    /// Canonicalizes a product identifier and extracts scope metadata.
    /// </summary>
    /// <param name="originalKey">The original product key/identifier.</param>
    /// <param name="purl">Optional PURL for the product.</param>
    /// <param name="cpe">Optional CPE for the product.</param>
    /// <param name="componentIdentifiers">Optional additional component identifiers.</param>
    /// <returns>A canonical product key with preserved original links.</returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="originalKey"/> is null or whitespace.</exception>
    public VexCanonicalProductKey Canonicalize(
        string originalKey,
        string? purl = null,
        string? cpe = null,
        IEnumerable<string>? componentIdentifiers = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(originalKey);
        // Check component identifiers for PURL if not provided directly.
        // The effective values drive scope and canonical-key selection below.
        var effectivePurl = purl ?? ExtractPurlFromIdentifiers(componentIdentifiers);
        var effectiveCpe = cpe ?? ExtractCpeFromIdentifiers(componentIdentifiers);
        var keyType = DetermineKeyType(originalKey.Trim());
        var scope = DetermineScope(originalKey.Trim(), effectivePurl, effectiveCpe);
        var canonicalKey = BuildCanonicalKey(originalKey.Trim(), effectivePurl, effectiveCpe, keyType);
        var linksBuilder = ImmutableArray.CreateBuilder<VexProductLink>();
        // Add the original key as a link. Its type tag is the lower-cased enum name
        // (e.g. "purl", "cpe", "rpmnevra", "debianpackage", "ociimage", "platform", "other").
        linksBuilder.Add(new VexProductLink(
            originalKey.Trim(),
            keyType.ToString().ToLowerInvariant(),
            isOriginal: true));
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { originalKey.Trim() };
        // Add PURL if different from original. Only the explicitly-passed purl/cpe get the
        // dedicated "purl"/"cpe" tags here; values extracted from componentIdentifiers flow
        // through the loop below and are tagged via DetermineKeyType instead.
        if (!string.IsNullOrWhiteSpace(purl) && seen.Add(purl.Trim()))
        {
            linksBuilder.Add(new VexProductLink(
                purl.Trim(),
                "purl",
                isOriginal: false));
        }
        // Add CPE if different from original
        if (!string.IsNullOrWhiteSpace(cpe) && seen.Add(cpe.Trim()))
        {
            linksBuilder.Add(new VexProductLink(
                cpe.Trim(),
                "cpe",
                isOriginal: false));
        }
        // Add component identifiers (deduplicated case-insensitively against everything above).
        if (componentIdentifiers is not null)
        {
            foreach (var identifier in componentIdentifiers)
            {
                if (string.IsNullOrWhiteSpace(identifier))
                {
                    continue;
                }
                var normalizedId = identifier.Trim();
                if (!seen.Add(normalizedId))
                {
                    continue;
                }
                var idType = DetermineKeyType(normalizedId);
                linksBuilder.Add(new VexProductLink(
                    normalizedId,
                    idType.ToString().ToLowerInvariant(),
                    isOriginal: false));
            }
        }
        return new VexCanonicalProductKey(
            canonicalKey,
            scope,
            keyType,
            linksBuilder.ToImmutable());
    }
    /// <summary>
    /// Extracts the first PURL-shaped identifier from <paramref name="identifiers"/>, or null.
    /// </summary>
    public string? ExtractPurlFromIdentifiers(IEnumerable<string>? identifiers)
    {
        if (identifiers is null)
        {
            return null;
        }
        foreach (var id in identifiers)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                continue;
            }
            if (PurlPattern.IsMatch(id.Trim()))
            {
                return id.Trim();
            }
        }
        return null;
    }
    /// <summary>
    /// Extracts the first CPE-shaped identifier from <paramref name="identifiers"/>, or null.
    /// </summary>
    public string? ExtractCpeFromIdentifiers(IEnumerable<string>? identifiers)
    {
        if (identifiers is null)
        {
            return null;
        }
        foreach (var id in identifiers)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                continue;
            }
            if (CpePattern.IsMatch(id.Trim()))
            {
                return id.Trim();
            }
        }
        return null;
    }
    // Classification order matters: PURL and CPE win over the looser NEVRA/Debian patterns,
    // which could otherwise match dash/underscore-separated strings of other kinds.
    private static VexProductKeyType DetermineKeyType(string key)
    {
        if (PurlPattern.IsMatch(key))
        {
            return VexProductKeyType.Purl;
        }
        if (CpePattern.IsMatch(key))
        {
            return VexProductKeyType.Cpe;
        }
        if (RpmNevraPattern.IsMatch(key))
        {
            return VexProductKeyType.RpmNevra;
        }
        if (DebianPackagePattern.IsMatch(key))
        {
            return VexProductKeyType.DebianPackage;
        }
        if (key.StartsWith("oci:", StringComparison.OrdinalIgnoreCase))
        {
            return VexProductKeyType.OciImage;
        }
        if (key.StartsWith("platform:", StringComparison.OrdinalIgnoreCase))
        {
            return VexProductKeyType.Platform;
        }
        return VexProductKeyType.Other;
    }
    // Scope precedence: an available PURL (even one extracted from identifiers) forces Package,
    // then CPE forces Component, regardless of what shape the raw key itself has.
    private static VexProductScope DetermineScope(string key, string? purl, string? cpe)
    {
        // PURL is the most authoritative
        if (!string.IsNullOrWhiteSpace(purl) || PurlPattern.IsMatch(key))
        {
            return VexProductScope.Package;
        }
        // CPE is next
        if (!string.IsNullOrWhiteSpace(cpe) || CpePattern.IsMatch(key))
        {
            return VexProductScope.Component;
        }
        // OS packages
        if (RpmNevraPattern.IsMatch(key) || DebianPackagePattern.IsMatch(key))
        {
            return VexProductScope.OsPackage;
        }
        // OCI images
        if (key.StartsWith("oci:", StringComparison.OrdinalIgnoreCase))
        {
            return VexProductScope.Container;
        }
        // Platforms
        if (key.StartsWith("platform:", StringComparison.OrdinalIgnoreCase))
        {
            return VexProductScope.Platform;
        }
        return VexProductScope.Unknown;
    }
    // Canonical-key preference: explicit purl > purl-shaped key > explicit cpe > cpe-shaped key,
    // then prefix-carrying types verbatim, then a type-derived prefix.
    private static string BuildCanonicalKey(string key, string? purl, string? cpe, VexProductKeyType keyType)
    {
        // Prefer PURL as canonical key
        if (!string.IsNullOrWhiteSpace(purl))
        {
            return NormalizePurl(purl.Trim());
        }
        if (PurlPattern.IsMatch(key))
        {
            return NormalizePurl(key);
        }
        // Fall back to CPE
        if (!string.IsNullOrWhiteSpace(cpe))
        {
            return NormalizeCpe(cpe.Trim());
        }
        if (CpePattern.IsMatch(key))
        {
            return NormalizeCpe(key);
        }
        // For types that already have their prefix, return as-is
        if (keyType == VexProductKeyType.OciImage && key.StartsWith("oci:", StringComparison.OrdinalIgnoreCase))
        {
            return key;
        }
        if (keyType == VexProductKeyType.Platform && key.StartsWith("platform:", StringComparison.OrdinalIgnoreCase))
        {
            return key;
        }
        // For other types, prefix for disambiguation. (The OciImage/Platform arms here are
        // effectively unreachable: those key types are only assigned when the prefix is present,
        // which the early returns above already handled.)
        var prefix = keyType switch
        {
            VexProductKeyType.RpmNevra => "rpm",
            VexProductKeyType.DebianPackage => "deb",
            VexProductKeyType.OciImage => "oci",
            VexProductKeyType.Platform => "platform",
            _ => "product",
        };
        return $"{prefix}:{key}";
    }
    // Lowercases only the scheme; the remainder of the PURL is left untouched.
    private static string NormalizePurl(string purl)
    {
        // Ensure lowercase scheme
        if (purl.StartsWith("PKG:", StringComparison.OrdinalIgnoreCase))
        {
            return "pkg:" + purl.Substring(4);
        }
        return purl;
    }
    // Lowercases only the scheme; the remainder of the CPE is left untouched.
    private static string NormalizeCpe(string cpe)
    {
        // Ensure lowercase scheme
        if (cpe.StartsWith("CPE:", StringComparison.OrdinalIgnoreCase))
        {
            return "cpe:" + cpe.Substring(4);
        }
        return cpe;
    }
}
/// <summary>
/// Represents a canonicalized product key with preserved original identifiers.
/// </summary>
public sealed record VexCanonicalProductKey
{
    public VexCanonicalProductKey(
        string productKey,
        VexProductScope scope,
        VexProductKeyType keyType,
        ImmutableArray<VexProductLink> links)
    {
        if (string.IsNullOrWhiteSpace(productKey))
        {
            throw new ArgumentException("Product key must be provided.", nameof(productKey));
        }

        ProductKey = productKey.Trim();
        Scope = scope;
        KeyType = keyType;
        // A default-constructed ImmutableArray would throw on enumeration; normalize to empty.
        Links = links.IsDefault ? ImmutableArray<VexProductLink>.Empty : links;
    }

    /// <summary>Canonical product key used for correlation and storage.</summary>
    public string ProductKey { get; }

    /// <summary>Scope/authority level of the product identifier.</summary>
    public VexProductScope Scope { get; }

    /// <summary>Type of the canonical key.</summary>
    public VexProductKeyType KeyType { get; }

    /// <summary>Original and alias identifiers, preserved for traceability.</summary>
    public ImmutableArray<VexProductLink> Links { get; }

    /// <summary>First link flagged as the ingest-time original, or null when none is.</summary>
    public string? OriginalKey => Find(static l => l.IsOriginal);

    /// <summary>First link tagged "purl", or null when none is present.</summary>
    public string? Purl => Find(static l => l.Type == "purl");

    /// <summary>First link tagged "cpe", or null when none is present.</summary>
    public string? Cpe => Find(static l => l.Type == "cpe");

    // Linear scan helper shared by the lookup properties above.
    private string? Find(Func<VexProductLink, bool> match)
    {
        foreach (var link in Links)
        {
            if (match(link))
            {
                return link.Identifier;
            }
        }

        return null;
    }
}
/// <summary>
/// Represents a link to an original or alias product identifier.
/// </summary>
public sealed record VexProductLink
{
    public VexProductLink(string identifier, string type, bool isOriginal)
    {
        Identifier = Require(identifier, "Identifier must be provided.", nameof(identifier)).Trim();
        Type = Require(type, "Type must be provided.", nameof(type)).Trim().ToLowerInvariant();
        IsOriginal = isOriginal;
    }

    /// <summary>The product identifier value (trimmed, original casing kept).</summary>
    public string Identifier { get; }

    /// <summary>
    /// Lowercase identifier type tag — "purl", "cpe", or (presumably, when produced by the
    /// canonicalizer) a lower-cased <c>VexProductKeyType</c> name such as "rpmnevra" — confirm
    /// against the producing code before matching on specific values.
    /// </summary>
    public string Type { get; }

    /// <summary>True when this link is the identifier provided at ingest time.</summary>
    public bool IsOriginal { get; }

    // Shared guard: rejects null/whitespace with the caller-supplied message and parameter name.
    private static string Require(string value, string message, string paramName)
        => string.IsNullOrWhiteSpace(value) ? throw new ArgumentException(message, paramName) : value;
}
/// <summary>
/// The scope/authority level of a product identifier.
/// </summary>
public enum VexProductScope
{
    /// <summary>
    /// Unknown or unclassified scope.
    /// </summary>
    Unknown = 0,
    /// <summary>
    /// Package-level identifier (PURL). Highest precedence during scope resolution.
    /// </summary>
    Package = 1,
    /// <summary>
    /// Component-level identifier (CPE).
    /// </summary>
    Component = 2,
    /// <summary>
    /// OS package identifier (RPM NEVRA or Debian name_version[_arch]).
    /// </summary>
    OsPackage = 3,
    /// <summary>
    /// Container image identifier ("oci:"-prefixed keys).
    /// </summary>
    Container = 4,
    /// <summary>
    /// Platform-level identifier ("platform:"-prefixed keys).
    /// </summary>
    Platform = 5,
}
/// <summary>
/// The type of product key identifier.
/// </summary>
public enum VexProductKeyType
{
    /// <summary>
    /// Other/unknown type; fallback when no pattern matches.
    /// </summary>
    Other = 0,
    /// <summary>
    /// Package URL (PURL), e.g. "pkg:npm/...".
    /// </summary>
    Purl = 1,
    /// <summary>
    /// Common Platform Enumeration (CPE), 2.3 or legacy URI form.
    /// </summary>
    Cpe = 2,
    /// <summary>
    /// RPM NEVRA format: name-[epoch:]version-release.arch.
    /// </summary>
    RpmNevra = 3,
    /// <summary>
    /// Debian package format: name_version[_arch].
    /// </summary>
    DebianPackage = 4,
    /// <summary>
    /// OCI image reference ("oci:"-prefixed).
    /// </summary>
    OciImage = 5,
    /// <summary>
    /// Platform identifier ("platform:"-prefixed).
    /// </summary>
    Platform = 6,
}

View File

@@ -0,0 +1,187 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Service interface for creating and verifying DSSE attestations on evidence locker manifests.
/// </summary>
public interface IVexEvidenceAttestor
{
    /// <summary>
    /// Creates a DSSE attestation for the given manifest and returns the signed manifest
    /// together with the serialized envelope, its hash, an attestation id, and a timestamp.
    /// </summary>
    /// <param name="manifest">The evidence locker manifest to attest.</param>
    /// <param name="cancellationToken">Token used to cancel the signing operation.</param>
    ValueTask<VexEvidenceAttestationResult> AttestManifestAsync(
        VexLockerManifest manifest,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Verifies an attestation (a DSSE envelope serialized as JSON) against the given manifest,
    /// returning success or a failure reason plus diagnostics.
    /// </summary>
    /// <param name="manifest">The manifest the envelope is expected to describe.</param>
    /// <param name="dsseEnvelopeJson">The DSSE envelope as a JSON string.</param>
    /// <param name="cancellationToken">Token used to cancel the verification.</param>
    ValueTask<VexEvidenceVerificationResult> VerifyAttestationAsync(
        VexLockerManifest manifest,
        string dsseEnvelopeJson,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of attesting an evidence manifest.
/// </summary>
public sealed record VexEvidenceAttestationResult
{
    /// <summary>
    /// Bundles the outputs of a successful attestation run.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="signedManifest"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when any string argument is null or whitespace.</exception>
    public VexEvidenceAttestationResult(
        VexLockerManifest signedManifest,
        string dsseEnvelopeJson,
        string dsseEnvelopeHash,
        string attestationId,
        DateTimeOffset attestedAt)
    {
        ArgumentNullException.ThrowIfNull(signedManifest);
        SignedManifest = signedManifest;
        DsseEnvelopeJson = RequireText(dsseEnvelopeJson, nameof(dsseEnvelopeJson));
        DsseEnvelopeHash = RequireText(dsseEnvelopeHash, nameof(dsseEnvelopeHash));
        AttestationId = RequireText(attestationId, nameof(attestationId));
        AttestedAt = attestedAt;
    }

    /// <summary>Manifest with the attestation signature attached.</summary>
    public VexLockerManifest SignedManifest { get; }

    /// <summary>DSSE envelope serialized as JSON.</summary>
    public string DsseEnvelopeJson { get; }

    /// <summary>SHA-256 hash of the DSSE envelope.</summary>
    public string DsseEnvelopeHash { get; }

    /// <summary>Unique identifier for this attestation.</summary>
    public string AttestationId { get; }

    /// <summary>When the attestation was created.</summary>
    public DateTimeOffset AttestedAt { get; }

    // Guard: rejects null/whitespace, otherwise returns the trimmed value.
    private static string RequireText(string value, string name)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException($"{name} must be provided.", name);
        }

        return value.Trim();
    }
}
/// <summary>
/// Result of verifying an evidence attestation.
/// </summary>
public sealed record VexEvidenceVerificationResult
{
    /// <summary>
    /// Creates a verification outcome; prefer the <see cref="Success"/> / <see cref="Failure"/> factories.
    /// </summary>
    public VexEvidenceVerificationResult(
        bool isValid,
        string? failureReason = null,
        ImmutableDictionary<string, string>? diagnostics = null)
    {
        IsValid = isValid;
        FailureReason = failureReason?.Trim();
        Diagnostics = diagnostics ?? ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>Whether the attestation is valid.</summary>
    public bool IsValid { get; }

    /// <summary>Trimmed failure reason, or null when valid (or none was given).</summary>
    public string? FailureReason { get; }

    /// <summary>Additional diagnostic key/value pairs; never null, possibly empty.</summary>
    public ImmutableDictionary<string, string> Diagnostics { get; }

    /// <summary>Creates a successful outcome, optionally carrying diagnostics.</summary>
    public static VexEvidenceVerificationResult Success(ImmutableDictionary<string, string>? diagnostics = null)
        => new(isValid: true, failureReason: null, diagnostics: diagnostics);

    /// <summary>Creates a failed outcome with the given reason and optional diagnostics.</summary>
    public static VexEvidenceVerificationResult Failure(string reason, ImmutableDictionary<string, string>? diagnostics = null)
        => new(isValid: false, failureReason: reason, diagnostics: diagnostics);
}
/// <summary>
/// in-toto statement for evidence locker attestations.
/// </summary>
public sealed record VexEvidenceInTotoStatement
{
    /// <summary>in-toto statement type URI stamped into every statement.</summary>
    public const string InTotoStatementType = "https://in-toto.io/Statement/v1";

    /// <summary>Predicate type URI for evidence-locker attestations.</summary>
    public const string EvidenceLockerPredicateType = "https://stella-ops.org/attestations/evidence-locker/v1";

    public VexEvidenceInTotoStatement(
        ImmutableArray<VexEvidenceInTotoSubject> subjects,
        VexEvidenceAttestationPredicate predicate)
    {
        ArgumentNullException.ThrowIfNull(predicate);
        Subjects = subjects;
        Predicate = predicate;
        // Type URIs are fixed for this statement kind.
        Type = InTotoStatementType;
        PredicateType = EvidenceLockerPredicateType;
    }

    /// <summary>Statement type URI (always <see cref="InTotoStatementType"/>).</summary>
    public string Type { get; }

    /// <summary>Subjects covered by this statement.</summary>
    public ImmutableArray<VexEvidenceInTotoSubject> Subjects { get; }

    /// <summary>Predicate type URI (always <see cref="EvidenceLockerPredicateType"/>).</summary>
    public string PredicateType { get; }

    /// <summary>Evidence-locker predicate payload.</summary>
    public VexEvidenceAttestationPredicate Predicate { get; }
}
/// <summary>
/// Subject of an evidence locker attestation.
/// </summary>
/// <param name="Name">Subject name; the attestor passes the manifest id here.</param>
/// <param name="Digest">Digest map keyed by algorithm (e.g. "sha256") with the hex value.</param>
public sealed record VexEvidenceInTotoSubject(
    string Name,
    ImmutableDictionary<string, string> Digest);
/// <summary>
/// Predicate for evidence locker attestations.
/// </summary>
public sealed record VexEvidenceAttestationPredicate
{
    /// <summary>
    /// Creates the predicate payload; prefer <see cref="FromManifest"/> when a manifest is at hand.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when a required string is null or whitespace.</exception>
    public VexEvidenceAttestationPredicate(
        string manifestId,
        string tenant,
        string merkleRoot,
        int itemCount,
        DateTimeOffset createdAt,
        ImmutableDictionary<string, string>? metadata = null)
    {
        ManifestId = RequireText(manifestId, nameof(manifestId));
        Tenant = RequireText(tenant, nameof(tenant));
        MerkleRoot = RequireText(merkleRoot, nameof(merkleRoot));
        ItemCount = itemCount;
        CreatedAt = createdAt;
        Metadata = metadata ?? ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>Identifier of the attested manifest.</summary>
    public string ManifestId { get; }

    /// <summary>Tenant the manifest belongs to.</summary>
    public string Tenant { get; }

    /// <summary>Merkle root committed to by the manifest.</summary>
    public string MerkleRoot { get; }

    /// <summary>Number of items in the manifest.</summary>
    public int ItemCount { get; }

    /// <summary>Manifest creation timestamp.</summary>
    public DateTimeOffset CreatedAt { get; }

    /// <summary>Manifest metadata; never null, possibly empty.</summary>
    public ImmutableDictionary<string, string> Metadata { get; }

    /// <summary>Projects a <see cref="VexLockerManifest"/> into a predicate payload.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="manifest"/> is null.</exception>
    public static VexEvidenceAttestationPredicate FromManifest(VexLockerManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        return new VexEvidenceAttestationPredicate(
            manifest.ManifestId,
            manifest.Tenant,
            manifest.MerkleRoot,
            manifest.Items.Length,
            manifest.CreatedAt,
            manifest.Metadata);
    }

    // Guard: rejects null/whitespace, otherwise returns the trimmed value.
    private static string RequireText(string value, string name)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException($"{name} must be provided.", name);
        }

        return value.Trim();
    }
}

View File

@@ -0,0 +1,127 @@
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Service interface for building evidence locker payloads and Merkle manifests.
/// </summary>
public interface IVexEvidenceLockerService
{
    /// <summary>
    /// Creates an evidence snapshot item from an observation.
    /// </summary>
    /// <param name="observation">Source observation; must not be null.</param>
    /// <param name="linksetId">Linkset the evidence relates to; must be non-blank.</param>
    /// <param name="provenance">Optional provenance; implementations may substitute a default when null.</param>
    VexEvidenceSnapshotItem CreateSnapshotItem(
        VexObservation observation,
        string linksetId,
        VexEvidenceProvenance? provenance = null);

    /// <summary>
    /// Builds a locker manifest from a collection of observations.
    /// </summary>
    /// <param name="linksetIdSelector">Maps each observation to the linkset it belongs to.</param>
    /// <param name="timestamp">Manifest creation time; implementations supply the current time when null.</param>
    /// <param name="sequence">Sequence number embedded in the deterministic manifest ID.</param>
    /// <param name="isSealed">Whether the manifest should be flagged as sealed in its metadata.</param>
    VexLockerManifest BuildManifest(
        string tenant,
        IEnumerable<VexObservation> observations,
        Func<VexObservation, string> linksetIdSelector,
        DateTimeOffset? timestamp = null,
        int sequence = 1,
        bool isSealed = false);

    /// <summary>
    /// Builds a locker manifest from pre-built snapshot items.
    /// </summary>
    VexLockerManifest BuildManifest(
        string tenant,
        IEnumerable<VexEvidenceSnapshotItem> items,
        DateTimeOffset? timestamp = null,
        int sequence = 1,
        bool isSealed = false);

    /// <summary>
    /// Verifies a manifest's Merkle root against its items.
    /// </summary>
    /// <returns>True when the recomputed root matches the stored root.</returns>
    bool VerifyManifest(VexLockerManifest manifest);
}
/// <summary>
/// Default implementation of <see cref="IVexEvidenceLockerService"/>.
/// </summary>
public sealed class VexEvidenceLockerService : IVexEvidenceLockerService
{
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the service. Inject a fake <see cref="TimeProvider"/> in tests for
    /// deterministic manifest timestamps; defaults to <see cref="TimeProvider.System"/>.
    /// </summary>
    public VexEvidenceLockerService(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Creates an evidence snapshot item from an observation.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="observation"/> is null.</exception>
    /// <exception cref="ArgumentException">When <paramref name="linksetId"/> is null or blank.</exception>
    public VexEvidenceSnapshotItem CreateSnapshotItem(
        VexObservation observation,
        string linksetId,
        VexEvidenceProvenance? provenance = null)
    {
        ArgumentNullException.ThrowIfNull(observation);
        if (string.IsNullOrWhiteSpace(linksetId))
        {
            throw new ArgumentException("linksetId must be provided.", nameof(linksetId));
        }

        return new VexEvidenceSnapshotItem(
            observationId: observation.ObservationId,
            providerId: observation.ProviderId,
            contentHash: observation.Upstream.ContentHash,
            linksetId: linksetId,
            dsseEnvelopeHash: null, // Populated by OBS-54-001 once DSSE attestations are wired in.
            provenance: provenance ?? VexEvidenceProvenance.Empty);
    }

    /// <summary>
    /// Builds a locker manifest from observations, mapping each to its linkset via the selector.
    /// Null observations are skipped.
    /// </summary>
    public VexLockerManifest BuildManifest(
        string tenant,
        IEnumerable<VexObservation> observations,
        Func<VexObservation, string> linksetIdSelector,
        DateTimeOffset? timestamp = null,
        int sequence = 1,
        bool isSealed = false)
    {
        ArgumentNullException.ThrowIfNull(observations);
        ArgumentNullException.ThrowIfNull(linksetIdSelector);

        var items = observations
            .Where(o => o is not null)
            .Select(o => CreateSnapshotItem(o, linksetIdSelector(o)))
            .ToList();

        return BuildManifest(tenant, items, timestamp, sequence, isSealed);
    }

    /// <summary>
    /// Builds a locker manifest from pre-built snapshot items. When <paramref name="isSealed"/>
    /// is set, the manifest metadata carries a "sealed"="true" marker.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="items"/> is null.</exception>
    public VexLockerManifest BuildManifest(
        string tenant,
        IEnumerable<VexEvidenceSnapshotItem> items,
        DateTimeOffset? timestamp = null,
        int sequence = 1,
        bool isSealed = false)
    {
        // Fail fast on null items, consistent with the observations overload, instead of
        // letting manifest normalization silently produce an empty manifest.
        ArgumentNullException.ThrowIfNull(items);

        var ts = timestamp ?? _timeProvider.GetUtcNow();
        var manifestId = VexLockerManifest.CreateManifestId(tenant, ts, sequence);
        var metadata = isSealed
            ? System.Collections.Immutable.ImmutableDictionary<string, string>.Empty.Add("sealed", "true")
            : System.Collections.Immutable.ImmutableDictionary<string, string>.Empty;

        return new VexLockerManifest(
            tenant: tenant,
            manifestId: manifestId,
            createdAt: ts,
            items: items,
            signature: null,
            metadata: metadata);
    }

    /// <summary>
    /// Recomputes the Merkle root over the manifest's items and compares it (case-insensitively)
    /// to the stored root.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="manifest"/> is null.</exception>
    public bool VerifyManifest(VexLockerManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        var expectedRoot = VexLockerManifest.ComputeMerkleRoot(manifest.Items);
        return string.Equals(manifest.MerkleRoot, expectedRoot, StringComparison.OrdinalIgnoreCase);
    }
}

View File

@@ -0,0 +1,299 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// A single evidence entry inside a locker payload, used for sealed-mode auditing.
/// Required fields are trimmed; the provider ID is lowercased for normalization.
/// </summary>
public sealed record VexEvidenceSnapshotItem
{
    /// <summary>
    /// Creates a snapshot item; all required strings are validated and trimmed.
    /// </summary>
    /// <exception cref="ArgumentException">When a required value is null or blank.</exception>
    public VexEvidenceSnapshotItem(
        string observationId,
        string providerId,
        string contentHash,
        string linksetId,
        string? dsseEnvelopeHash = null,
        VexEvidenceProvenance? provenance = null)
    {
        ObservationId = Require(observationId, nameof(observationId));
        ProviderId = Require(providerId, nameof(providerId)).ToLowerInvariant();
        ContentHash = Require(contentHash, nameof(contentHash));
        LinksetId = Require(linksetId, nameof(linksetId));
        DsseEnvelopeHash = NormalizeOptional(dsseEnvelopeHash);
        Provenance = provenance ?? VexEvidenceProvenance.Empty;
    }

    /// <summary>
    /// The observation ID this evidence corresponds to.
    /// </summary>
    [JsonPropertyName("observationId")]
    public string ObservationId { get; }

    /// <summary>
    /// The provider that supplied this evidence (lowercased).
    /// </summary>
    [JsonPropertyName("providerId")]
    public string ProviderId { get; }

    /// <summary>
    /// SHA-256 hash of the raw observation content.
    /// </summary>
    [JsonPropertyName("contentHash")]
    public string ContentHash { get; }

    /// <summary>
    /// The linkset ID this evidence relates to.
    /// </summary>
    [JsonPropertyName("linksetId")]
    public string LinksetId { get; }

    /// <summary>
    /// Optional DSSE envelope hash when attestations are enabled; null when absent.
    /// </summary>
    [JsonPropertyName("dsseEnvelopeHash")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? DsseEnvelopeHash { get; }

    /// <summary>
    /// Provenance information for this evidence; never null.
    /// </summary>
    [JsonPropertyName("provenance")]
    public VexEvidenceProvenance Provenance { get; }

    // Rejects null/blank required values, returning the trimmed string otherwise.
    private static string Require(string value, string name)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException($"{name} must be provided.", name);
        }

        return value.Trim();
    }

    // Collapses null/blank optional values to null; trims everything else.
    private static string? NormalizeOptional(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        return value.Trim();
    }
}
/// <summary>
/// Provenance describing where an evidence item originated.
/// </summary>
public sealed record VexEvidenceProvenance
{
    /// <summary>Default provenance: plain ingest with no mirror or export-center linkage.</summary>
    public static readonly VexEvidenceProvenance Empty = new("ingest", null, null);

    /// <summary>
    /// Creates a provenance record; the source is trimmed and lowercased.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="source"/> is null or blank.</exception>
    public VexEvidenceProvenance(
        string source,
        int? mirrorGeneration = null,
        string? exportCenterManifest = null)
    {
        if (string.IsNullOrWhiteSpace(source))
        {
            throw new ArgumentException($"{nameof(source)} must be provided.", nameof(source));
        }

        Source = source.Trim().ToLowerInvariant();
        MirrorGeneration = mirrorGeneration;
        ExportCenterManifest = string.IsNullOrWhiteSpace(exportCenterManifest)
            ? null
            : exportCenterManifest.Trim();
    }

    /// <summary>
    /// Source type: "mirror" or "ingest".
    /// </summary>
    [JsonPropertyName("source")]
    public string Source { get; }

    /// <summary>
    /// Mirror generation number when source is "mirror".
    /// </summary>
    [JsonPropertyName("mirrorGeneration")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? MirrorGeneration { get; }

    /// <summary>
    /// Export center manifest hash when available.
    /// </summary>
    [JsonPropertyName("exportCenterManifest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ExportCenterManifest { get; }
}
/// <summary>
/// Locker manifest containing evidence snapshots with Merkle root for verification.
/// The constructor normalizes tenant casing, orders items deterministically, and computes
/// the Merkle root eagerly, so identical inputs always yield identical manifests.
/// </summary>
public sealed record VexLockerManifest
{
    /// <summary>
    /// Creates a manifest; items are sorted deterministically and the Merkle root is computed eagerly.
    /// </summary>
    /// <exception cref="ArgumentException">When tenant or manifestId is null or blank.</exception>
    public VexLockerManifest(
        string tenant,
        string manifestId,
        DateTimeOffset createdAt,
        IEnumerable<VexEvidenceSnapshotItem> items,
        string? signature = null,
        ImmutableDictionary<string, string>? metadata = null)
    {
        Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
        ManifestId = EnsureNotNullOrWhiteSpace(manifestId, nameof(manifestId));
        CreatedAt = createdAt.ToUniversalTime();
        Items = NormalizeItems(items);
        MerkleRoot = ComputeMerkleRoot(Items);
        Signature = TrimToNull(signature);
        Metadata = NormalizeMetadata(metadata);
    }

    /// <summary>
    /// Tenant this manifest belongs to (lowercased).
    /// </summary>
    [JsonPropertyName("tenant")]
    public string Tenant { get; }

    /// <summary>
    /// Unique manifest identifier.
    /// </summary>
    [JsonPropertyName("manifestId")]
    public string ManifestId { get; }

    /// <summary>
    /// When this manifest was created (UTC).
    /// </summary>
    [JsonPropertyName("createdAt")]
    public DateTimeOffset CreatedAt { get; }

    /// <summary>
    /// Evidence items in deterministic order (by observation ID, then provider ID).
    /// </summary>
    [JsonPropertyName("items")]
    public ImmutableArray<VexEvidenceSnapshotItem> Items { get; }

    /// <summary>
    /// Merkle root computed over item content hashes.
    /// </summary>
    [JsonPropertyName("merkleRoot")]
    public string MerkleRoot { get; }

    /// <summary>
    /// Optional DSSE signature (populated by OBS-54-001).
    /// </summary>
    [JsonPropertyName("signature")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Signature { get; }

    /// <summary>
    /// Additional metadata (e.g., sealed mode flag).
    /// </summary>
    [JsonPropertyName("metadata")]
    public ImmutableDictionary<string, string> Metadata { get; }

    /// <summary>
    /// Creates a new manifest with an attached signature. Re-runs the constructor, which
    /// recomputes the Merkle root; the root is unchanged because Items are unchanged.
    /// </summary>
    public VexLockerManifest WithSignature(string signature)
    {
        return new VexLockerManifest(
            Tenant,
            ManifestId,
            CreatedAt,
            Items,
            signature,
            Metadata);
    }

    /// <summary>
    /// Creates a deterministic manifest ID of the form
    /// "locker:excititor:{tenant}:{yyyy-MM-dd}:{sequence:D4}".
    /// </summary>
    public static string CreateManifestId(string tenant, DateTimeOffset timestamp, int sequence)
    {
        var normalizedTenant = (tenant ?? "default").Trim().ToLowerInvariant();
        var date = timestamp.ToUniversalTime().ToString("yyyy-MM-dd");
        return $"locker:excititor:{normalizedTenant}:{date}:{sequence:D4}";
    }

    /// <summary>
    /// Computes the Merkle root over the items' content hashes.
    /// Empty input yields the SHA-256 of zero bytes.
    /// </summary>
    /// <remarks>
    /// NOTE(review): assumes every ContentHash is a hex digest (optionally "sha256:"-prefixed);
    /// <see cref="Convert.FromHexString(string)"/> in the tree fold throws for non-hex hashes —
    /// confirm upstream guarantees this format.
    /// </remarks>
    public static string ComputeMerkleRoot(ImmutableArray<VexEvidenceSnapshotItem> items)
    {
        if (items.Length == 0)
        {
            return "sha256:" + Convert.ToHexString(SHA256.HashData(Array.Empty<byte>())).ToLowerInvariant();
        }

        // Strip any "sha256:" prefix so only the raw hex participates in the tree.
        var hashes = items
            .Select(i => i.ContentHash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
                ? i.ContentHash[7..]
                : i.ContentHash)
            .ToList();

        return ComputeMerkleRootFromHashes(hashes);
    }

    // Folds one tree level per recursive call until a single root hash remains.
    // Mutates its argument (padding); callers pass a private List copy.
    private static string ComputeMerkleRootFromHashes(List<string> hashes)
    {
        if (hashes.Count == 0)
        {
            return "sha256:" + Convert.ToHexString(SHA256.HashData(Array.Empty<byte>())).ToLowerInvariant();
        }

        if (hashes.Count == 1)
        {
            return "sha256:" + hashes[0].ToLowerInvariant();
        }

        // Pad to even number if necessary by duplicating the last leaf.
        if (hashes.Count % 2 != 0)
        {
            hashes.Add(hashes[^1]);
        }

        var nextLevel = new List<string>();
        for (var i = 0; i < hashes.Count; i += 2)
        {
            // Each pair is concatenated as lowercase hex, decoded to bytes, and re-hashed.
            var combined = hashes[i].ToLowerInvariant() + hashes[i + 1].ToLowerInvariant();
            var bytes = Convert.FromHexString(combined);
            var hash = SHA256.HashData(bytes);
            nextLevel.Add(Convert.ToHexString(hash).ToLowerInvariant());
        }

        return ComputeMerkleRootFromHashes(nextLevel);
    }

    // Sorts by observationId, then providerId for deterministic ordering.
    // Null entries are dropped; duplicates are NOT removed.
    private static ImmutableArray<VexEvidenceSnapshotItem> NormalizeItems(IEnumerable<VexEvidenceSnapshotItem>? items)
    {
        if (items is null)
        {
            return ImmutableArray<VexEvidenceSnapshotItem>.Empty;
        }

        return items
            .Where(i => i is not null)
            .OrderBy(i => i.ObservationId, StringComparer.Ordinal)
            .ThenBy(i => i.ProviderId, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
    }

    // Trims keys/values and drops entries where either becomes blank.
    // NOTE(review): ImmutableDictionary does not preserve insertion order, so the OrderBy
    // here has no observable effect — confirm whether ordered output was intended.
    private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata)
    {
        if (metadata is null || metadata.Count == 0)
        {
            return ImmutableDictionary<string, string>.Empty;
        }

        var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
        foreach (var pair in metadata.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            var key = TrimToNull(pair.Key);
            var value = TrimToNull(pair.Value);
            if (key is null || value is null)
            {
                continue;
            }

            builder[key] = value;
        }

        return builder.ToImmutable();
    }

    // Rejects null/blank required strings; returns the trimmed value otherwise.
    private static string EnsureNotNullOrWhiteSpace(string value, string name)
        => string.IsNullOrWhiteSpace(value) ? throw new ArgumentException($"{name} must be provided.", name) : value.Trim();

    // Collapses null/blank optional strings to null; trims everything else.
    private static string? TrimToNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Publishes vex.linkset.updated events to downstream consumers.
/// Implementations may persist to MongoDB, publish to NATS, or both.
/// </summary>
public interface IVexLinksetEventPublisher
{
    /// <summary>
    /// Publishes a single linkset updated event.
    /// </summary>
    /// <param name="event">Event payload to publish.</param>
    /// <param name="cancellationToken">Token observed while publishing.</param>
    Task PublishAsync(VexLinksetUpdatedEvent @event, CancellationToken cancellationToken);

    /// <summary>
    /// Publishes multiple linkset updated events in a batch.
    /// </summary>
    /// <remarks>
    /// Ordering and partial-failure semantics are implementation-defined — confirm with the concrete publisher.
    /// </remarks>
    Task PublishManyAsync(IEnumerable<VexLinksetUpdatedEvent> events, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,96 @@
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Persistence abstraction for VEX linksets with tenant-isolated operations.
/// Linksets correlate observations and capture conflict annotations.
/// </summary>
public interface IVexLinksetStore
{
    /// <summary>
    /// Persists a new linkset. Returns true if inserted, false if it already exists.
    /// </summary>
    ValueTask<bool> InsertAsync(
        VexLinkset linkset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Persists or updates a linkset. Returns true if inserted, false if updated.
    /// </summary>
    ValueTask<bool> UpsertAsync(
        VexLinkset linkset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves a linkset by tenant and linkset ID; null when not found.
    /// </summary>
    ValueTask<VexLinkset?> GetByIdAsync(
        string tenant,
        string linksetId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves or creates a linkset for the given vulnerability and product key.
    /// </summary>
    ValueTask<VexLinkset> GetOrCreateAsync(
        string tenant,
        string vulnerabilityId,
        string productKey,
        CancellationToken cancellationToken);

    /// <summary>
    /// Finds linksets by vulnerability ID, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(
        string tenant,
        string vulnerabilityId,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Finds linksets by product key, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(
        string tenant,
        string productKey,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Finds linksets that have disagreements (conflicts), returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(
        string tenant,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Finds linksets by provider ID, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<VexLinkset>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes a linkset by tenant and linkset ID. Returns true if deleted.
    /// </summary>
    ValueTask<bool> DeleteAsync(
        string tenant,
        string linksetId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns the count of linksets for the specified tenant.
    /// </summary>
    ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns the count of linksets with conflicts for the specified tenant.
    /// </summary>
    ValueTask<long> CountWithConflictsAsync(
        string tenant,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,70 @@
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Persistence abstraction for VEX observations with tenant-isolated write operations.
/// </summary>
public interface IVexObservationStore
{
    /// <summary>
    /// Persists a new observation. Returns true if inserted, false if it already exists.
    /// </summary>
    ValueTask<bool> InsertAsync(
        VexObservation observation,
        CancellationToken cancellationToken);

    /// <summary>
    /// Persists or updates an observation. Returns true if inserted, false if updated.
    /// </summary>
    ValueTask<bool> UpsertAsync(
        VexObservation observation,
        CancellationToken cancellationToken);

    /// <summary>
    /// Persists multiple observations in a batch. Returns the count of newly inserted
    /// observations (pre-existing ones are not counted).
    /// </summary>
    ValueTask<int> InsertManyAsync(
        string tenant,
        IEnumerable<VexObservation> observations,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves an observation by tenant and observation ID; null when not found.
    /// </summary>
    ValueTask<VexObservation?> GetByIdAsync(
        string tenant,
        string observationId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves observations for a specific vulnerability and product key.
    /// </summary>
    ValueTask<IReadOnlyList<VexObservation>> FindByVulnerabilityAndProductAsync(
        string tenant,
        string vulnerabilityId,
        string productKey,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves observations by provider, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<VexObservation>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes an observation by tenant and observation ID. Returns true if deleted.
    /// </summary>
    ValueTask<bool> DeleteAsync(
        string tenant,
        string observationId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns the count of observations for the specified tenant.
    /// </summary>
    ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,129 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Service interface for emitting timeline events during ingest/linkset operations.
/// Implementations should emit events asynchronously without blocking the main operation.
/// </summary>
public interface IVexTimelineEventEmitter
{
    /// <summary>
    /// Emits a timeline event for an observation ingest operation.
    /// </summary>
    /// <param name="traceId">Correlation ID linking this event to the originating operation.</param>
    /// <param name="evidenceHash">Hash of the ingested evidence payload.</param>
    /// <param name="justificationSummary">Human-readable summary attached to the event.</param>
    /// <param name="attributes">Optional extra attributes; see <see cref="VexTimelineEventAttributes"/> for well-known keys.</param>
    ValueTask EmitObservationIngestAsync(
        string tenant,
        string providerId,
        string streamId,
        string traceId,
        string observationId,
        string evidenceHash,
        string justificationSummary,
        ImmutableDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits a timeline event for a linkset update operation.
    /// </summary>
    /// <param name="payloadHash">Hash of the updated linkset payload.</param>
    ValueTask EmitLinksetUpdateAsync(
        string tenant,
        string providerId,
        string streamId,
        string traceId,
        string linksetId,
        string vulnerabilityId,
        string productKey,
        string payloadHash,
        string justificationSummary,
        ImmutableDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits a timeline event for a generic operation.
    /// </summary>
    ValueTask EmitAsync(
        TimelineEvent evt,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits multiple timeline events in a batch for the given tenant.
    /// </summary>
    ValueTask EmitBatchAsync(
        string tenant,
        IEnumerable<TimelineEvent> events,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Well-known timeline event types for Excititor operations.
/// Names follow the "vex.&lt;entity&gt;.&lt;action&gt;" convention.
/// </summary>
public static class VexTimelineEventTypes
{
    /// <summary>An observation was ingested.</summary>
    public const string ObservationIngested = "vex.observation.ingested";

    /// <summary>An observation was updated.</summary>
    public const string ObservationUpdated = "vex.observation.updated";

    /// <summary>An observation was superseded by another.</summary>
    public const string ObservationSuperseded = "vex.observation.superseded";

    /// <summary>A linkset was created.</summary>
    public const string LinksetCreated = "vex.linkset.created";

    /// <summary>A linkset was updated with new observations.</summary>
    public const string LinksetUpdated = "vex.linkset.updated";

    /// <summary>A linkset conflict was detected.</summary>
    public const string LinksetConflictDetected = "vex.linkset.conflict_detected";

    /// <summary>A linkset conflict was resolved.</summary>
    public const string LinksetConflictResolved = "vex.linkset.conflict_resolved";

    /// <summary>Evidence was sealed to the locker.</summary>
    public const string EvidenceSealed = "vex.evidence.sealed";

    /// <summary>An attestation was attached.</summary>
    public const string AttestationAttached = "vex.attestation.attached";

    /// <summary>An attestation was verified.</summary>
    public const string AttestationVerified = "vex.attestation.verified";
}
/// <summary>
/// Well-known attribute keys for timeline events. Keys use snake_case to match the
/// serialized attribute map.
/// </summary>
public static class VexTimelineEventAttributes
{
    /// <summary>Key carrying the observation identifier.</summary>
    public const string ObservationId = "observation_id";

    /// <summary>Key carrying the linkset identifier.</summary>
    public const string LinksetId = "linkset_id";

    /// <summary>Key carrying the vulnerability identifier.</summary>
    public const string VulnerabilityId = "vulnerability_id";

    /// <summary>Key carrying the product key.</summary>
    public const string ProductKey = "product_key";

    /// <summary>Key carrying a VEX status value.</summary>
    public const string Status = "status";

    /// <summary>Key carrying the detected conflict type.</summary>
    public const string ConflictType = "conflict_type";

    /// <summary>Key carrying the attestation identifier.</summary>
    public const string AttestationId = "attestation_id";

    /// <summary>Key carrying the ID of the superseding observation.</summary>
    public const string SupersededBy = "superseded_by";

    /// <summary>Key carrying the ID of the superseded observation.</summary>
    public const string Supersedes = "supersedes";

    /// <summary>Key carrying the number of observations involved.</summary>
    public const string ObservationCount = "observation_count";

    /// <summary>Key carrying the number of conflicts involved.</summary>
    public const string ConflictCount = "conflict_count";
}

View File

@@ -0,0 +1,92 @@
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Persistence abstraction for VEX timeline events.
/// Timeline events capture ingest/linkset changes with trace IDs, justification summaries,
/// and evidence hashes so downstream systems can replay raw facts chronologically.
/// </summary>
public interface IVexTimelineEventStore
{
    /// <summary>
    /// Persists a new timeline event. Returns the event ID if successful.
    /// </summary>
    ValueTask<string> InsertAsync(
        TimelineEvent evt,
        CancellationToken cancellationToken);

    /// <summary>
    /// Persists multiple timeline events in a batch. Returns the count of successfully inserted events.
    /// </summary>
    ValueTask<int> InsertManyAsync(
        string tenant,
        IEnumerable<TimelineEvent> events,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves timeline events for a tenant within a time range, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<TimelineEvent>> FindByTimeRangeAsync(
        string tenant,
        DateTimeOffset from,
        DateTimeOffset to,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves timeline events by trace ID for correlation.
    /// </summary>
    ValueTask<IReadOnlyList<TimelineEvent>> FindByTraceIdAsync(
        string tenant,
        string traceId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves timeline events by provider ID, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<TimelineEvent>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves timeline events by event type (see <see cref="VexTimelineEventTypes"/>),
    /// returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<TimelineEvent>> FindByEventTypeAsync(
        string tenant,
        string eventType,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves the most recent timeline events for a tenant, returning at most <paramref name="limit"/> entries.
    /// </summary>
    ValueTask<IReadOnlyList<TimelineEvent>> GetRecentAsync(
        string tenant,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Retrieves a single timeline event by ID; null when not found.
    /// </summary>
    ValueTask<TimelineEvent?> GetByIdAsync(
        string tenant,
        string eventId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns the count of timeline events for the specified tenant.
    /// </summary>
    ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns the count of timeline events for the specified tenant within a time range.
    /// </summary>
    ValueTask<long> CountInRangeAsync(
        string tenant,
        DateTimeOffset from,
        DateTimeOffset to,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,298 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Represents a VEX linkset correlating multiple observations for a specific
/// vulnerability and product key. Linksets capture disagreements (conflicts)
/// between providers without deciding a winner.
/// </summary>
public sealed record VexLinkset
{
    /// <summary>
    /// Creates a linkset; observation refs and disagreements are normalized
    /// (trimmed, sorted, de-duplicated, confidences clamped to [0, 1]).
    /// </summary>
    /// <exception cref="ArgumentException">When a required identifier is null or blank.</exception>
    public VexLinkset(
        string linksetId,
        string tenant,
        string vulnerabilityId,
        string productKey,
        IEnumerable<VexLinksetObservationRefModel> observations,
        IEnumerable<VexObservationDisagreement>? disagreements = null,
        DateTimeOffset? createdAt = null,
        DateTimeOffset? updatedAt = null)
    {
        LinksetId = VexObservation.EnsureNotNullOrWhiteSpace(linksetId, nameof(linksetId));
        Tenant = VexObservation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
        VulnerabilityId = VexObservation.EnsureNotNullOrWhiteSpace(vulnerabilityId, nameof(vulnerabilityId));
        ProductKey = VexObservation.EnsureNotNullOrWhiteSpace(productKey, nameof(productKey));
        Observations = NormalizeObservations(observations);
        Disagreements = NormalizeDisagreements(disagreements);
        // Falls back to the wall clock when timestamps are not supplied.
        CreatedAt = (createdAt ?? DateTimeOffset.UtcNow).ToUniversalTime();
        UpdatedAt = (updatedAt ?? CreatedAt).ToUniversalTime();
    }

    /// <summary>
    /// Unique identifier for this linkset. Typically a SHA256 hash over
    /// (tenant, vulnerabilityId, productKey) for deterministic addressing.
    /// </summary>
    public string LinksetId { get; }

    /// <summary>
    /// Tenant identifier (normalized to lowercase).
    /// </summary>
    public string Tenant { get; }

    /// <summary>
    /// The vulnerability identifier (CVE, GHSA, vendor ID).
    /// </summary>
    public string VulnerabilityId { get; }

    /// <summary>
    /// Product key (typically a PURL or CPE).
    /// </summary>
    public string ProductKey { get; }

    /// <summary>
    /// References to observations that contribute to this linkset.
    /// </summary>
    public ImmutableArray<VexLinksetObservationRefModel> Observations { get; }

    /// <summary>
    /// Conflict annotations capturing disagreements between providers.
    /// </summary>
    public ImmutableArray<VexObservationDisagreement> Disagreements { get; }

    /// <summary>
    /// When this linkset was first created (UTC).
    /// </summary>
    public DateTimeOffset CreatedAt { get; }

    /// <summary>
    /// When this linkset was last updated (UTC).
    /// </summary>
    public DateTimeOffset UpdatedAt { get; }

    /// <summary>
    /// Distinct provider IDs contributing to this linkset, sorted case-insensitively.
    /// Recomputed (and a new list allocated) on every access.
    /// </summary>
    public IReadOnlyList<string> ProviderIds =>
        Observations.Select(o => o.ProviderId)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
            .ToList();

    /// <summary>
    /// Distinct statuses observed in this linkset, sorted case-insensitively.
    /// Recomputed (and a new list allocated) on every access.
    /// </summary>
    public IReadOnlyList<string> Statuses =>
        Observations.Select(o => o.Status)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(s => s, StringComparer.OrdinalIgnoreCase)
            .ToList();

    /// <summary>
    /// Whether this linkset contains disagreements (conflicts).
    /// (Disagreements is never default after construction, so the length check alone decides.)
    /// </summary>
    public bool HasConflicts => !Disagreements.IsDefaultOrEmpty && Disagreements.Length > 0;

    /// <summary>
    /// Confidence level derived from the linkset state: Low for conflicts, divergent
    /// statuses, or no observations; High when two or more providers agree; Medium otherwise.
    /// </summary>
    public VexLinksetConfidence Confidence
    {
        get
        {
            // Any recorded disagreement forces Low.
            if (HasConflicts)
            {
                return VexLinksetConfidence.Low;
            }

            // Nothing observed yet — cannot be confident.
            if (Observations.Length == 0)
            {
                return VexLinksetConfidence.Low;
            }

            // Providers reporting different statuses also forces Low.
            var distinctStatuses = Statuses.Count;
            if (distinctStatuses > 1)
            {
                return VexLinksetConfidence.Low;
            }

            // Same status from multiple independent providers = High.
            var distinctProviders = ProviderIds.Count;
            if (distinctProviders >= 2)
            {
                return VexLinksetConfidence.High;
            }

            return VexLinksetConfidence.Medium;
        }
    }

    /// <summary>
    /// Creates a deterministic linkset ID: "sha256:" + SHA-256 over
    /// "{tenant}|{vulnerabilityId}|{productKey}" (tenant lowercased, all parts trimmed).
    /// </summary>
    public static string CreateLinksetId(string tenant, string vulnerabilityId, string productKey)
    {
        var normalizedTenant = (tenant ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedVuln = (vulnerabilityId ?? string.Empty).Trim();
        var normalizedProduct = (productKey ?? string.Empty).Trim();
        var input = $"{normalizedTenant}|{normalizedVuln}|{normalizedProduct}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Creates a new linkset with the given observations and disagreements.
    /// CreatedAt is preserved; UpdatedAt is set to the current time.
    /// </summary>
    public VexLinkset WithObservations(
        IEnumerable<VexLinksetObservationRefModel> observations,
        IEnumerable<VexObservationDisagreement>? disagreements = null)
    {
        return new VexLinkset(
            LinksetId,
            Tenant,
            VulnerabilityId,
            ProductKey,
            observations,
            disagreements,
            CreatedAt,
            DateTimeOffset.UtcNow);
    }

    // Drops null/blank entries, clamps confidence to [0, 1], and stores refs in a
    // SortedSet keyed by VexLinksetObservationRefComparer (defined elsewhere): the set
    // both orders entries deterministically and collapses ones the comparer ranks equal.
    private static ImmutableArray<VexLinksetObservationRefModel> NormalizeObservations(
        IEnumerable<VexLinksetObservationRefModel>? observations)
    {
        if (observations is null)
        {
            return ImmutableArray<VexLinksetObservationRefModel>.Empty;
        }

        var set = new SortedSet<VexLinksetObservationRefModel>(VexLinksetObservationRefComparer.Instance);
        foreach (var item in observations)
        {
            if (item is null)
            {
                continue;
            }

            var obsId = VexObservation.TrimToNull(item.ObservationId);
            var provider = VexObservation.TrimToNull(item.ProviderId);
            var status = VexObservation.TrimToNull(item.Status);
            if (obsId is null || provider is null || status is null)
            {
                continue;
            }

            double? clamped = item.Confidence is null ? null : Math.Clamp(item.Confidence.Value, 0.0, 1.0);
            set.Add(new VexLinksetObservationRefModel(obsId, provider, status, clamped));
        }

        return set.Count == 0 ? ImmutableArray<VexLinksetObservationRefModel>.Empty : set.ToImmutableArray();
    }

    // Same normalization approach as observations: trims, clamps confidence, and uses a
    // SortedSet with DisagreementComparer to order and de-duplicate deterministically.
    private static ImmutableArray<VexObservationDisagreement> NormalizeDisagreements(
        IEnumerable<VexObservationDisagreement>? disagreements)
    {
        if (disagreements is null)
        {
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        var set = new SortedSet<VexObservationDisagreement>(DisagreementComparer.Instance);
        foreach (var disagreement in disagreements)
        {
            if (disagreement is null)
            {
                continue;
            }

            var normalizedProvider = VexObservation.TrimToNull(disagreement.ProviderId);
            var normalizedStatus = VexObservation.TrimToNull(disagreement.Status);
            if (normalizedProvider is null || normalizedStatus is null)
            {
                continue;
            }

            var normalizedJustification = VexObservation.TrimToNull(disagreement.Justification);
            double? clampedConfidence = disagreement.Confidence is null
                ? null
                : Math.Clamp(disagreement.Confidence.Value, 0.0, 1.0);
            set.Add(new VexObservationDisagreement(
                normalizedProvider,
                normalizedStatus,
                normalizedJustification,
                clampedConfidence));
        }

        return set.Count == 0 ? ImmutableArray<VexObservationDisagreement>.Empty : set.ToImmutableArray();
    }

    // Total order over disagreements: provider, then status, then justification
    // (case-insensitive), then confidence. Nulls sort first.
    private sealed class DisagreementComparer : IComparer<VexObservationDisagreement>
    {
        public static readonly DisagreementComparer Instance = new();

        public int Compare(VexObservationDisagreement? x, VexObservationDisagreement? y)
        {
            if (ReferenceEquals(x, y))
            {
                return 0;
            }

            if (x is null)
            {
                return -1;
            }

            if (y is null)
            {
                return 1;
            }

            var providerCompare = StringComparer.OrdinalIgnoreCase.Compare(x.ProviderId, y.ProviderId);
            if (providerCompare != 0)
            {
                return providerCompare;
            }

            var statusCompare = StringComparer.OrdinalIgnoreCase.Compare(x.Status, y.Status);
            if (statusCompare != 0)
            {
                return statusCompare;
            }

            var justificationCompare = StringComparer.OrdinalIgnoreCase.Compare(
                x.Justification ?? string.Empty,
                y.Justification ?? string.Empty);
            if (justificationCompare != 0)
            {
                return justificationCompare;
            }

            return Nullable.Compare(x.Confidence, y.Confidence);
        }
    }
}
/// <summary>
/// Confidence level for a linkset based on agreement between providers.
/// Values are ordered from least (Low) to most (High) confident.
/// </summary>
public enum VexLinksetConfidence
{
    /// <summary>
    /// Low confidence: conflicts exist, statuses diverge, or there are no observations.
    /// </summary>
    Low,

    /// <summary>
    /// Medium confidence: single provider or consistent observations.
    /// </summary>
    Medium,

    /// <summary>
    /// High confidence: multiple providers agree.
    /// </summary>
    High
}

View File

@@ -0,0 +1,221 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Computes disagreements (conflicts) from VEX observations without choosing winners.
/// Excititor remains aggregation-only; downstream consumers use disagreements to highlight
/// conflicts and apply their own decision rules (AOC-19-002).
/// </summary>
public sealed class VexLinksetDisagreementService
{
    /// <summary>
    /// Analyzes observations and returns disagreements where providers report different
    /// statuses or justifications for the same vulnerability/product combination.
    /// </summary>
    /// <param name="observations">Observations to analyze; a null sequence yields an empty result.</param>
    /// <returns>De-duplicated entries ordered by provider, then status (case-insensitive).</returns>
    public ImmutableArray<VexObservationDisagreement> ComputeDisagreements(
        IEnumerable<VexObservation> observations)
    {
        if (observations is null)
        {
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        var observationList = observations
            .Where(o => o is not null)
            .ToList();

        // A single observation cannot disagree with anything.
        if (observationList.Count < 2)
        {
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        // Group statements by (vulnerabilityId, productKey) so conflicts are detected per
        // vulnerability/product pair rather than across unrelated statements.
        var groups = observationList
            .SelectMany(obs => obs.Statements.Select(stmt => (obs, stmt)))
            .GroupBy(x => new
            {
                VulnerabilityId = Normalize(x.stmt.VulnerabilityId),
                ProductKey = Normalize(x.stmt.ProductKey)
            });

        var disagreements = new List<VexObservationDisagreement>();
        foreach (var group in groups)
        {
            var groupDisagreements = DetectGroupDisagreements(group.ToList());
            disagreements.AddRange(groupDisagreements);
        }

        // NOTE(review): DisagreementComparer ignores Confidence, so entries that differ only
        // by confidence collapse to the first one encountered.
        return disagreements
            .Distinct(DisagreementComparer.Instance)
            .OrderBy(d => d.ProviderId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(d => d.Status, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
    }

    /// <summary>
    /// Analyzes observations for a specific linkset and returns disagreements.
    /// </summary>
    /// <param name="observationRefs">Observation references on the linkset; null yields an empty result.</param>
    public ImmutableArray<VexObservationDisagreement> ComputeDisagreementsForLinkset(
        IEnumerable<VexLinksetObservationRefModel> observationRefs)
    {
        if (observationRefs is null)
        {
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        var refList = observationRefs
            .Where(r => r is not null)
            .ToList();

        if (refList.Count < 2)
        {
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        // Group by normalized (trimmed + lower-cased) status to detect conflicts.
        var statusGroups = refList
            .GroupBy(r => Normalize(r.Status))
            .ToDictionary(g => g.Key, g => g.ToList(), StringComparer.OrdinalIgnoreCase);

        if (statusGroups.Count <= 1)
        {
            // All providers agree on status.
            return ImmutableArray<VexObservationDisagreement>.Empty;
        }

        // Multiple statuses = disagreement.
        // Generate disagreement entries for each provider-status combination.
        var disagreements = refList
            .Select(r => new VexObservationDisagreement(
                providerId: r.ProviderId,
                status: r.Status,
                justification: null,
                confidence: ComputeConfidence(r.Status, statusGroups)))
            .Distinct(DisagreementComparer.Instance)
            .OrderBy(d => d.ProviderId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(d => d.Status, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        return disagreements;
    }

    /// <summary>
    /// Updates a linkset with computed disagreements based on its observations.
    /// Returns a new linkset with updated disagreements.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="linkset"/> is null.</exception>
    public VexLinkset UpdateLinksetDisagreements(VexLinkset linkset)
    {
        ArgumentNullException.ThrowIfNull(linkset);
        var disagreements = ComputeDisagreementsForLinkset(linkset.Observations);
        return linkset.WithObservations(
            linkset.Observations,
            disagreements);
    }

    /// <summary>
    /// Detects disagreements inside one (vulnerability, product) group of statements.
    /// Emits one entry per provider when the providers' statuses differ.
    /// </summary>
    private static IEnumerable<VexObservationDisagreement> DetectGroupDisagreements(
        List<(VexObservation obs, VexObservationStatement stmt)> group)
    {
        if (group.Count < 2)
        {
            yield break;
        }

        // Group by provider to get unique provider perspectives.
        // NOTE(review): only each provider's first statement in input order is considered;
        // a provider with multiple conflicting statements self-disagreeing is not surfaced —
        // confirm this is intended.
        var byProvider = group
            .GroupBy(x => Normalize(x.obs.ProviderId))
            .Select(g => new
            {
                ProviderId = g.Key,
                Status = Normalize(g.First().stmt.Status.ToString()),
                Justification = g.First().stmt.Justification?.ToString()
            })
            .ToList();

        // Count how many providers back each status.
        var statusCounts = byProvider
            .GroupBy(p => p.Status, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase);

        // If all providers agree on status, no disagreement.
        if (statusCounts.Count <= 1)
        {
            yield break;
        }

        // Multiple statuses = disagreement: report each provider's position, with
        // confidence = fraction of providers sharing that provider's status.
        var totalProviders = byProvider.Count;
        foreach (var provider in byProvider)
        {
            var statusCount = statusCounts[provider.Status];
            var confidence = (double)statusCount / totalProviders;
            yield return new VexObservationDisagreement(
                providerId: provider.ProviderId,
                status: provider.Status,
                justification: provider.Justification,
                confidence: confidence);
        }
    }

    /// <summary>
    /// Confidence for a status = fraction of observation refs reporting that status.
    /// </summary>
    private static double ComputeConfidence(
        string status,
        Dictionary<string, List<VexLinksetObservationRefModel>> statusGroups)
    {
        var totalCount = statusGroups.Values.Sum(g => g.Count);
        if (totalCount == 0)
        {
            return 0.0;
        }

        // BUGFIX: keys in statusGroups are normalized (trimmed + lower-cased); normalize the
        // probe the same way. The OrdinalIgnoreCase comparer handles case but NOT whitespace,
        // so a padded status previously fell through to 0.0 confidence.
        if (statusGroups.TryGetValue(Normalize(status), out var group))
        {
            return (double)group.Count / totalCount;
        }
        return 0.0;
    }

    // Canonical form used for grouping: trimmed, lower-invariant; null/whitespace -> "".
    private static string Normalize(string value)
    {
        return string.IsNullOrWhiteSpace(value)
            ? string.Empty
            : value.Trim().ToLowerInvariant();
    }

    /// <summary>
    /// Equality on (ProviderId, Status, Justification), case-insensitive; Confidence is
    /// intentionally excluded so duplicates differing only by confidence de-duplicate.
    /// </summary>
    private sealed class DisagreementComparer : IEqualityComparer<VexObservationDisagreement>
    {
        public static readonly DisagreementComparer Instance = new();

        public bool Equals(VexObservationDisagreement? x, VexObservationDisagreement? y)
        {
            if (ReferenceEquals(x, y))
            {
                return true;
            }
            if (x is null || y is null)
            {
                return false;
            }
            return string.Equals(x.ProviderId, y.ProviderId, StringComparison.OrdinalIgnoreCase)
                && string.Equals(x.Status, y.Status, StringComparison.OrdinalIgnoreCase)
                && string.Equals(x.Justification, y.Justification, StringComparison.OrdinalIgnoreCase);
        }

        public int GetHashCode(VexObservationDisagreement obj)
        {
            var hash = new HashCode();
            hash.Add(obj.ProviderId, StringComparer.OrdinalIgnoreCase);
            hash.Add(obj.Status, StringComparer.OrdinalIgnoreCase);
            hash.Add(obj.Justification, StringComparer.OrdinalIgnoreCase);
            return hash.ToHashCode();
        }
    }
}

View File

@@ -0,0 +1,418 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Excititor.Core.Orchestration;
/// <summary>
/// Client interface for the orchestrator worker SDK.
/// Emits heartbeats, progress, and artifact hashes for deterministic, restartable ingestion.
/// All members accept a <see cref="CancellationToken"/> as the final parameter.
/// </summary>
public interface IVexWorkerOrchestratorClient
{
    /// <summary>
    /// Creates a new job context for a provider run.
    /// </summary>
    /// <param name="checkpoint">Optional checkpoint token to resume from; null starts fresh.</param>
    ValueTask<VexWorkerJobContext> StartJobAsync(
        string tenant,
        string connectorId,
        string? checkpoint,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Emits a heartbeat for the given job.
    /// </summary>
    ValueTask SendHeartbeatAsync(
        VexWorkerJobContext context,
        VexWorkerHeartbeat heartbeat,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Records an artifact produced during the job.
    /// </summary>
    ValueTask RecordArtifactAsync(
        VexWorkerJobContext context,
        VexWorkerArtifact artifact,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Marks the job as completed successfully.
    /// </summary>
    ValueTask CompleteJobAsync(
        VexWorkerJobContext context,
        VexWorkerJobResult result,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Marks the job as failed.
    /// </summary>
    /// <param name="retryAfterSeconds">Suggested delay before a retry attempt; null when no hint applies.</param>
    ValueTask FailJobAsync(
        VexWorkerJobContext context,
        string errorCode,
        string? errorMessage,
        int? retryAfterSeconds,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Marks the job as failed with a classified error.
    /// </summary>
    ValueTask FailJobAsync(
        VexWorkerJobContext context,
        VexWorkerError error,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Polls for pending commands from the orchestrator.
    /// Returns null when no command is pending.
    /// </summary>
    ValueTask<VexWorkerCommand?> GetPendingCommandAsync(
        VexWorkerJobContext context,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Acknowledges that a command has been processed.
    /// </summary>
    /// <param name="commandSequence">Sequence number of the command being acknowledged.</param>
    ValueTask AcknowledgeCommandAsync(
        VexWorkerJobContext context,
        long commandSequence,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Saves a checkpoint for resumable ingestion.
    /// </summary>
    ValueTask SaveCheckpointAsync(
        VexWorkerJobContext context,
        VexWorkerCheckpoint checkpoint,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Loads the most recent checkpoint for a connector.
    /// Returns null when the connector has no saved checkpoint.
    /// </summary>
    ValueTask<VexWorkerCheckpoint?> LoadCheckpointAsync(
        string connectorId,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for an active worker job. Carries run identity plus a monotonically
/// increasing heartbeat sequence number.
/// </summary>
public sealed record VexWorkerJobContext
{
    // Backing field for the heartbeat sequence; updated atomically so concurrent
    // heartbeat emitters cannot observe duplicate or skipped sequence numbers
    // (the previous `++Sequence` was a non-atomic read-modify-write).
    private long _sequence;

    /// <exception cref="ArgumentException">
    /// Thrown when <paramref name="tenant"/> or <paramref name="connectorId"/> is null or whitespace.
    /// </exception>
    public VexWorkerJobContext(
        string tenant,
        string connectorId,
        Guid runId,
        string? checkpoint,
        DateTimeOffset startedAt)
    {
        Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant));
        ConnectorId = EnsureNotNullOrWhiteSpace(connectorId, nameof(connectorId));
        RunId = runId;
        Checkpoint = checkpoint?.Trim();
        StartedAt = startedAt;
    }

    /// <summary>Owning tenant identifier (trimmed, never empty).</summary>
    public string Tenant { get; }

    /// <summary>Connector identifier (trimmed, never empty).</summary>
    public string ConnectorId { get; }

    /// <summary>Unique identifier of this run.</summary>
    public Guid RunId { get; }

    /// <summary>Checkpoint the run resumed from, if any (trimmed).</summary>
    public string? Checkpoint { get; }

    /// <summary>Timestamp the run started.</summary>
    public DateTimeOffset StartedAt { get; }

    /// <summary>
    /// Current sequence number for heartbeats.
    /// </summary>
    public long Sequence => Volatile.Read(ref _sequence);

    /// <summary>
    /// Atomically increments and returns the next sequence number.
    /// </summary>
    public long NextSequence() => Interlocked.Increment(ref _sequence);

    private static string EnsureNotNullOrWhiteSpace(string value, string name)
        => string.IsNullOrWhiteSpace(value) ? throw new ArgumentException($"{name} must be provided.", name) : value.Trim();
}
/// <summary>
/// Heartbeat status for orchestrator reporting.
/// </summary>
public enum VexWorkerHeartbeatStatus
{
    /// <summary>Job is initializing and has not reported progress yet.</summary>
    Starting,
    /// <summary>Job is actively processing.</summary>
    Running,
    /// <summary>Job is paused awaiting a resume command.</summary>
    Paused,
    /// <summary>Job is operating under throttling constraints.</summary>
    Throttled,
    /// <summary>Job is performing a backfill run.</summary>
    Backfill,
    /// <summary>Job terminated with an error.</summary>
    Failed,
    /// <summary>Job completed successfully.</summary>
    Succeeded
}
/// <summary>
/// Heartbeat payload for orchestrator.
/// </summary>
/// <param name="Status">Current worker lifecycle status being reported.</param>
/// <param name="Progress">Optional progress value; presumably percent complete — confirm against the orchestrator contract.</param>
/// <param name="QueueDepth">Optional count of items still queued for processing.</param>
/// <param name="LastArtifactHash">Hash of the most recently recorded artifact, if any.</param>
/// <param name="LastArtifactKind">Kind of the most recently recorded artifact, if any.</param>
/// <param name="ErrorCode">Error code when reporting a failure or degraded state.</param>
/// <param name="RetryAfterSeconds">Suggested retry delay when throttled or failed.</param>
public sealed record VexWorkerHeartbeat(
    VexWorkerHeartbeatStatus Status,
    int? Progress,
    int? QueueDepth,
    string? LastArtifactHash,
    string? LastArtifactKind,
    string? ErrorCode,
    int? RetryAfterSeconds);
/// <summary>
/// Artifact produced during ingestion.
/// </summary>
/// <param name="Hash">Content hash identifying the artifact.</param>
/// <param name="Kind">Artifact kind label.</param>
/// <param name="ProviderId">Provider that produced the artifact, if known.</param>
/// <param name="DocumentId">Source document identifier, if known.</param>
/// <param name="CreatedAt">Timestamp the artifact was produced.</param>
/// <param name="Metadata">Optional additional key/value metadata; null treated as empty.</param>
public sealed record VexWorkerArtifact(
    string Hash,
    string Kind,
    string? ProviderId,
    string? DocumentId,
    DateTimeOffset CreatedAt,
    ImmutableDictionary<string, string>? Metadata = null);
/// <summary>
/// Result of a completed worker job.
/// </summary>
/// <param name="DocumentsProcessed">Number of documents processed during the run.</param>
/// <param name="ClaimsGenerated">Number of claims generated during the run.</param>
/// <param name="LastCheckpoint">Checkpoint token reached at completion, if any.</param>
/// <param name="LastArtifactHash">Hash of the final artifact produced, if any.</param>
/// <param name="CompletedAt">Timestamp the job finished.</param>
/// <param name="Metadata">Optional additional key/value metadata; null treated as empty.</param>
public sealed record VexWorkerJobResult(
    int DocumentsProcessed,
    int ClaimsGenerated,
    string? LastCheckpoint,
    string? LastArtifactHash,
    DateTimeOffset CompletedAt,
    ImmutableDictionary<string, string>? Metadata = null);
/// <summary>
/// Commands issued by the orchestrator to control worker behavior.
/// Delivered to workers via <see cref="IVexWorkerOrchestratorClient.GetPendingCommandAsync"/>.
/// </summary>
public enum VexWorkerCommandKind
{
    /// <summary>
    /// Continue normal processing.
    /// </summary>
    Continue,
    /// <summary>
    /// Pause processing until resumed.
    /// </summary>
    Pause,
    /// <summary>
    /// Resume after a pause.
    /// </summary>
    Resume,
    /// <summary>
    /// Apply throttling constraints.
    /// </summary>
    Throttle,
    /// <summary>
    /// Retry the current operation.
    /// </summary>
    Retry,
    /// <summary>
    /// Abort the current job.
    /// </summary>
    Abort
}
/// <summary>
/// Command received from the orchestrator.
/// </summary>
/// <param name="Kind">The control action being requested.</param>
/// <param name="Sequence">Sequence number used when acknowledging the command.</param>
/// <param name="IssuedAt">Timestamp the orchestrator issued the command.</param>
/// <param name="ExpiresAt">Optional expiry after which the command should be ignored.</param>
/// <param name="Throttle">Throttle parameters; expected non-null only for <see cref="VexWorkerCommandKind.Throttle"/>.</param>
/// <param name="Reason">Optional human-readable explanation for the command.</param>
public sealed record VexWorkerCommand(
    VexWorkerCommandKind Kind,
    long Sequence,
    DateTimeOffset IssuedAt,
    DateTimeOffset? ExpiresAt,
    VexWorkerThrottleParams? Throttle,
    string? Reason);
/// <summary>
/// Throttle parameters issued with a throttle command.
/// Null members mean "no constraint specified" for that dimension.
/// </summary>
/// <param name="RequestsPerMinute">Maximum sustained request rate.</param>
/// <param name="BurstLimit">Maximum burst size above the sustained rate.</param>
/// <param name="CooldownSeconds">Seconds to back off before resuming full speed.</param>
public sealed record VexWorkerThrottleParams(
    int? RequestsPerMinute,
    int? BurstLimit,
    int? CooldownSeconds);
/// <summary>
/// Classification of errors for orchestrator reporting.
/// Used by <see cref="VexWorkerError"/> to label failures for triage.
/// </summary>
public enum VexWorkerErrorCategory
{
    /// <summary>
    /// Unknown or unclassified error.
    /// </summary>
    Unknown,
    /// <summary>
    /// Transient network or connectivity issues.
    /// </summary>
    Network,
    /// <summary>
    /// Authentication or authorization failure.
    /// </summary>
    Authorization,
    /// <summary>
    /// Rate limiting or throttling by upstream.
    /// </summary>
    RateLimited,
    /// <summary>
    /// Invalid or malformed data from upstream.
    /// </summary>
    DataFormat,
    /// <summary>
    /// Upstream service unavailable.
    /// </summary>
    ServiceUnavailable,
    /// <summary>
    /// Internal processing error.
    /// </summary>
    Internal,
    /// <summary>
    /// Configuration or setup error.
    /// </summary>
    Configuration,
    /// <summary>
    /// Operation cancelled.
    /// </summary>
    Cancelled,
    /// <summary>
    /// Operation timed out.
    /// </summary>
    Timeout
}
/// <summary>
/// Classified error for orchestrator reporting. Immutable; use the static factories for
/// common categories and <see cref="FromException"/> to classify caught exceptions.
/// </summary>
public sealed record VexWorkerError
{
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="code"/> is null.</exception>
    public VexWorkerError(
        string code,
        VexWorkerErrorCategory category,
        string message,
        bool retryable,
        int? retryAfterSeconds = null,
        string? stage = null,
        ImmutableDictionary<string, string>? details = null)
    {
        Code = code ?? throw new ArgumentNullException(nameof(code));
        Category = category;
        Message = message ?? string.Empty;
        Retryable = retryable;
        RetryAfterSeconds = retryAfterSeconds;
        Stage = stage;
        Details = details ?? ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>Stable machine-readable error code (e.g. "NETWORK_ERROR").</summary>
    public string Code { get; }

    /// <summary>Broad category used for triage and routing.</summary>
    public VexWorkerErrorCategory Category { get; }

    /// <summary>Human-readable message; never null (defaults to empty).</summary>
    public string Message { get; }

    /// <summary>Whether the operation may be retried.</summary>
    public bool Retryable { get; }

    /// <summary>Suggested delay before retrying, when retryable.</summary>
    public int? RetryAfterSeconds { get; }

    /// <summary>
    /// Pipeline stage where the error occurred, if known.
    /// `init` so <see cref="FromException"/> can attach the stage via a with-expression.
    /// </summary>
    public string? Stage { get; init; }

    /// <summary>Additional structured context; never null (defaults to empty).</summary>
    public ImmutableDictionary<string, string> Details { get; }

    /// <summary>
    /// Creates a transient network error.
    /// </summary>
    public static VexWorkerError Network(string message, int? retryAfterSeconds = 30)
        => new("NETWORK_ERROR", VexWorkerErrorCategory.Network, message, retryable: true, retryAfterSeconds);
    /// <summary>
    /// Creates an authorization error.
    /// </summary>
    public static VexWorkerError Authorization(string message)
        => new("AUTH_ERROR", VexWorkerErrorCategory.Authorization, message, retryable: false);
    /// <summary>
    /// Creates a rate-limited error.
    /// </summary>
    public static VexWorkerError RateLimited(string message, int retryAfterSeconds)
        => new("RATE_LIMITED", VexWorkerErrorCategory.RateLimited, message, retryable: true, retryAfterSeconds);
    /// <summary>
    /// Creates a service unavailable error.
    /// </summary>
    public static VexWorkerError ServiceUnavailable(string message, int? retryAfterSeconds = 60)
        => new("SERVICE_UNAVAILABLE", VexWorkerErrorCategory.ServiceUnavailable, message, retryable: true, retryAfterSeconds);
    /// <summary>
    /// Creates a data format error.
    /// </summary>
    public static VexWorkerError DataFormat(string message)
        => new("DATA_FORMAT_ERROR", VexWorkerErrorCategory.DataFormat, message, retryable: false);
    /// <summary>
    /// Creates an internal error.
    /// </summary>
    public static VexWorkerError Internal(string message)
        => new("INTERNAL_ERROR", VexWorkerErrorCategory.Internal, message, retryable: false);
    /// <summary>
    /// Creates a timeout error.
    /// </summary>
    public static VexWorkerError Timeout(string message, int? retryAfterSeconds = 30)
        => new("TIMEOUT", VexWorkerErrorCategory.Timeout, message, retryable: true, retryAfterSeconds);
    /// <summary>
    /// Creates a cancelled error.
    /// </summary>
    public static VexWorkerError Cancelled(string message)
        => new("CANCELLED", VexWorkerErrorCategory.Cancelled, message, retryable: false);

    /// <summary>
    /// Classifies an exception into an appropriate error.
    /// Matches most-specific HTTP status conditions first, then falls back by exception type.
    /// </summary>
    /// <param name="stage">Optional pipeline stage attached to the result.</param>
    public static VexWorkerError FromException(Exception ex, string? stage = null)
    {
        var error = ex switch
        {
            OperationCanceledException => Cancelled(ex.Message),
            TimeoutException => Timeout(ex.Message),
            System.Net.Http.HttpRequestException httpEx when httpEx.StatusCode == System.Net.HttpStatusCode.TooManyRequests
                => RateLimited(ex.Message, 60),
            System.Net.Http.HttpRequestException httpEx when httpEx.StatusCode == System.Net.HttpStatusCode.Unauthorized
                || httpEx.StatusCode == System.Net.HttpStatusCode.Forbidden
                => Authorization(ex.Message),
            System.Net.Http.HttpRequestException httpEx when httpEx.StatusCode == System.Net.HttpStatusCode.ServiceUnavailable
                || httpEx.StatusCode == System.Net.HttpStatusCode.BadGateway
                || httpEx.StatusCode == System.Net.HttpStatusCode.GatewayTimeout
                => ServiceUnavailable(ex.Message),
            System.Net.Http.HttpRequestException => Network(ex.Message),
            System.Net.Sockets.SocketException => Network(ex.Message),
            System.IO.IOException => Network(ex.Message),
            System.Text.Json.JsonException => DataFormat(ex.Message),
            FormatException => DataFormat(ex.Message),
            InvalidOperationException => Internal(ex.Message),
            _ => new VexWorkerError("UNKNOWN_ERROR", VexWorkerErrorCategory.Unknown, ex.Message, retryable: false)
        };

        // BUGFIX: previously only the UNKNOWN_ERROR fallback carried the stage; every
        // factory-produced branch silently dropped it. Attach it uniformly here.
        return stage is null ? error : error with { Stage = stage };
    }
}
/// <summary>
/// Checkpoint state for resumable ingestion.
/// </summary>
/// <param name="ConnectorId">Connector the checkpoint belongs to.</param>
/// <param name="Cursor">Opaque provider cursor to resume from, if any.</param>
/// <param name="LastProcessedAt">Timestamp of the last processed item, if any.</param>
/// <param name="ProcessedDigests">Digests already processed — presumably consulted to skip duplicates on resume; confirm with consumers.</param>
/// <param name="ResumeTokens">Provider-specific resume tokens keyed by name.</param>
public sealed record VexWorkerCheckpoint(
    string ConnectorId,
    string? Cursor,
    DateTimeOffset? LastProcessedAt,
    ImmutableArray<string> ProcessedDigests,
    ImmutableDictionary<string, string> ResumeTokens)
{
    /// <summary>Creates an empty checkpoint (fresh start) for <paramref name="connectorId"/>.</summary>
    public static VexWorkerCheckpoint Empty(string connectorId) => new(
        connectorId,
        Cursor: null,
        LastProcessedAt: null,
        ProcessedDigests: ImmutableArray<string>.Empty,
        ResumeTokens: ImmutableDictionary<string, string>.Empty);
}

View File

@@ -124,7 +124,16 @@ public sealed class OpenVexExporter : IVexExporter
SourceUri: source.DocumentSource.ToString(),
Detail: source.Detail,
FirstObserved: source.FirstSeen.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
LastObserved: source.LastSeen.UtcDateTime.ToString("O", CultureInfo.InvariantCulture)))
LastObserved: source.LastSeen.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
// VEX Lens enrichment fields
IssuerHint: source.IssuerHint,
SignatureType: source.SignatureType,
KeyId: source.KeyId,
TransparencyLogRef: source.TransparencyLogRef,
TrustWeight: source.TrustWeight,
TrustTier: source.TrustTier,
StalenessSeconds: source.StalenessSeconds,
ProductTreeSnippet: source.ProductTreeSnippet))
.ToImmutableArray();
var statementId = FormattableString.Invariant($"{statement.VulnerabilityId}#{NormalizeProductKey(statement.Product.Key)}");
@@ -200,6 +209,9 @@ internal sealed record OpenVexExportProduct(
[property: JsonPropertyName("purl"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? Purl,
[property: JsonPropertyName("cpe"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? Cpe);
/// <summary>
/// OpenVEX source entry with VEX Lens enrichment fields for consensus computation.
/// </summary>
internal sealed record OpenVexExportSource(
[property: JsonPropertyName("provider")] string Provider,
[property: JsonPropertyName("status")] string Status,
@@ -208,7 +220,16 @@ internal sealed record OpenVexExportSource(
[property: JsonPropertyName("source_uri")] string SourceUri,
[property: JsonPropertyName("detail"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? Detail,
[property: JsonPropertyName("first_observed")] string FirstObserved,
[property: JsonPropertyName("last_observed")] string LastObserved);
[property: JsonPropertyName("last_observed")] string LastObserved,
// VEX Lens enrichment fields for consensus without callback to Excititor
[property: JsonPropertyName("issuer_hint"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? IssuerHint,
[property: JsonPropertyName("signature_type"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? SignatureType,
[property: JsonPropertyName("key_id"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? KeyId,
[property: JsonPropertyName("transparency_log_ref"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? TransparencyLogRef,
[property: JsonPropertyName("trust_weight"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] decimal? TrustWeight,
[property: JsonPropertyName("trust_tier"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? TrustTier,
[property: JsonPropertyName("staleness_seconds"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] long? StalenessSeconds,
[property: JsonPropertyName("product_tree_snippet"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? ProductTreeSnippet);
internal sealed record OpenVexExportMetadata(
[property: JsonPropertyName("generated_at")] string GeneratedAt,

View File

@@ -169,17 +169,60 @@ public static class OpenVexStatementMerger
private static ImmutableArray<OpenVexSourceEntry> BuildSources(ImmutableArray<VexClaim> claims)
{
var builder = ImmutableArray.CreateBuilder<OpenVexSourceEntry>(claims.Length);
var now = DateTimeOffset.UtcNow;
foreach (var claim in claims)
{
// Extract VEX Lens enrichment from signature metadata
var signature = claim.Document.Signature;
var trust = signature?.Trust;
// Compute staleness from trust metadata retrieval time or last seen
long? stalenessSeconds = null;
if (trust?.RetrievedAtUtc is { } retrievedAt)
{
stalenessSeconds = (long)Math.Ceiling((now - retrievedAt).TotalSeconds);
}
else if (signature?.VerifiedAt is { } verifiedAt)
{
stalenessSeconds = (long)Math.Ceiling((now - verifiedAt).TotalSeconds);
}
// Extract product tree snippet from additional metadata (if present)
string? productTreeSnippet = null;
if (claim.AdditionalMetadata.TryGetValue("csaf.product_tree", out var productTree))
{
productTreeSnippet = productTree;
}
// Derive trust tier from issuer or provider type
string? trustTier = null;
if (trust is not null)
{
trustTier = trust.TenantOverrideApplied ? "tenant-override" : DeriveIssuerTier(trust.IssuerId);
}
else if (claim.AdditionalMetadata.TryGetValue("issuer.tier", out var tier))
{
trustTier = tier;
}
builder.Add(new OpenVexSourceEntry(
claim.ProviderId,
claim.Status,
claim.Justification,
claim.Document.Digest,
claim.Document.SourceUri,
claim.Detail,
claim.FirstSeen,
claim.LastSeen));
providerId: claim.ProviderId,
status: claim.Status,
justification: claim.Justification,
documentDigest: claim.Document.Digest,
documentSource: claim.Document.SourceUri,
detail: claim.Detail,
firstSeen: claim.FirstSeen,
lastSeen: claim.LastSeen,
issuerHint: signature?.Issuer ?? signature?.Subject,
signatureType: signature?.Type,
keyId: signature?.KeyId,
transparencyLogRef: signature?.TransparencyLogReference,
trustWeight: trust?.EffectiveWeight,
trustTier: trustTier,
stalenessSeconds: stalenessSeconds,
productTreeSnippet: productTreeSnippet));
}
return builder
@@ -189,6 +232,34 @@ public static class OpenVexStatementMerger
.ToImmutableArray();
}
/// <summary>
/// Maps an issuer identifier onto a coarse trust tier via heuristic substring matching.
/// Returns null for null/whitespace input; otherwise one of
/// "vendor", "distro-trusted", "community", or "other". Order matters: vendor
/// patterns win over distro patterns when both match.
/// </summary>
private static string? DeriveIssuerTier(string issuerId)
{
    if (string.IsNullOrWhiteSpace(issuerId))
    {
        return null;
    }

    // Case-insensitive probes; avoids the ToLowerInvariant() allocation (CA1862).
    bool Has(string token) => issuerId.Contains(token, StringComparison.OrdinalIgnoreCase);

    if (Has("vendor") || Has("upstream"))
    {
        return "vendor";
    }
    if (Has("distro") || Has("rhel") || Has("ubuntu") || Has("debian"))
    {
        return "distro-trusted";
    }
    if (Has("community") || Has("oss"))
    {
        return "community";
    }
    return "other";
}
private static VexProduct MergeProduct(ImmutableArray<VexClaim> claims)
{
var key = claims[0].Product.Key;
@@ -266,17 +337,85 @@ public sealed record OpenVexMergedStatement(
DateTimeOffset FirstObserved,
DateTimeOffset LastObserved);
public sealed record OpenVexSourceEntry(
string ProviderId,
VexClaimStatus Status,
VexJustification? Justification,
string DocumentDigest,
Uri DocumentSource,
string? Detail,
DateTimeOffset FirstSeen,
DateTimeOffset LastSeen)
/// <summary>
/// Represents a merged VEX source entry with enrichment for VEX Lens consumption.
/// </summary>
public sealed record OpenVexSourceEntry
{
    // NOTE: a stray duplicate DocumentDigest member with a self-referencing initializer
    // (merge residue) was removed — it could not compile (duplicate member, CS0102);
    // the constructor below is the single point of validation.
    /// <exception cref="ArgumentException">Thrown when <paramref name="documentDigest"/> is null or whitespace.</exception>
    public OpenVexSourceEntry(
        string providerId,
        VexClaimStatus status,
        VexJustification? justification,
        string documentDigest,
        Uri documentSource,
        string? detail,
        DateTimeOffset firstSeen,
        DateTimeOffset lastSeen,
        string? issuerHint = null,
        string? signatureType = null,
        string? keyId = null,
        string? transparencyLogRef = null,
        decimal? trustWeight = null,
        string? trustTier = null,
        long? stalenessSeconds = null,
        string? productTreeSnippet = null)
    {
        if (string.IsNullOrWhiteSpace(documentDigest))
        {
            throw new ArgumentException("Document digest must be provided.", nameof(documentDigest));
        }

        ProviderId = providerId;
        Status = status;
        Justification = justification;
        DocumentDigest = documentDigest.Trim();
        DocumentSource = documentSource;
        Detail = detail;
        FirstSeen = firstSeen;
        LastSeen = lastSeen;

        // VEX Lens enrichment fields: whitespace-only values normalize to null.
        IssuerHint = string.IsNullOrWhiteSpace(issuerHint) ? null : issuerHint.Trim();
        SignatureType = string.IsNullOrWhiteSpace(signatureType) ? null : signatureType.Trim();
        KeyId = string.IsNullOrWhiteSpace(keyId) ? null : keyId.Trim();
        TransparencyLogRef = string.IsNullOrWhiteSpace(transparencyLogRef) ? null : transparencyLogRef.Trim();
        TrustWeight = trustWeight;
        TrustTier = string.IsNullOrWhiteSpace(trustTier) ? null : trustTier.Trim();
        StalenessSeconds = stalenessSeconds;
        ProductTreeSnippet = string.IsNullOrWhiteSpace(productTreeSnippet) ? null : productTreeSnippet.Trim();
    }

    public string ProviderId { get; }
    public VexClaimStatus Status { get; }
    public VexJustification? Justification { get; }
    /// <summary>Trimmed, never empty — validated in the constructor.</summary>
    public string DocumentDigest { get; }
    public Uri DocumentSource { get; }
    public string? Detail { get; }
    public DateTimeOffset FirstSeen { get; }
    public DateTimeOffset LastSeen { get; }

    // VEX Lens enrichment fields for consensus computation
    /// <summary>Issuer identity/hint (e.g., vendor name, distro-trusted) for trust weighting.</summary>
    public string? IssuerHint { get; }
    /// <summary>Cryptographic signature type (jws, pgp, cosign, etc.).</summary>
    public string? SignatureType { get; }
    /// <summary>Key identifier used for signature verification.</summary>
    public string? KeyId { get; }
    /// <summary>Transparency log reference (e.g., Rekor URL) for attestation verification.</summary>
    public string? TransparencyLogRef { get; }
    /// <summary>Trust weight (0-1) from issuer directory for consensus calculation.</summary>
    public decimal? TrustWeight { get; }
    /// <summary>Trust tier label (vendor, distro-trusted, community, etc.).</summary>
    public string? TrustTier { get; }
    /// <summary>Seconds since the document was last verified/retrieved.</summary>
    public long? StalenessSeconds { get; }
    /// <summary>Product tree snippet (JSON) from CSAF documents for product matching.</summary>
    public string? ProductTreeSnippet { get; }
}

View File

@@ -17,17 +17,17 @@ public interface IVexProviderStore
ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
/// <summary>
/// Persistence contract for consensus documents.
/// (This interface appeared twice — merge/diff residue causing a duplicate-type error
/// (CS0101); collapsed to a single declaration.)
/// </summary>
public interface IVexConsensusStore
{
    ValueTask<VexConsensus?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask<IReadOnlyCollection<VexConsensus>> FindByVulnerabilityAsync(string vulnerabilityId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    // Default implementation keeps existing stores compiling until they add scan support.
    IAsyncEnumerable<VexConsensus> FindCalculatedBeforeAsync(DateTimeOffset cutoff, int batchSize, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        => throw new NotSupportedException();
}
public interface IVexClaimStore
{
@@ -44,7 +44,12 @@ public sealed record VexConnectorState(
DateTimeOffset? LastSuccessAt,
int FailureCount,
DateTimeOffset? NextEligibleRun,
string? LastFailureReason)
string? LastFailureReason,
DateTimeOffset? LastHeartbeatAt = null,
string? LastHeartbeatStatus = null,
string? LastArtifactHash = null,
string? LastArtifactKind = null,
string? LastCheckpoint = null)
{
public VexConnectorState(
string connectorId,
@@ -58,30 +63,35 @@ public sealed record VexConnectorState(
LastSuccessAt: null,
FailureCount: 0,
NextEligibleRun: null,
LastFailureReason: null)
LastFailureReason: null,
LastHeartbeatAt: null,
LastHeartbeatStatus: null,
LastArtifactHash: null,
LastArtifactKind: null,
LastCheckpoint: null)
{
}
}
/// <summary>
/// Persistence contract for connector run state.
/// (Both this interface and <see cref="IVexConsensusHoldStore"/> appeared twice —
/// merge/diff residue causing duplicate-type errors (CS0101); collapsed to single
/// declarations.)
/// </summary>
public interface IVexConnectorStateRepository
{
    ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask<IReadOnlyCollection<VexConnectorState>> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
/// <summary>
/// Persistence contract for consensus holds keyed by (vulnerabilityId, productKey).
/// </summary>
public interface IVexConsensusHoldStore
{
    ValueTask<VexConsensusHold?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask SaveAsync(VexConsensusHold hold, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    ValueTask RemoveAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
    IAsyncEnumerable<VexConsensusHold> FindEligibleAsync(DateTimeOffset asOf, int batchSize, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public interface IVexCacheIndex
{

View File

@@ -0,0 +1,137 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Excititor.Storage.Mongo.Migrations;
/// <summary>
/// Adds idempotency indexes to the vex_raw collection to enforce content-addressed storage.
/// Ensures that:
/// 1. Each document is uniquely identified by its content digest
/// 2. Provider+Source combinations are unique per digest
/// 3. Supports efficient queries for evidence retrieval
/// </summary>
/// <remarks>
/// Rollback: drop all three indexes created here —
/// db.vex_raw.dropIndex("idx_provider_sourceUri_digest_unique"),
/// db.vex_raw.dropIndex("idx_digest_providerId"), and
/// db.vex_raw.dropIndex("idx_retrievedAt") — or call
/// RollbackIdempotencyIndexesAsync, which removes the same set.
/// </remarks>
internal sealed class VexRawIdempotencyIndexMigration : IVexMongoMigration
{
    // Date-stamped id; presumably consumed by the migration runner for ordering — confirm.
    public string Id => "20251127-vex-raw-idempotency-indexes";
    public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);
        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw);
        // Index 1: Unique constraint on providerId + sourceUri + digest
        // Ensures the same document from the same provider/source is only stored once
        var providerSourceDigestIndex = new BsonDocument
        {
            { "providerId", 1 },
            { "sourceUri", 1 },
            { "digest", 1 }
        };
        var uniqueIndexModel = new CreateIndexModel<BsonDocument>(
            providerSourceDigestIndex,
            new CreateIndexOptions
            {
                Unique = true,
                Name = "idx_provider_sourceUri_digest_unique",
                // NOTE(review): Background is ignored by MongoDB 4.2+ and obsolete in
                // recent drivers — harmless, but consider removing.
                Background = true
            });
        // Index 2: Compound index for efficient evidence queries by digest + provider
        var digestProviderIndex = new BsonDocument
        {
            { "digest", 1 },
            { "providerId", 1 }
        };
        var queryIndexModel = new CreateIndexModel<BsonDocument>(
            digestProviderIndex,
            new CreateIndexOptions
            {
                Name = "idx_digest_providerId",
                Background = true
            });
        // Index 3: plain ascending index on retrievedAt. Despite the "TTL candidate" intent,
        // no ExpireAfter is set, so this does NOT expire documents — it only supports
        // staleness queries. Add ExpireAfter in a future migration if TTL is wanted.
        var retrievedAtIndex = new BsonDocument
        {
            { "retrievedAt", 1 }
        };
        var retrievedAtIndexModel = new CreateIndexModel<BsonDocument>(
            retrievedAtIndex,
            new CreateIndexOptions
            {
                Name = "idx_retrievedAt",
                Background = true
            });
        // Create all indexes in a single round trip.
        await collection.Indexes.CreateManyAsync(
            new[] { uniqueIndexModel, queryIndexModel, retrievedAtIndexModel },
            cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Extension methods for idempotency index management.
/// </summary>
public static class VexRawIdempotencyIndexExtensions
{
    // All index names managed by VexRawIdempotencyIndexMigration.
    private static readonly string[] ManagedIndexNames =
    {
        "idx_provider_sourceUri_digest_unique",
        "idx_digest_providerId",
        "idx_retrievedAt"
    };

    /// <summary>
    /// Drops the idempotency indexes (for rollback). Indexes that are already
    /// absent are skipped silently.
    /// </summary>
    public static async Task RollbackIdempotencyIndexesAsync(
        this IMongoDatabase database,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw);

        foreach (var name in ManagedIndexNames)
        {
            try
            {
                await collection.Indexes.DropOneAsync(name, cancellationToken).ConfigureAwait(false);
            }
            catch (MongoCommandException ex) when (ex.CodeName == "IndexNotFound")
            {
                // Already gone — nothing to roll back for this index.
            }
        }
    }

    /// <summary>
    /// Returns <c>true</c> when both mandatory idempotency indexes are present
    /// on the vex_raw collection.
    /// </summary>
    public static async Task<bool> VerifyIdempotencyIndexesExistAsync(
        this IMongoDatabase database,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw);
        var cursor = await collection.Indexes.ListAsync(cancellationToken).ConfigureAwait(false);
        var documents = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);

        var hasUniqueIndex = false;
        var hasQueryIndex = false;

        foreach (var document in documents)
        {
            var name = document.GetValue("name", "").AsString;
            hasUniqueIndex |= name == "idx_provider_sourceUri_digest_unique";
            hasQueryIndex |= name == "idx_digest_providerId";
        }

        return hasUniqueIndex && hasQueryIndex;
    }
}

View File

@@ -25,15 +25,17 @@ internal sealed class VexRawSchemaMigration : IVexMongoMigration
if (!exists)
{
await database.CreateCollectionAsync(
VexMongoCollectionNames.Raw,
new CreateCollectionOptions
{
Validator = validator,
ValidationAction = DocumentValidationAction.Warn,
ValidationLevel = DocumentValidationLevel.Moderate,
},
cancellationToken).ConfigureAwait(false);
// In MongoDB.Driver 3.x, CreateCollectionOptions doesn't support Validator directly.
// Use the create command instead.
var createCommand = new BsonDocument
{
{ "create", VexMongoCollectionNames.Raw },
{ "validator", validator },
{ "validationAction", "warn" },
{ "validationLevel", "moderate" }
};
await database.RunCommandAsync<BsonDocument>(createCommand, cancellationToken: cancellationToken)
.ConfigureAwait(false);
return;
}

View File

@@ -0,0 +1,71 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
namespace StellaOps.Excititor.Storage.Mongo.Migrations;
/// <summary>
/// Migration that creates indexes for the vex.timeline_events collection.
/// </summary>
internal sealed class VexTimelineEventIndexMigration : IVexMongoMigration
{
    public string Id => "20251127-timeline-events";

    /// <summary>
    /// Creates the query indexes for the vex.timeline_events collection.
    /// </summary>
    public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        var collection = database.GetCollection<VexTimelineEventRecord>(VexMongoCollectionNames.TimelineEvents);
        var keys = Builders<VexTimelineEventRecord>.IndexKeys;

        // (tenant, id) — unique event identity per tenant.
        var identity = new CreateIndexModel<VexTimelineEventRecord>(
            keys.Ascending(x => x.Tenant).Ascending(x => x.Id),
            new CreateIndexOptions { Unique = true });

        // (tenant, createdAt desc) — recent-first time-range queries.
        var byTime = new CreateIndexModel<VexTimelineEventRecord>(
            keys.Ascending(x => x.Tenant).Descending(x => x.CreatedAt));

        // (tenant, traceId, createdAt) — trace correlation lookups.
        var byTrace = new CreateIndexModel<VexTimelineEventRecord>(
            keys.Ascending(x => x.Tenant).Ascending(x => x.TraceId).Ascending(x => x.CreatedAt));

        // (tenant, providerId, createdAt desc) — per-provider history.
        var byProvider = new CreateIndexModel<VexTimelineEventRecord>(
            keys.Ascending(x => x.Tenant).Ascending(x => x.ProviderId).Descending(x => x.CreatedAt));

        // (tenant, eventType, createdAt desc) — per-event-type history.
        var byEventType = new CreateIndexModel<VexTimelineEventRecord>(
            keys.Ascending(x => x.Tenant).Ascending(x => x.EventType).Descending(x => x.CreatedAt));

        // A TTL index on CreatedAt (e.g. ExpireAfter = 30 days) could be added here
        // if timeline events should expire automatically.
        var creations = new[]
        {
            collection.Indexes.CreateOneAsync(identity, cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(byTime, cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(byTrace, cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(byProvider, cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(byEventType, cancellationToken: cancellationToken)
        };

        await Task.WhenAll(creations).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,84 @@
using MongoDB.Driver;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// MongoDB implementation of <see cref="IVexLinksetEventPublisher"/>.
/// Events are persisted to the vex.linkset_events collection for replay and audit.
/// </summary>
internal sealed class MongoVexLinksetEventPublisher : IVexLinksetEventPublisher
{
    private readonly IMongoCollection<VexLinksetEventRecord> _events;

    public MongoVexLinksetEventPublisher(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        _events = database.GetCollection<VexLinksetEventRecord>(VexMongoCollectionNames.LinksetEvents);
    }

    /// <summary>Persists a single linkset-updated event for replay and audit.</summary>
    public async Task PublishAsync(VexLinksetUpdatedEvent @event, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(@event);

        await _events.InsertOneAsync(ToRecord(@event), cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Persists a batch of events. Null entries are skipped; an empty batch is a no-op.
    /// </summary>
    public async Task PublishManyAsync(IEnumerable<VexLinksetUpdatedEvent> events, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(events);

        var batch = new List<VexLinksetEventRecord>();
        foreach (var item in events)
        {
            if (item is not null)
            {
                batch.Add(ToRecord(item));
            }
        }

        if (batch.Count == 0)
        {
            return;
        }

        // Unordered insert lets the server continue past individual document failures.
        await _events.InsertManyAsync(batch, new InsertManyOptions { IsOrdered = false }, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Maps the domain event to its persistence record.</summary>
    private static VexLinksetEventRecord ToRecord(VexLinksetUpdatedEvent source)
    {
        var observations = new List<VexLinksetEventObservationRecord>(source.Observations.Length);
        foreach (var observation in source.Observations)
        {
            observations.Add(new VexLinksetEventObservationRecord
            {
                ObservationId = observation.ObservationId,
                ProviderId = observation.ProviderId,
                Status = observation.Status,
                Confidence = observation.Confidence
            });
        }

        var disagreements = new List<VexLinksetDisagreementRecord>(source.Disagreements.Length);
        foreach (var disagreement in source.Disagreements)
        {
            disagreements.Add(new VexLinksetDisagreementRecord
            {
                ProviderId = disagreement.ProviderId,
                Status = disagreement.Status,
                Justification = disagreement.Justification,
                Confidence = disagreement.Confidence
            });
        }

        return new VexLinksetEventRecord
        {
            // Deterministic id derived from linkset id + creation tick count.
            Id = $"{source.LinksetId}:{source.CreatedAtUtc.UtcTicks}",
            EventType = source.EventType,
            Tenant = source.Tenant.ToLowerInvariant(),
            LinksetId = source.LinksetId,
            VulnerabilityId = source.VulnerabilityId,
            ProductKey = source.ProductKey,
            Observations = observations,
            Disagreements = disagreements,
            CreatedAtUtc = source.CreatedAtUtc.UtcDateTime,
            PublishedAtUtc = DateTime.UtcNow,
            ConflictCount = source.Disagreements.Length,
            ObservationCount = source.Observations.Length
        };
    }
}

View File

@@ -0,0 +1,339 @@
using System.Collections.Immutable;
using MongoDB.Driver;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
internal sealed class MongoVexLinksetStore : IVexLinksetStore
{
    private readonly IMongoCollection<VexLinksetRecord> _collection;

    public MongoVexLinksetStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        _collection = database.GetCollection<VexLinksetRecord>(VexMongoCollectionNames.Linksets);
    }

    /// <summary>
    /// Inserts the linkset. Returns <c>false</c> (instead of throwing) when a
    /// document with the same key already exists.
    /// </summary>
    public async ValueTask<bool> InsertAsync(
        VexLinkset linkset,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(linkset);
        var record = ToRecord(linkset);
        try
        {
            await _collection.InsertOneAsync(record, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
            return true;
        }
        catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
        {
            // Duplicate key: the linkset was already stored; report without throwing.
            return false;
        }
    }

    /// <summary>
    /// Inserts or replaces the linkset keyed by (tenant, linksetId).
    /// Returns <c>true</c> only when a new document was created (an in-place
    /// replacement of an existing document returns <c>false</c>).
    /// </summary>
    public async ValueTask<bool> UpsertAsync(
        VexLinkset linkset,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(linkset);
        var record = ToRecord(linkset);
        var normalizedTenant = NormalizeTenant(linkset.Tenant);
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.Eq(r => r.LinksetId, linkset.LinksetId));
        var options = new ReplaceOptions { IsUpsert = true };
        var result = await _collection
            .ReplaceOneAsync(filter, record, options, cancellationToken)
            .ConfigureAwait(false);
        return result.UpsertedId is not null;
    }

    /// <summary>Fetches a linkset by (tenant, linksetId); <c>null</c> when absent.</summary>
    public async ValueTask<VexLinkset?> GetByIdAsync(
        string tenant,
        string linksetId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedId = linksetId?.Trim() ?? throw new ArgumentNullException(nameof(linksetId));
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.Eq(r => r.LinksetId, normalizedId));
        var record = await _collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return record is null ? null : ToModel(record);
    }

    /// <summary>
    /// Returns the existing linkset for (tenant, vulnerability, product),
    /// creating an empty one when absent. Safe under concurrent callers:
    /// exactly one insert wins and all callers observe the stored document.
    /// </summary>
    public async ValueTask<VexLinkset> GetOrCreateAsync(
        string tenant,
        string vulnerabilityId,
        string productKey,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedVuln = vulnerabilityId?.Trim() ?? throw new ArgumentNullException(nameof(vulnerabilityId));
        var normalizedProduct = productKey?.Trim() ?? throw new ArgumentNullException(nameof(productKey));
        var linksetId = VexLinkset.CreateLinksetId(normalizedTenant, normalizedVuln, normalizedProduct);
        var existing = await GetByIdAsync(normalizedTenant, linksetId, cancellationToken).ConfigureAwait(false);
        if (existing is not null)
        {
            return existing;
        }
        var newLinkset = new VexLinkset(
            linksetId,
            normalizedTenant,
            normalizedVuln,
            normalizedProduct,
            observations: Array.Empty<VexLinksetObservationRefModel>(),
            disagreements: null,
            createdAt: DateTimeOffset.UtcNow,
            updatedAt: DateTimeOffset.UtcNow);

        // InsertAsync swallows duplicate-key errors and reports them via its return
        // value, so catching MongoWriteException here would be dead code. When the
        // insert loses a race to a concurrent writer, re-read the winner's document
        // instead of returning the locally built (possibly divergent) instance.
        if (await InsertAsync(newLinkset, cancellationToken).ConfigureAwait(false))
        {
            return newLinkset;
        }

        var created = await GetByIdAsync(normalizedTenant, linksetId, cancellationToken).ConfigureAwait(false);
        return created ?? newLinkset;
    }

    /// <summary>Lists linksets for a vulnerability, most recently updated first.</summary>
    public async ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(
        string tenant,
        string vulnerabilityId,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        // Vulnerability ids are stored lowercased (see ToRecord), so queries match case-insensitively.
        var normalizedVuln = vulnerabilityId?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(vulnerabilityId));
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.Eq(r => r.VulnerabilityId, normalizedVuln));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexLinksetRecord>.Sort.Descending(r => r.UpdatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Lists linksets for a product key, most recently updated first.</summary>
    public async ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(
        string tenant,
        string productKey,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedProduct = productKey?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(productKey));
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.Eq(r => r.ProductKey, normalizedProduct));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexLinksetRecord>.Sort.Descending(r => r.UpdatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Lists linksets with at least one provider disagreement, most recent first.</summary>
    public async ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(
        string tenant,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.SizeGt(r => r.Disagreements, 0));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexLinksetRecord>.Sort.Descending(r => r.UpdatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Lists linksets that reference the given provider, most recent first.</summary>
    public async ValueTask<IReadOnlyList<VexLinkset>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedProvider = providerId?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(providerId));
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.AnyEq(r => r.ProviderIds, normalizedProvider));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexLinksetRecord>.Sort.Descending(r => r.UpdatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Deletes a linkset; returns <c>true</c> when a document was removed.</summary>
    public async ValueTask<bool> DeleteAsync(
        string tenant,
        string linksetId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedId = linksetId?.Trim() ?? throw new ArgumentNullException(nameof(linksetId));
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.Eq(r => r.LinksetId, normalizedId));
        var result = await _collection
            .DeleteOneAsync(filter, cancellationToken)
            .ConfigureAwait(false);
        return result.DeletedCount > 0;
    }

    /// <summary>Counts all linksets for the tenant.</summary>
    public async ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant);
        return await _collection
            .CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Counts linksets for the tenant that carry at least one disagreement.</summary>
    public async ValueTask<long> CountWithConflictsAsync(
        string tenant,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexLinksetRecord>.Filter.And(
            Builders<VexLinksetRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexLinksetRecord>.Filter.SizeGt(r => r.Disagreements, 0));
        return await _collection
            .CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Trims and lowercases the tenant; rejects null/blank tenants.</summary>
    private static string NormalizeTenant(string tenant)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            throw new ArgumentException("tenant is required", nameof(tenant));
        }
        return tenant.Trim().ToLowerInvariant();
    }

    /// <summary>
    /// Maps the domain model to its persistence record. Tenant, vulnerability id
    /// and product key are stored lowercased so the normalized query filters match.
    /// </summary>
    private static VexLinksetRecord ToRecord(VexLinkset linkset)
    {
        return new VexLinksetRecord
        {
            Id = linkset.LinksetId,
            Tenant = linkset.Tenant.ToLowerInvariant(),
            LinksetId = linkset.LinksetId,
            VulnerabilityId = linkset.VulnerabilityId.ToLowerInvariant(),
            ProductKey = linkset.ProductKey.ToLowerInvariant(),
            ProviderIds = linkset.ProviderIds.ToList(),
            Statuses = linkset.Statuses.ToList(),
            CreatedAt = linkset.CreatedAt.UtcDateTime,
            UpdatedAt = linkset.UpdatedAt.UtcDateTime,
            Observations = linkset.Observations.Select(ToObservationRecord).ToList(),
            Disagreements = linkset.Disagreements.Select(ToDisagreementRecord).ToList()
        };
    }

    private static VexObservationLinksetObservationRecord ToObservationRecord(VexLinksetObservationRefModel obs)
    {
        return new VexObservationLinksetObservationRecord
        {
            ObservationId = obs.ObservationId,
            ProviderId = obs.ProviderId,
            Status = obs.Status,
            Confidence = obs.Confidence
        };
    }

    private static VexLinksetDisagreementRecord ToDisagreementRecord(VexObservationDisagreement disagreement)
    {
        return new VexLinksetDisagreementRecord
        {
            ProviderId = disagreement.ProviderId,
            Status = disagreement.Status,
            Justification = disagreement.Justification,
            Confidence = disagreement.Confidence
        };
    }

    /// <summary>
    /// Maps the persistence record back to the domain model. Missing/null
    /// sub-collections map to empty arrays; stored DateTimes are treated as UTC.
    /// </summary>
    private static VexLinkset ToModel(VexLinksetRecord record)
    {
        var observations = record.Observations?
            .Where(o => o is not null)
            .Select(o => new VexLinksetObservationRefModel(
                o.ObservationId,
                o.ProviderId,
                o.Status,
                o.Confidence))
            .ToImmutableArray() ?? ImmutableArray<VexLinksetObservationRefModel>.Empty;
        var disagreements = record.Disagreements?
            .Where(d => d is not null)
            .Select(d => new VexObservationDisagreement(
                d.ProviderId,
                d.Status,
                d.Justification,
                d.Confidence))
            .ToImmutableArray() ?? ImmutableArray<VexObservationDisagreement>.Empty;
        return new VexLinkset(
            linksetId: record.LinksetId,
            tenant: record.Tenant,
            vulnerabilityId: record.VulnerabilityId,
            productKey: record.ProductKey,
            observations: observations,
            disagreements: disagreements,
            createdAt: new DateTimeOffset(record.CreatedAt, TimeSpan.Zero),
            updatedAt: new DateTimeOffset(record.UpdatedAt, TimeSpan.Zero));
    }
}

View File

@@ -0,0 +1,398 @@
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
internal sealed class MongoVexObservationStore : IVexObservationStore
{
    // Settings for serializing stored raw documents back to JSON. The driver's
    // parameterless ToJson() defaults to shell output mode, which emits tokens
    // like NumberLong(...) / ISODate(...) that System.Text.Json cannot parse;
    // relaxed extended JSON is always valid JSON.
    private static readonly MongoDB.Bson.IO.JsonWriterSettings RelaxedJsonSettings = new()
    {
        OutputMode = MongoDB.Bson.IO.JsonOutputMode.RelaxedExtendedJson
    };

    private readonly IMongoCollection<VexObservationRecord> _collection;

    public MongoVexObservationStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        _collection = database.GetCollection<VexObservationRecord>(VexMongoCollectionNames.Observations);
    }

    /// <summary>
    /// Inserts the observation. Returns <c>false</c> (instead of throwing) when a
    /// document with the same key already exists.
    /// </summary>
    public async ValueTask<bool> InsertAsync(
        VexObservation observation,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(observation);
        var record = ToRecord(observation);
        try
        {
            await _collection.InsertOneAsync(record, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
            return true;
        }
        catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
        {
            return false;
        }
    }

    /// <summary>
    /// Inserts or replaces the observation keyed by (tenant, observationId).
    /// Returns <c>true</c> only when a new document was created.
    /// </summary>
    public async ValueTask<bool> UpsertAsync(
        VexObservation observation,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(observation);
        var record = ToRecord(observation);
        var normalizedTenant = NormalizeTenant(observation.Tenant);
        var filter = Builders<VexObservationRecord>.Filter.And(
            Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexObservationRecord>.Filter.Eq(r => r.ObservationId, observation.ObservationId));
        var options = new ReplaceOptions { IsUpsert = true };
        var result = await _collection
            .ReplaceOneAsync(filter, record, options, cancellationToken)
            .ConfigureAwait(false);
        return result.UpsertedId is not null;
    }

    /// <summary>
    /// Bulk-inserts observations belonging to <paramref name="tenant"/> (entries
    /// for other tenants are filtered out). Returns the number of documents
    /// actually inserted; duplicates are tolerated via an unordered write.
    /// </summary>
    public async ValueTask<int> InsertManyAsync(
        string tenant,
        IEnumerable<VexObservation> observations,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var records = observations
            .Where(o => o is not null && string.Equals(NormalizeTenant(o.Tenant), normalizedTenant, StringComparison.Ordinal))
            .Select(ToRecord)
            .ToList();
        if (records.Count == 0)
        {
            return 0;
        }
        var options = new InsertManyOptions { IsOrdered = false };
        try
        {
            await _collection.InsertManyAsync(records, options, cancellationToken)
                .ConfigureAwait(false);
            return records.Count;
        }
        catch (MongoBulkWriteException<VexObservationRecord> ex)
        {
            // With IsOrdered=false the server attempts every document; report the
            // count of successful inserts by subtracting duplicate-key failures.
            var duplicates = ex.WriteErrors?.Count(e => e.Category == ServerErrorCategory.DuplicateKey) ?? 0;
            return records.Count - duplicates;
        }
    }

    /// <summary>Fetches an observation by (tenant, observationId); <c>null</c> when absent.</summary>
    public async ValueTask<VexObservation?> GetByIdAsync(
        string tenant,
        string observationId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedId = observationId?.Trim() ?? throw new ArgumentNullException(nameof(observationId));
        var filter = Builders<VexObservationRecord>.Filter.And(
            Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexObservationRecord>.Filter.Eq(r => r.ObservationId, normalizedId));
        var record = await _collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return record is null ? null : ToModel(record);
    }

    /// <summary>Lists observations for (vulnerability, product), newest first.</summary>
    public async ValueTask<IReadOnlyList<VexObservation>> FindByVulnerabilityAndProductAsync(
        string tenant,
        string vulnerabilityId,
        string productKey,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        // Vulnerability ids / product keys are stored lowercased (see ToRecord).
        var normalizedVuln = vulnerabilityId?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(vulnerabilityId));
        var normalizedProduct = productKey?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(productKey));
        var filter = Builders<VexObservationRecord>.Filter.And(
            Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexObservationRecord>.Filter.Eq(r => r.VulnerabilityId, normalizedVuln),
            Builders<VexObservationRecord>.Filter.Eq(r => r.ProductKey, normalizedProduct));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexObservationRecord>.Sort.Descending(r => r.CreatedAt))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Lists observations for a provider, newest first, up to <paramref name="limit"/>.</summary>
    public async ValueTask<IReadOnlyList<VexObservation>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        // NOTE(review): ProviderId is stored verbatim (see ToRecord) but queried
        // lowercased here — providers with non-lowercase ids will not match.
        // Confirm the expected casing of provider ids upstream.
        var normalizedProvider = providerId?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(providerId));
        var filter = Builders<VexObservationRecord>.Filter.And(
            Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexObservationRecord>.Filter.Eq(r => r.ProviderId, normalizedProvider));
        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexObservationRecord>.Sort.Descending(r => r.CreatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return records.Select(ToModel).ToList();
    }

    /// <summary>Deletes an observation; returns <c>true</c> when a document was removed.</summary>
    public async ValueTask<bool> DeleteAsync(
        string tenant,
        string observationId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedId = observationId?.Trim() ?? throw new ArgumentNullException(nameof(observationId));
        var filter = Builders<VexObservationRecord>.Filter.And(
            Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexObservationRecord>.Filter.Eq(r => r.ObservationId, normalizedId));
        var result = await _collection
            .DeleteOneAsync(filter, cancellationToken)
            .ConfigureAwait(false);
        return result.DeletedCount > 0;
    }

    /// <summary>Counts all observations for the tenant.</summary>
    public async ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexObservationRecord>.Filter.Eq(r => r.Tenant, normalizedTenant);
        return await _collection
            .CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Trims and lowercases the tenant; rejects null/blank tenants.</summary>
    private static string NormalizeTenant(string tenant)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            throw new ArgumentException("tenant is required", nameof(tenant));
        }
        return tenant.Trim().ToLowerInvariant();
    }

    /// <summary>
    /// Maps the domain model to its persistence record. The tenant is normalized
    /// on write so that the normalized filters used by every read/delete/count
    /// path can find the document — previously the raw tenant was stored, making
    /// documents written with a mixed-case tenant unreachable.
    /// </summary>
    private static VexObservationRecord ToRecord(VexObservation observation)
    {
        var firstStatement = observation.Statements.FirstOrDefault();
        return new VexObservationRecord
        {
            Id = observation.ObservationId,
            Tenant = NormalizeTenant(observation.Tenant),
            ObservationId = observation.ObservationId,
            // Top-level vuln/product/status mirror the first statement for query purposes.
            VulnerabilityId = firstStatement?.VulnerabilityId?.ToLowerInvariant() ?? string.Empty,
            ProductKey = firstStatement?.ProductKey?.ToLowerInvariant() ?? string.Empty,
            ProviderId = observation.ProviderId,
            StreamId = observation.StreamId,
            Status = firstStatement?.Status.ToString().ToLowerInvariant() ?? "unknown",
            Document = new VexObservationDocumentRecord
            {
                Digest = observation.Upstream.ContentHash,
                SourceUri = null,
                Format = observation.Content.Format,
                Revision = observation.Upstream.DocumentVersion,
                Signature = new VexObservationSignatureRecord
                {
                    Present = observation.Upstream.Signature.Present,
                    Subject = observation.Upstream.Signature.Format,
                    Issuer = observation.Upstream.Signature.KeyId,
                    VerifiedAt = null
                }
            },
            Upstream = new VexObservationUpstreamRecord
            {
                UpstreamId = observation.Upstream.UpstreamId,
                DocumentVersion = observation.Upstream.DocumentVersion,
                FetchedAt = observation.Upstream.FetchedAt,
                ReceivedAt = observation.Upstream.ReceivedAt,
                ContentHash = observation.Upstream.ContentHash,
                Signature = new VexObservationSignatureRecord
                {
                    Present = observation.Upstream.Signature.Present,
                    Subject = observation.Upstream.Signature.Format,
                    Issuer = observation.Upstream.Signature.KeyId,
                    VerifiedAt = null
                }
            },
            Content = new VexObservationContentRecord
            {
                Format = observation.Content.Format,
                SpecVersion = observation.Content.SpecVersion,
                // NOTE(review): BsonDocument.Parse requires the raw payload to be a
                // JSON object — confirm upstream never supplies a bare array/scalar.
                Raw = BsonDocument.Parse(observation.Content.Raw.ToJsonString())
            },
            Statements = observation.Statements.Select(ToStatementRecord).ToList(),
            Linkset = ToLinksetRecord(observation.Linkset),
            CreatedAt = observation.CreatedAt.UtcDateTime
        };
    }

    private static VexObservationStatementRecord ToStatementRecord(VexObservationStatement statement)
    {
        return new VexObservationStatementRecord
        {
            VulnerabilityId = statement.VulnerabilityId,
            ProductKey = statement.ProductKey,
            Status = statement.Status.ToString().ToLowerInvariant(),
            LastObserved = statement.LastObserved,
            Locator = statement.Locator,
            Justification = statement.Justification?.ToString().ToLowerInvariant(),
            IntroducedVersion = statement.IntroducedVersion,
            FixedVersion = statement.FixedVersion,
            Detail = null,
            ScopeScore = null,
            Epss = null,
            Kev = null
        };
    }

    private static VexObservationLinksetRecord ToLinksetRecord(VexObservationLinkset linkset)
    {
        return new VexObservationLinksetRecord
        {
            Aliases = linkset.Aliases.ToList(),
            Purls = linkset.Purls.ToList(),
            Cpes = linkset.Cpes.ToList(),
            References = linkset.References.Select(r => new VexObservationReferenceRecord
            {
                Type = r.Type,
                Url = r.Url
            }).ToList(),
            ReconciledFrom = linkset.ReconciledFrom.ToList(),
            Disagreements = linkset.Disagreements.Select(d => new VexLinksetDisagreementRecord
            {
                ProviderId = d.ProviderId,
                Status = d.Status,
                Justification = d.Justification,
                Confidence = d.Confidence
            }).ToList(),
            Observations = linkset.Observations.Select(o => new VexObservationLinksetObservationRecord
            {
                ObservationId = o.ObservationId,
                ProviderId = o.ProviderId,
                Status = o.Status,
                Confidence = o.Confidence
            }).ToList()
        };
    }

    /// <summary>
    /// Maps the persistence record back to the domain model, tolerating legacy
    /// documents that lack Upstream/Content sub-documents.
    /// </summary>
    private static VexObservation ToModel(VexObservationRecord record)
    {
        var statements = record.Statements.Select(MapStatement).ToImmutableArray();
        var linkset = MapLinkset(record.Linkset);
        var upstreamSignature = record.Upstream?.Signature is null
            ? new VexObservationSignature(false, null, null, null)
            : new VexObservationSignature(
                record.Upstream.Signature.Present,
                record.Upstream.Signature.Subject,
                record.Upstream.Signature.Issuer,
                signature: null);
        var upstream = record.Upstream is null
            ? new VexObservationUpstream(
                upstreamId: record.ObservationId,
                documentVersion: null,
                fetchedAt: record.CreatedAt,
                receivedAt: record.CreatedAt,
                contentHash: record.Document.Digest,
                signature: upstreamSignature)
            : new VexObservationUpstream(
                record.Upstream.UpstreamId,
                record.Upstream.DocumentVersion,
                record.Upstream.FetchedAt,
                record.Upstream.ReceivedAt,
                record.Upstream.ContentHash,
                upstreamSignature);
        var content = record.Content is null
            ? new VexObservationContent("unknown", null, new JsonObject())
            : new VexObservationContent(
                record.Content.Format ?? "unknown",
                record.Content.SpecVersion,
                // Relaxed extended JSON keeps the output parseable by JsonNode
                // (plain ToJson() emits shell-mode tokens that are not valid JSON).
                JsonNode.Parse(record.Content.Raw.ToJson(RelaxedJsonSettings)) ?? new JsonObject(),
                metadata: ImmutableDictionary<string, string>.Empty);
        return new VexObservation(
            observationId: record.ObservationId,
            tenant: record.Tenant,
            providerId: record.ProviderId,
            streamId: string.IsNullOrWhiteSpace(record.StreamId) ? record.ProviderId : record.StreamId,
            upstream: upstream,
            statements: statements,
            content: content,
            linkset: linkset,
            createdAt: new DateTimeOffset(record.CreatedAt, TimeSpan.Zero),
            supersedes: ImmutableArray<string>.Empty,
            attributes: ImmutableDictionary<string, string>.Empty);
    }

    private static VexObservationStatement MapStatement(VexObservationStatementRecord record)
    {
        var justification = string.IsNullOrWhiteSpace(record.Justification)
            ? (VexJustification?)null
            : Enum.Parse<VexJustification>(record.Justification, ignoreCase: true);
        // NOTE(review): Enum.Parse throws if record.Status holds a value outside
        // VexClaimStatus (e.g. the "unknown" fallback written by ToRecord when a
        // record has no statements) — confirm the enum covers all stored values.
        return new VexObservationStatement(
            record.VulnerabilityId,
            record.ProductKey,
            Enum.Parse<VexClaimStatus>(record.Status, ignoreCase: true),
            record.LastObserved,
            locator: record.Locator,
            justification: justification,
            introducedVersion: record.IntroducedVersion,
            fixedVersion: record.FixedVersion,
            purl: null,
            cpe: null,
            evidence: null,
            metadata: ImmutableDictionary<string, string>.Empty);
    }

    private static VexObservationLinkset MapLinkset(VexObservationLinksetRecord record)
    {
        var aliases = record?.Aliases?.Where(NotNullOrWhiteSpace).Select(a => a.Trim()).ToImmutableArray() ?? ImmutableArray<string>.Empty;
        var purls = record?.Purls?.Where(NotNullOrWhiteSpace).Select(p => p.Trim()).ToImmutableArray() ?? ImmutableArray<string>.Empty;
        var cpes = record?.Cpes?.Where(NotNullOrWhiteSpace).Select(c => c.Trim()).ToImmutableArray() ?? ImmutableArray<string>.Empty;
        var references = record?.References?.Select(r => new VexObservationReference(r.Type, r.Url)).ToImmutableArray() ?? ImmutableArray<VexObservationReference>.Empty;
        var reconciledFrom = record?.ReconciledFrom?.Where(NotNullOrWhiteSpace).Select(r => r.Trim()).ToImmutableArray() ?? ImmutableArray<string>.Empty;
        var disagreements = record?.Disagreements?.Select(d => new VexObservationDisagreement(d.ProviderId, d.Status, d.Justification, d.Confidence)).ToImmutableArray() ?? ImmutableArray<VexObservationDisagreement>.Empty;
        var observationRefs = record?.Observations?.Select(o => new VexLinksetObservationRefModel(
            o.ObservationId,
            o.ProviderId,
            o.Status,
            o.Confidence)).ToImmutableArray() ?? ImmutableArray<VexLinksetObservationRefModel>.Empty;
        return new VexObservationLinkset(aliases, purls, cpes, references, reconciledFrom, disagreements, observationRefs);
    }

    private static bool NotNullOrWhiteSpace(string? value) => !string.IsNullOrWhiteSpace(value);
}

View File

@@ -0,0 +1,316 @@
using System.Collections.Immutable;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// MongoDB record for timeline events.
/// </summary>
/// <remarks>
/// Serialized via the class map registered in the mapping registry; extra
/// elements in stored documents are ignored so older/newer document shapes
/// can coexist.
/// </remarks>
[BsonIgnoreExtraElements]
internal sealed class VexTimelineEventRecord
{
// Event identifier; doubles as the Mongo primary key.
[BsonId]
public string Id { get; set; } = default!;
// Owning tenant. NOTE(review): the store's query paths lower-case tenants;
// confirm the write path normalizes the same way before relying on casing.
public string Tenant { get; set; } = default!;
// Source provider and stream the event originated from.
public string ProviderId { get; set; } = default!;
public string StreamId { get; set; } = default!;
// Event discriminator (e.g. observation ingested vs. linkset updated).
public string EventType { get; set; } = default!;
// Correlation id used to group events belonging to one processing run.
public string TraceId { get; set; } = default!;
// Human-readable summary; empty rather than null when absent.
public string JustificationSummary { get; set; } = string.Empty;
// Optional content hashes linking the event to evidence / payload documents.
public string? EvidenceHash { get; set; }
public string? PayloadHash { get; set; }
// Stored as UTC; default is only a placeholder until the mapper assigns it.
public DateTime CreatedAt { get; set; }
= DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
// Free-form string attributes; ordinal comparer keeps key lookups culture-independent.
public Dictionary<string, string> Attributes { get; set; } = new(StringComparer.Ordinal);
}
/// <summary>
/// MongoDB implementation of the timeline event store.
/// Tenant, provider, and event-type values are normalized (trimmed,
/// lower-cased) on both the write and read paths so that inserted documents
/// always match later query filters.
/// </summary>
internal sealed class MongoVexTimelineEventStore : IVexTimelineEventStore
{
    private readonly IMongoCollection<VexTimelineEventRecord> _collection;

    public MongoVexTimelineEventStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        _collection = database.GetCollection<VexTimelineEventRecord>(VexMongoCollectionNames.TimelineEvents);
    }

    /// <summary>
    /// Inserts one event. A duplicate-key error is treated as success because
    /// events are immutable and keyed by their event id.
    /// </summary>
    public async ValueTask<string> InsertAsync(
        TimelineEvent evt,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(evt);

        var record = ToRecord(evt);
        try
        {
            await _collection.InsertOneAsync(record, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
            return record.Id;
        }
        catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
        {
            // Event already exists, return the ID anyway
            return record.Id;
        }
    }

    /// <summary>
    /// Inserts a batch of events, keeping only those that belong to
    /// <paramref name="tenant"/>. Returns the number of documents actually
    /// inserted (duplicate keys are skipped, not counted).
    /// </summary>
    public async ValueTask<int> InsertManyAsync(
        string tenant,
        IEnumerable<TimelineEvent> events,
        CancellationToken cancellationToken)
    {
        // Fix: a null sequence previously surfaced as a NullReferenceException
        // from the LINQ chain below.
        ArgumentNullException.ThrowIfNull(events);

        var normalizedTenant = NormalizeTenant(tenant);
        var records = events
            .Where(e => e is not null && string.Equals(NormalizeTenant(e.Tenant), normalizedTenant, StringComparison.Ordinal))
            .Select(ToRecord)
            .ToList();

        if (records.Count == 0)
        {
            return 0;
        }

        // Unordered so one duplicate key does not abort the rest of the batch.
        var options = new InsertManyOptions { IsOrdered = false };
        try
        {
            await _collection.InsertManyAsync(records, options, cancellationToken)
                .ConfigureAwait(false);
            return records.Count;
        }
        catch (MongoBulkWriteException<VexTimelineEventRecord> ex)
        {
            var duplicates = ex.WriteErrors?.Count(e => e.Category == ServerErrorCategory.DuplicateKey) ?? 0;
            return records.Count - duplicates;
        }
    }

    /// <summary>
    /// Returns events created within [<paramref name="from"/>, <paramref name="to"/>],
    /// oldest first, capped at <paramref name="limit"/> (minimum 1).
    /// </summary>
    public async ValueTask<IReadOnlyList<TimelineEvent>> FindByTimeRangeAsync(
        string tenant,
        DateTimeOffset from,
        DateTimeOffset to,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var fromUtc = from.UtcDateTime;
        var toUtc = to.UtcDateTime;

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Gte(r => r.CreatedAt, fromUtc),
            Builders<VexTimelineEventRecord>.Filter.Lte(r => r.CreatedAt, toUtc));

        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexTimelineEventRecord>.Sort.Ascending(r => r.CreatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return records.Select(ToModel).ToList();
    }

    /// <summary>Returns all events sharing a trace id, oldest first.</summary>
    public async ValueTask<IReadOnlyList<TimelineEvent>> FindByTraceIdAsync(
        string tenant,
        string traceId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedTraceId = traceId?.Trim() ?? throw new ArgumentNullException(nameof(traceId));

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.TraceId, normalizedTraceId));

        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexTimelineEventRecord>.Sort.Ascending(r => r.CreatedAt))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return records.Select(ToModel).ToList();
    }

    /// <summary>Returns the newest events for a provider, capped at <paramref name="limit"/>.</summary>
    public async ValueTask<IReadOnlyList<TimelineEvent>> FindByProviderAsync(
        string tenant,
        string providerId,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedProvider = providerId?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(providerId));

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.ProviderId, normalizedProvider));

        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexTimelineEventRecord>.Sort.Descending(r => r.CreatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return records.Select(ToModel).ToList();
    }

    /// <summary>Returns the newest events of a given type, capped at <paramref name="limit"/>.</summary>
    public async ValueTask<IReadOnlyList<TimelineEvent>> FindByEventTypeAsync(
        string tenant,
        string eventType,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedType = eventType?.Trim().ToLowerInvariant()
            ?? throw new ArgumentNullException(nameof(eventType));

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.EventType, normalizedType));

        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexTimelineEventRecord>.Sort.Descending(r => r.CreatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return records.Select(ToModel).ToList();
    }

    /// <summary>Returns the newest events for a tenant, capped at <paramref name="limit"/>.</summary>
    public async ValueTask<IReadOnlyList<TimelineEvent>> GetRecentAsync(
        string tenant,
        int limit,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant);

        var records = await _collection
            .Find(filter)
            .Sort(Builders<VexTimelineEventRecord>.Sort.Descending(r => r.CreatedAt))
            .Limit(Math.Max(1, limit))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return records.Select(ToModel).ToList();
    }

    /// <summary>Looks up a single event by id within a tenant; null when not found.</summary>
    public async ValueTask<TimelineEvent?> GetByIdAsync(
        string tenant,
        string eventId,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var normalizedId = eventId?.Trim() ?? throw new ArgumentNullException(nameof(eventId));

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Id, normalizedId));

        var record = await _collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return record is null ? null : ToModel(record);
    }

    /// <summary>Counts all events stored for a tenant.</summary>
    public async ValueTask<long> CountAsync(
        string tenant,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var filter = Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant);

        return await _collection
            .CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Counts events for a tenant created within [from, to].</summary>
    public async ValueTask<long> CountInRangeAsync(
        string tenant,
        DateTimeOffset from,
        DateTimeOffset to,
        CancellationToken cancellationToken)
    {
        var normalizedTenant = NormalizeTenant(tenant);
        var fromUtc = from.UtcDateTime;
        var toUtc = to.UtcDateTime;

        var filter = Builders<VexTimelineEventRecord>.Filter.And(
            Builders<VexTimelineEventRecord>.Filter.Eq(r => r.Tenant, normalizedTenant),
            Builders<VexTimelineEventRecord>.Filter.Gte(r => r.CreatedAt, fromUtc),
            Builders<VexTimelineEventRecord>.Filter.Lte(r => r.CreatedAt, toUtc));

        return await _collection
            .CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Trims and lower-cases a tenant; throws when blank.</summary>
    private static string NormalizeTenant(string tenant)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            throw new ArgumentException("tenant is required", nameof(tenant));
        }

        return tenant.Trim().ToLowerInvariant();
    }

    private static VexTimelineEventRecord ToRecord(TimelineEvent evt)
    {
        return new VexTimelineEventRecord
        {
            Id = evt.EventId,
            // Fix: persist identity fields in the same normalized form the
            // query paths use (trim + lower-case). Previously the raw tenant
            // was stored while every Find*/Count* method filtered on the
            // normalized tenant, so events written with mixed-case tenants
            // could never be found again.
            Tenant = NormalizeTenant(evt.Tenant),
            ProviderId = evt.ProviderId.Trim().ToLowerInvariant(),
            StreamId = evt.StreamId.ToLowerInvariant(),
            EventType = evt.EventType.Trim().ToLowerInvariant(),
            TraceId = evt.TraceId,
            JustificationSummary = evt.JustificationSummary,
            EvidenceHash = evt.EvidenceHash,
            PayloadHash = evt.PayloadHash,
            CreatedAt = evt.CreatedAt.UtcDateTime,
            Attributes = evt.Attributes.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal)
        };
    }

    private static TimelineEvent ToModel(VexTimelineEventRecord record)
    {
        var attributes = record.Attributes?.ToImmutableDictionary(StringComparer.Ordinal)
            ?? ImmutableDictionary<string, string>.Empty;

        return new TimelineEvent(
            eventId: record.Id,
            tenant: record.Tenant,
            providerId: record.ProviderId,
            streamId: record.StreamId,
            eventType: record.EventType,
            traceId: record.TraceId,
            justificationSummary: record.JustificationSummary,
            // CreatedAt is persisted as UTC; re-tag the kind explicitly because
            // the driver round-trip does not guarantee it.
            createdAt: new DateTimeOffset(DateTime.SpecifyKind(record.CreatedAt, DateTimeKind.Utc)),
            evidenceHash: record.EvidenceHash,
            payloadHash: record.PayloadHash,
            attributes: attributes);
    }
}

View File

@@ -4,8 +4,8 @@ using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo.Migrations;
using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Mongo.Migrations;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
@@ -49,24 +49,32 @@ public static class VexMongoServiceCollectionExtensions
services.AddScoped<IVexRawStore, MongoVexRawStore>();
services.AddScoped<IVexExportStore, MongoVexExportStore>();
services.AddScoped<IVexProviderStore, MongoVexProviderStore>();
services.AddScoped<IVexNormalizerRouter, StorageBackedVexNormalizerRouter>();
services.AddScoped<IVexConsensusStore, MongoVexConsensusStore>();
services.AddScoped<IVexConsensusHoldStore, MongoVexConsensusHoldStore>();
services.AddScoped<IVexClaimStore, MongoVexClaimStore>();
services.AddScoped<IVexCacheIndex, MongoVexCacheIndex>();
services.AddScoped<IVexCacheMaintenance, MongoVexCacheMaintenance>();
services.AddScoped<IVexConnectorStateRepository, MongoVexConnectorStateRepository>();
services.AddScoped<IAirgapImportStore, MongoAirgapImportStore>();
services.AddScoped<VexStatementBackfillService>();
services.AddScoped<IVexObservationLookup, MongoVexObservationLookup>();
services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
services.AddSingleton<IVexMongoMigration, VexRawSchemaMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusSignalsMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusHoldMigration>();
services.AddSingleton<IVexMongoMigration, VexObservationCollectionsMigration>();
services.AddSingleton<VexMongoMigrationRunner>();
services.AddHostedService<VexMongoMigrationHostedService>();
return services;
}
}
services.AddScoped<IVexProviderStore, MongoVexProviderStore>();
services.AddScoped<IVexNormalizerRouter, StorageBackedVexNormalizerRouter>();
services.AddScoped<IVexConsensusStore, MongoVexConsensusStore>();
services.AddScoped<IVexConsensusHoldStore, MongoVexConsensusHoldStore>();
services.AddScoped<IVexClaimStore, MongoVexClaimStore>();
services.AddScoped<IVexCacheIndex, MongoVexCacheIndex>();
services.AddScoped<IVexCacheMaintenance, MongoVexCacheMaintenance>();
services.AddScoped<IVexConnectorStateRepository, MongoVexConnectorStateRepository>();
services.AddScoped<IAirgapImportStore, MongoAirgapImportStore>();
services.AddScoped<VexStatementBackfillService>();
services.AddScoped<IVexObservationLookup, MongoVexObservationLookup>();
services.AddScoped<IVexObservationStore, MongoVexObservationStore>();
services.AddScoped<IVexLinksetStore, MongoVexLinksetStore>();
services.AddScoped<IVexLinksetEventPublisher, MongoVexLinksetEventPublisher>();
services.AddScoped<VexLinksetDisagreementService>();
services.AddScoped<IVexTimelineEventStore, MongoVexTimelineEventStore>();
services.AddScoped<IVexTimelineEventEmitter, VexTimelineEventEmitter>();
services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
services.AddSingleton<IVexMongoMigration, VexTimelineEventIndexMigration>();
services.AddSingleton<IVexMongoMigration, VexRawSchemaMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusSignalsMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusHoldMigration>();
services.AddSingleton<IVexMongoMigration, VexObservationCollectionsMigration>();
services.AddSingleton<IVexMongoMigration, VexRawIdempotencyIndexMigration>();
services.AddSingleton<VexMongoMigrationRunner>();
services.AddHostedService<VexMongoMigrationHostedService>();
return services;
}
}

View File

@@ -0,0 +1,299 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
using MongoDB.Bson;
namespace StellaOps.Excititor.Storage.Mongo.Validation;
/// <summary>
/// Validates VEX raw documents against the schema defined in <see cref="Migrations.VexRawSchemaMigration"/>.
/// Provides programmatic validation for operators to prove Excititor stores only immutable evidence.
/// </summary>
public static class VexRawSchemaValidator
{
    // Accepted values for the "format" discriminator (case-insensitive).
    private static readonly ImmutableHashSet<string> ValidFormats = ImmutableHashSet.Create(
        StringComparer.OrdinalIgnoreCase,
        "csaf", "cyclonedx", "openvex");

    // "content" may be stored inline as binary or as a base64 string.
    private static readonly ImmutableHashSet<BsonType> ValidContentTypes = ImmutableHashSet.Create(
        BsonType.Binary, BsonType.String);

    // "gridFsObjectId" is a GridFS reference, explicitly null, or a stringified id.
    private static readonly ImmutableHashSet<BsonType> ValidGridFsTypes = ImmutableHashSet.Create(
        BsonType.ObjectId, BsonType.Null, BsonType.String);

    /// <summary>
    /// Validates a VEX raw document against the schema requirements.
    /// </summary>
    /// <param name="document">The document to validate.</param>
    /// <returns>Validation result with any violations found.</returns>
    public static VexRawValidationResult Validate(BsonDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);

        var violations = new List<VexRawSchemaViolation>();

        // Required fields
        ValidateRequired(document, "_id", violations);
        ValidateRequired(document, "providerId", violations);
        ValidateRequired(document, "format", violations);
        ValidateRequired(document, "sourceUri", violations);
        ValidateRequired(document, "retrievedAt", violations);
        ValidateRequired(document, "digest", violations);

        // Field types and constraints
        ValidateStringField(document, "_id", minLength: 1, violations);
        ValidateStringField(document, "providerId", minLength: 1, violations);
        ValidateFormatEnum(document, violations);
        ValidateStringField(document, "sourceUri", minLength: 1, violations);
        ValidateDateField(document, "retrievedAt", violations);
        ValidateStringField(document, "digest", minLength: 32, violations);

        // Optional fields with type constraints
        if (document.Contains("content"))
        {
            ValidateContentField(document, violations);
        }

        if (document.Contains("gridFsObjectId"))
        {
            ValidateGridFsObjectIdField(document, violations);
        }

        if (document.Contains("metadata"))
        {
            ValidateMetadataField(document, violations);
        }

        // Fix: the previous fallback (`GetValue("_id", BsonNull.Value).ToString() ?? "<unknown>"`)
        // never produced "<unknown>" because BsonNull.ToString() returns a
        // non-null string. Check presence/null explicitly instead.
        var documentId = document.TryGetValue("_id", out var idValue) && !idValue.IsBsonNull
            ? idValue.ToString() ?? "<unknown>"
            : "<unknown>";

        return new VexRawValidationResult(
            documentId,
            violations.Count == 0,
            violations.ToImmutableArray());
    }

    /// <summary>
    /// Validates multiple documents and returns aggregated results.
    /// </summary>
    public static VexRawBatchValidationResult ValidateBatch(IEnumerable<BsonDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var results = new List<VexRawValidationResult>();
        foreach (var doc in documents)
        {
            results.Add(Validate(doc));
        }

        // Count once; the invalid count is the complement of the valid count.
        var valid = results.Count(r => r.IsValid);
        var invalid = results.Count - valid;

        return new VexRawBatchValidationResult(
            results.Count,
            valid,
            invalid,
            results.Where(r => !r.IsValid).ToImmutableArray());
    }

    /// <summary>
    /// Gets the MongoDB JSON Schema document for offline validation.
    /// </summary>
    public static BsonDocument GetJsonSchema()
    {
        var properties = new BsonDocument
        {
            { "_id", new BsonDocument { { "bsonType", "string" }, { "description", "Content digest serving as immutable key" } } },
            { "providerId", new BsonDocument { { "bsonType", "string" }, { "minLength", 1 }, { "description", "VEX provider identifier" } } },
            { "format", new BsonDocument
                {
                    { "bsonType", "string" },
                    { "enum", new BsonArray { "csaf", "cyclonedx", "openvex" } },
                    { "description", "VEX document format" }
                }
            },
            { "sourceUri", new BsonDocument { { "bsonType", "string" }, { "minLength", 1 }, { "description", "Original source URI" } } },
            { "retrievedAt", new BsonDocument { { "bsonType", "date" }, { "description", "Timestamp when document was fetched" } } },
            { "digest", new BsonDocument { { "bsonType", "string" }, { "minLength", 32 }, { "description", "Content hash (SHA-256 hex)" } } },
            { "content", new BsonDocument
                {
                    { "bsonType", new BsonArray { "binData", "string" } },
                    { "description", "Raw document content (binary or base64 string)" }
                }
            },
            { "gridFsObjectId", new BsonDocument
                {
                    { "bsonType", new BsonArray { "objectId", "null", "string" } },
                    { "description", "GridFS reference for large documents" }
                }
            },
            { "metadata", new BsonDocument
                {
                    { "bsonType", "object" },
                    { "additionalProperties", true },
                    { "description", "Provider-specific metadata (string values only)" }
                }
            }
        };

        return new BsonDocument
        {
            {
                "$jsonSchema",
                new BsonDocument
                {
                    { "bsonType", "object" },
                    { "title", "VEX Raw Document Schema" },
                    { "description", "Schema for immutable VEX evidence storage. Documents are content-addressed and must not be modified after insertion." },
                    { "required", new BsonArray { "_id", "providerId", "format", "sourceUri", "retrievedAt", "digest" } },
                    { "properties", properties },
                    { "additionalProperties", true }
                }
            }
        };
    }

    /// <summary>
    /// Gets the schema as a JSON string for operator documentation.
    /// </summary>
    public static string GetJsonSchemaAsJson()
    {
        return GetJsonSchema().ToJson(new MongoDB.Bson.IO.JsonWriterSettings { Indent = true });
    }

    // Records a violation when the field is absent or explicitly null.
    private static void ValidateRequired(BsonDocument doc, string field, List<VexRawSchemaViolation> violations)
    {
        if (!doc.Contains(field) || doc[field].IsBsonNull)
        {
            violations.Add(new VexRawSchemaViolation(field, $"Required field '{field}' is missing or null"));
        }
    }

    // Checks type and minimum length; absence/null is handled by ValidateRequired.
    private static void ValidateStringField(BsonDocument doc, string field, int minLength, List<VexRawSchemaViolation> violations)
    {
        if (!doc.Contains(field))
        {
            return;
        }

        var value = doc[field];
        if (value.IsBsonNull)
        {
            return;
        }

        if (!value.IsString)
        {
            violations.Add(new VexRawSchemaViolation(field, $"Field '{field}' must be a string, got {value.BsonType}"));
            return;
        }

        if (value.AsString.Length < minLength)
        {
            violations.Add(new VexRawSchemaViolation(field, $"Field '{field}' must have minimum length {minLength}, got {value.AsString.Length}"));
        }
    }

    // Ensures "format" is one of the supported VEX formats (case-insensitive).
    private static void ValidateFormatEnum(BsonDocument doc, List<VexRawSchemaViolation> violations)
    {
        if (!doc.Contains("format"))
        {
            return;
        }

        var value = doc["format"];
        if (value.IsBsonNull || !value.IsString)
        {
            return;
        }

        if (!ValidFormats.Contains(value.AsString))
        {
            violations.Add(new VexRawSchemaViolation("format", $"Field 'format' must be one of [{string.Join(", ", ValidFormats)}], got '{value.AsString}'"));
        }
    }

    private static void ValidateDateField(BsonDocument doc, string field, List<VexRawSchemaViolation> violations)
    {
        if (!doc.Contains(field))
        {
            return;
        }

        var value = doc[field];
        if (value.IsBsonNull)
        {
            return;
        }

        if (value.BsonType != BsonType.DateTime)
        {
            violations.Add(new VexRawSchemaViolation(field, $"Field '{field}' must be a date, got {value.BsonType}"));
        }
    }

    private static void ValidateContentField(BsonDocument doc, List<VexRawSchemaViolation> violations)
    {
        var value = doc["content"];
        if (value.IsBsonNull)
        {
            return;
        }

        if (!ValidContentTypes.Contains(value.BsonType))
        {
            violations.Add(new VexRawSchemaViolation("content", $"Field 'content' must be binary or string, got {value.BsonType}"));
        }
    }

    private static void ValidateGridFsObjectIdField(BsonDocument doc, List<VexRawSchemaViolation> violations)
    {
        var value = doc["gridFsObjectId"];
        if (!ValidGridFsTypes.Contains(value.BsonType))
        {
            violations.Add(new VexRawSchemaViolation("gridFsObjectId", $"Field 'gridFsObjectId' must be objectId, null, or string, got {value.BsonType}"));
        }
    }

    // Metadata must be an object; each entry value must be a string (null is tolerated).
    private static void ValidateMetadataField(BsonDocument doc, List<VexRawSchemaViolation> violations)
    {
        var value = doc["metadata"];
        if (value.IsBsonNull)
        {
            return;
        }

        if (value.BsonType != BsonType.Document)
        {
            violations.Add(new VexRawSchemaViolation("metadata", $"Field 'metadata' must be an object, got {value.BsonType}"));
            return;
        }

        var metadata = value.AsBsonDocument;
        foreach (var element in metadata)
        {
            if (!element.Value.IsString && !element.Value.IsBsonNull)
            {
                // Fix: message now matches the check above, which accepts null values.
                violations.Add(new VexRawSchemaViolation($"metadata.{element.Name}", $"Metadata field '{element.Name}' must be a string or null, got {element.Value.BsonType}"));
            }
        }
    }
}
/// <summary>
/// Represents a schema violation found during validation.
/// </summary>
/// <param name="Field">Dotted path of the offending field (e.g. "metadata.source").</param>
/// <param name="Message">Human-readable description of the violation.</param>
public sealed record VexRawSchemaViolation(string Field, string Message);
/// <summary>
/// Result of validating a single VEX raw document.
/// </summary>
/// <param name="DocumentId">The document's "_id" value, or a placeholder when missing.</param>
/// <param name="IsValid">True when no violations were recorded.</param>
/// <param name="Violations">All violations found; empty when the document is valid.</param>
public sealed record VexRawValidationResult(
string DocumentId,
bool IsValid,
ImmutableArray<VexRawSchemaViolation> Violations);
/// <summary>
/// Result of validating a batch of VEX raw documents.
/// </summary>
/// <param name="TotalCount">Number of documents examined.</param>
/// <param name="ValidCount">Number of documents with no violations.</param>
/// <param name="InvalidCount">Number of documents with at least one violation.</param>
/// <param name="InvalidDocuments">Per-document results for the invalid documents only.</param>
public sealed record VexRawBatchValidationResult(
int TotalCount,
int ValidCount,
int InvalidCount,
ImmutableArray<VexRawValidationResult> InvalidDocuments);

View File

@@ -47,6 +47,7 @@ public static class VexMongoMappingRegistry
RegisterClassMap<VexConnectorStateDocument>();
RegisterClassMap<VexConsensusHoldRecord>();
RegisterClassMap<AirgapImportRecord>();
RegisterClassMap<VexTimelineEventRecord>();
}
private static void RegisterClassMap<TDocument>()
@@ -80,5 +81,7 @@ public static class VexMongoCollectionNames
public const string Attestations = "vex.attestations";
public const string Observations = "vex.observations";
public const string Linksets = "vex.linksets";
public const string LinksetEvents = "vex.linkset_events";
public const string AirgapImports = "vex.airgap_imports";
public const string TimelineEvents = "vex.timeline_events";
}

View File

@@ -490,6 +490,10 @@ internal sealed class VexLinksetRecord
public DateTime CreatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
public DateTime UpdatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
public List<VexObservationLinksetObservationRecord> Observations { get; set; } = new();
public List<VexLinksetDisagreementRecord> Disagreements { get; set; } = new();
}
@@ -1310,6 +1314,21 @@ internal sealed class VexConnectorStateDocument
public string? LastFailureReason { get; set; }
= null;
public DateTime? LastHeartbeatAt { get; set; }
= null;
public string? LastHeartbeatStatus { get; set; }
= null;
public string? LastArtifactHash { get; set; }
= null;
public string? LastArtifactKind { get; set; }
= null;
public string? LastCheckpoint { get; set; }
= null;
public static VexConnectorStateDocument FromRecord(VexConnectorState state)
=> new()
{
@@ -1323,6 +1342,11 @@ internal sealed class VexConnectorStateDocument
FailureCount = state.FailureCount,
NextEligibleRun = state.NextEligibleRun?.UtcDateTime,
LastFailureReason = state.LastFailureReason,
LastHeartbeatAt = state.LastHeartbeatAt?.UtcDateTime,
LastHeartbeatStatus = state.LastHeartbeatStatus,
LastArtifactHash = state.LastArtifactHash,
LastArtifactKind = state.LastArtifactKind,
LastCheckpoint = state.LastCheckpoint,
};
public VexConnectorState ToRecord()
@@ -1336,6 +1360,9 @@ internal sealed class VexConnectorStateDocument
var nextEligibleRun = NextEligibleRun.HasValue
? new DateTimeOffset(DateTime.SpecifyKind(NextEligibleRun.Value, DateTimeKind.Utc))
: (DateTimeOffset?)null;
var lastHeartbeatAt = LastHeartbeatAt.HasValue
? new DateTimeOffset(DateTime.SpecifyKind(LastHeartbeatAt.Value, DateTimeKind.Utc))
: (DateTimeOffset?)null;
return new VexConnectorState(
ConnectorId,
@@ -1345,6 +1372,52 @@ internal sealed class VexConnectorStateDocument
lastSuccessAt,
FailureCount,
nextEligibleRun,
string.IsNullOrWhiteSpace(LastFailureReason) ? null : LastFailureReason);
string.IsNullOrWhiteSpace(LastFailureReason) ? null : LastFailureReason,
lastHeartbeatAt,
LastHeartbeatStatus,
LastArtifactHash,
LastArtifactKind,
LastCheckpoint);
}
}
/// <summary>
/// MongoDB record for a published linkset event, capturing the linkset's
/// identity plus snapshots of its observations and disagreements.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexLinksetEventRecord
{
// Event identifier; doubles as the Mongo primary key.
[BsonId]
public string Id { get; set; } = default!;
// Event discriminator (e.g. created vs. updated).
public string EventType { get; set; } = default!;
public string Tenant { get; set; } = default!;
// Identity of the linkset the event refers to.
public string LinksetId { get; set; } = default!;
public string VulnerabilityId { get; set; } = default!;
public string ProductKey { get; set; } = default!;
// Snapshots of the linkset's member observations and provider disagreements
// at the time the event was published.
public List<VexLinksetEventObservationRecord> Observations { get; set; } = new();
public List<VexLinksetDisagreementRecord> Disagreements { get; set; } = new();
// UTC timestamps; defaults are placeholders until the publisher assigns them.
public DateTime CreatedAtUtc { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
public DateTime PublishedAtUtc { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
// Denormalized counts so consumers can filter without loading the lists.
public int ConflictCount { get; set; } = 0;
public int ObservationCount { get; set; } = 0;
}
/// <summary>
/// Snapshot of a single observation as embedded in a linkset event.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexLinksetEventObservationRecord
{
public string ObservationId { get; set; } = default!;
public string ProviderId { get; set; } = default!;
// Provider's asserted VEX status for the product/vulnerability pair.
public string Status { get; set; } = default!;
// Optional provider confidence; null when the provider supplied none.
public double? Confidence { get; set; } = null;
}

View File

@@ -0,0 +1,169 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// Default implementation of <see cref="IVexTimelineEventEmitter"/> that persists events to MongoDB.
/// Emission is best-effort: persistence failures are logged at warning level and
/// swallowed so timeline bookkeeping never blocks the main pipelines.
/// </summary>
internal sealed class VexTimelineEventEmitter : IVexTimelineEventEmitter
{
    private readonly IVexTimelineEventStore _store;
    private readonly ILogger<VexTimelineEventEmitter> _logger;
    private readonly TimeProvider _timeProvider;

    public VexTimelineEventEmitter(
        IVexTimelineEventStore store,
        ILogger<VexTimelineEventEmitter> logger,
        TimeProvider? timeProvider = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Emits an "observation ingested" event carrying the observation id (as an
    /// attribute) and the evidence hash.
    /// </summary>
    public async ValueTask EmitObservationIngestAsync(
        string tenant,
        string providerId,
        string streamId,
        string traceId,
        string observationId,
        string evidenceHash,
        string justificationSummary,
        ImmutableDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        var eventAttributes = (attributes ?? ImmutableDictionary<string, string>.Empty)
            .SetItem(VexTimelineEventAttributes.ObservationId, observationId);

        var evt = new TimelineEvent(
            eventId: GenerateEventId(tenant, providerId, VexTimelineEventTypes.ObservationIngested),
            tenant: tenant,
            providerId: providerId,
            streamId: streamId,
            eventType: VexTimelineEventTypes.ObservationIngested,
            traceId: traceId,
            justificationSummary: justificationSummary,
            createdAt: _timeProvider.GetUtcNow(),
            evidenceHash: evidenceHash,
            payloadHash: null,
            attributes: eventAttributes);

        await EmitAsync(evt, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Emits a "linkset updated" event carrying the linkset identity (as
    /// attributes) and the payload hash.
    /// </summary>
    public async ValueTask EmitLinksetUpdateAsync(
        string tenant,
        string providerId,
        string streamId,
        string traceId,
        string linksetId,
        string vulnerabilityId,
        string productKey,
        string payloadHash,
        string justificationSummary,
        ImmutableDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        var eventAttributes = (attributes ?? ImmutableDictionary<string, string>.Empty)
            .SetItem(VexTimelineEventAttributes.LinksetId, linksetId)
            .SetItem(VexTimelineEventAttributes.VulnerabilityId, vulnerabilityId)
            .SetItem(VexTimelineEventAttributes.ProductKey, productKey);

        var evt = new TimelineEvent(
            eventId: GenerateEventId(tenant, providerId, VexTimelineEventTypes.LinksetUpdated),
            tenant: tenant,
            providerId: providerId,
            streamId: streamId,
            eventType: VexTimelineEventTypes.LinksetUpdated,
            traceId: traceId,
            justificationSummary: justificationSummary,
            createdAt: _timeProvider.GetUtcNow(),
            evidenceHash: null,
            payloadHash: payloadHash,
            attributes: eventAttributes);

        await EmitAsync(evt, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Persists a single timeline event. Any failure is logged and swallowed.
    /// </summary>
    public async ValueTask EmitAsync(
        TimelineEvent evt,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evt);

        try
        {
            // The store returns the event id, but nothing downstream needs it
            // here, so the result is deliberately discarded.
            await _store.InsertAsync(evt, cancellationToken).ConfigureAwait(false);
            _logger.LogDebug(
                "Timeline event emitted: {EventType} for tenant {Tenant}, provider {ProviderId}, trace {TraceId}",
                evt.EventType,
                evt.Tenant,
                evt.ProviderId,
                evt.TraceId);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Failed to emit timeline event {EventType} for tenant {Tenant}, provider {ProviderId}: {Message}",
                evt.EventType,
                evt.Tenant,
                evt.ProviderId,
                ex.Message);
            // Don't throw - timeline events are non-critical and shouldn't block main operations
        }
    }

    /// <summary>
    /// Persists a batch of events for a tenant. Any failure is logged and swallowed.
    /// </summary>
    public async ValueTask EmitBatchAsync(
        string tenant,
        IEnumerable<TimelineEvent> events,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(events);

        var eventList = events.ToList();
        if (eventList.Count == 0)
        {
            return;
        }

        try
        {
            var insertedCount = await _store.InsertManyAsync(tenant, eventList, cancellationToken)
                .ConfigureAwait(false);
            _logger.LogDebug(
                "Batch timeline events emitted: {InsertedCount}/{TotalCount} for tenant {Tenant}",
                insertedCount,
                eventList.Count,
                tenant);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Failed to emit batch timeline events for tenant {Tenant}: {Message}",
                tenant,
                ex.Message);
            // Don't throw - timeline events are non-critical
        }
    }

    /// <summary>
    /// Generates a unique event ID by hashing tenant, provider, event type, the
    /// current timestamp, and a fresh GUID. Note: the GUID component makes the
    /// ID unique per call — it is NOT deterministic (the previous doc claimed
    /// determinism, which the GUID contradicts).
    /// </summary>
    private string GenerateEventId(string tenant, string providerId, string eventType)
    {
        var timestamp = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
        var input = $"{tenant}|{providerId}|{eventType}|{timestamp}|{Guid.NewGuid():N}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"evt:{Convert.ToHexString(hash).ToLowerInvariant()[..32]}";
    }
}

View File

@@ -0,0 +1,170 @@
using System;
using System.Linq;
using StellaOps.Excititor.Core.Canonicalization;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests.Canonicalization;
public class VexAdvisoryKeyCanonicalizerTests
{
private readonly VexAdvisoryKeyCanonicalizer _canonicalizer = new();
[Theory]
[InlineData("CVE-2025-12345", "CVE-2025-12345", VexAdvisoryScope.Global)]
[InlineData("cve-2025-12345", "CVE-2025-12345", VexAdvisoryScope.Global)]
[InlineData("CVE-2024-1234567", "CVE-2024-1234567", VexAdvisoryScope.Global)]
public void Canonicalize_Cve_ReturnsGlobalScope(string identifier, string expectedAdvisoryKey, VexAdvisoryScope expectedAdvisoryScope)
{
    // CVE identifiers are upper-cased, become the advisory key verbatim, and
    // carry global scope with a single original "cve" link.
    var canonical = _canonicalizer.Canonicalize(identifier);

    Assert.Equal(expectedAdvisoryKey, canonical.AdvisoryKey);
    Assert.Equal(expectedAdvisoryScope, canonical.Scope);
    Assert.Single(canonical.Links);
    Assert.Equal("cve", canonical.Links[0].Type);
    Assert.True(canonical.Links[0].IsOriginal);
}
[Theory]
[InlineData("GHSA-abcd-efgh-ijkl", "ECO:GHSA-ABCD-EFGH-IJKL", VexAdvisoryScope.Ecosystem)]
[InlineData("ghsa-1234-5678-90ab", "ECO:GHSA-1234-5678-90AB", VexAdvisoryScope.Ecosystem)]
public void Canonicalize_Ghsa_ReturnsEcosystemScope(string identifier, string expectedAdvisoryKey, VexAdvisoryScope expectedAdvisoryScope)
{
    // GHSA identifiers gain the "ECO:" prefix, are upper-cased, and are
    // classified under ecosystem scope.
    var canonical = _canonicalizer.Canonicalize(identifier);

    Assert.Equal(expectedAdvisoryKey, canonical.AdvisoryKey);
    Assert.Equal(expectedAdvisoryScope, canonical.Scope);
    Assert.Equal("ghsa", canonical.Links[0].Type);
}
[Theory]
[InlineData("RHSA-2025:1234", "VND:RHSA-2025:1234", VexAdvisoryScope.Vendor)]
[InlineData("RHBA-2024:5678", "VND:RHBA-2024:5678", VexAdvisoryScope.Vendor)]
public void Canonicalize_Rhsa_ReturnsVendorScope(string identifier, string expectedAdvisoryKey, VexAdvisoryScope expectedAdvisoryScope)
{
    // Red Hat advisories gain the "VND:" prefix and vendor scope; both RHSA
    // and RHBA families map to the "rhsa" link type.
    var canonical = _canonicalizer.Canonicalize(identifier);

    Assert.Equal(expectedAdvisoryKey, canonical.AdvisoryKey);
    Assert.Equal(expectedAdvisoryScope, canonical.Scope);
    Assert.Equal("rhsa", canonical.Links[0].Type);
}
[Theory]
[InlineData("DSA-5678", "DST:DSA-5678", VexAdvisoryScope.Distribution)]
[InlineData("DSA-1234-1", "DST:DSA-1234-1", VexAdvisoryScope.Distribution)]
[InlineData("USN-6543", "DST:USN-6543", VexAdvisoryScope.Distribution)]
[InlineData("USN-1234-2", "DST:USN-1234-2", VexAdvisoryScope.Distribution)]
public void Canonicalize_DistributionIds_ReturnsDistributionScope(string input, string expectedKey, VexAdvisoryScope expectedScope)
{
var result = _canonicalizer.Canonicalize(input);
Assert.Equal(expectedKey, result.AdvisoryKey);
Assert.Equal(expectedScope, result.Scope);
}
[Fact]
public void Canonicalize_WithAliases_PreservesAllLinks()
{
var aliases = new[] { "RHSA-2025:1234", "GHSA-abcd-efgh-ijkl" };
var result = _canonicalizer.Canonicalize("CVE-2025-12345", aliases);
Assert.Equal("CVE-2025-12345", result.AdvisoryKey);
Assert.Equal(3, result.Links.Length);
var original = result.Links.Single(l => l.IsOriginal);
Assert.Equal("CVE-2025-12345", original.Identifier);
Assert.Equal("cve", original.Type);
var nonOriginal = result.Links.Where(l => !l.IsOriginal).ToArray();
Assert.Equal(2, nonOriginal.Length);
Assert.Contains(nonOriginal, l => l.Type == "rhsa");
Assert.Contains(nonOriginal, l => l.Type == "ghsa");
}
[Fact]
public void Canonicalize_WithDuplicateAliases_DeduplicatesLinks()
{
var aliases = new[] { "CVE-2025-12345", "cve-2025-12345", "RHSA-2025:1234" };
var result = _canonicalizer.Canonicalize("CVE-2025-12345", aliases);
// Should have 2 links: original CVE and RHSA (duplicates removed)
Assert.Equal(2, result.Links.Length);
}
[Fact]
public void Canonicalize_UnknownFormat_ReturnsUnknownScope()
{
var result = _canonicalizer.Canonicalize("VENDOR-CUSTOM-12345");
Assert.Equal("UNK:VENDOR-CUSTOM-12345", result.AdvisoryKey);
Assert.Equal(VexAdvisoryScope.Unknown, result.Scope);
Assert.Equal("other", result.Links[0].Type);
}
[Fact]
public void Canonicalize_NullInput_ThrowsArgumentException()
{
Assert.ThrowsAny<ArgumentException>(() => _canonicalizer.Canonicalize(null!));
}
[Fact]
public void Canonicalize_EmptyInput_ThrowsArgumentException()
{
Assert.ThrowsAny<ArgumentException>(() => _canonicalizer.Canonicalize(""));
}
[Fact]
public void Canonicalize_WhitespaceInput_ThrowsArgumentException()
{
Assert.ThrowsAny<ArgumentException>(() => _canonicalizer.Canonicalize(" "));
}
[Fact]
public void ExtractCveFromAliases_WithCve_ReturnsCve()
{
var aliases = new[] { "RHSA-2025:1234", "CVE-2025-99999", "GHSA-xxxx-yyyy-zzzz" };
var cve = _canonicalizer.ExtractCveFromAliases(aliases);
Assert.Equal("CVE-2025-99999", cve);
}
[Fact]
public void ExtractCveFromAliases_WithoutCve_ReturnsNull()
{
var aliases = new[] { "RHSA-2025:1234", "GHSA-xxxx-yyyy-zzzz" };
var cve = _canonicalizer.ExtractCveFromAliases(aliases);
Assert.Null(cve);
}
[Fact]
public void ExtractCveFromAliases_NullInput_ReturnsNull()
{
var cve = _canonicalizer.ExtractCveFromAliases(null);
Assert.Null(cve);
}
[Fact]
public void OriginalId_ReturnsOriginalIdentifier()
{
var result = _canonicalizer.Canonicalize("CVE-2025-12345");
Assert.Equal("CVE-2025-12345", result.OriginalId);
}
[Fact]
public void Aliases_ReturnsNonOriginalIdentifiers()
{
var aliases = new[] { "RHSA-2025:1234", "GHSA-abcd-efgh-ijkl" };
var result = _canonicalizer.Canonicalize("CVE-2025-12345", aliases);
var aliasArray = result.Aliases.ToArray();
Assert.Equal(2, aliasArray.Length);
Assert.Contains("RHSA-2025:1234", aliasArray);
Assert.Contains("GHSA-abcd-efgh-ijkl", aliasArray);
}
}

View File

@@ -0,0 +1,235 @@
using System;
using System.Linq;
using StellaOps.Excititor.Core.Canonicalization;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests.Canonicalization;
/// <summary>
/// Unit tests for <c>VexProductKeyCanonicalizer</c>: key-type/scope
/// classification (purl, CPE, RPM NEVRA, OCI image, platform), canonical-key
/// preference (purl over CPE over the raw key), link preservation and
/// deduplication, input validation, and purl extraction helpers.
/// </summary>
public class VexProductKeyCanonicalizerTests
{
    // System under test; xUnit creates a fresh test-class instance per test.
    private readonly VexProductKeyCanonicalizer _canonicalizer = new();

    /// <summary>purl inputs keep their value (scheme lower-cased) with Package scope and a single original "purl" link.</summary>
    [Theory]
    [InlineData("pkg:npm/leftpad@1.0.0", "pkg:npm/leftpad@1.0.0", VexProductKeyType.Purl, VexProductScope.Package)]
    [InlineData("pkg:maven/org.apache.log4j/log4j-core@2.17.0", "pkg:maven/org.apache.log4j/log4j-core@2.17.0", VexProductKeyType.Purl, VexProductScope.Package)]
    [InlineData("PKG:pypi/requests@2.28.0", "pkg:pypi/requests@2.28.0", VexProductKeyType.Purl, VexProductScope.Package)]
    public void Canonicalize_Purl_ReturnsPackageScope(string input, string expectedKey, VexProductKeyType expectedType, VexProductScope expectedScope)
    {
        var result = _canonicalizer.Canonicalize(input);
        Assert.Equal(expectedKey, result.ProductKey);
        Assert.Equal(expectedType, result.KeyType);
        Assert.Equal(expectedScope, result.Scope);
        Assert.Single(result.Links);
        Assert.Equal("purl", result.Links[0].Type);
        Assert.True(result.Links[0].IsOriginal);
    }

    /// <summary>CPE 2.3 and legacy cpe:/ URIs (scheme lower-cased) classify as Cpe with Component scope.</summary>
    [Theory]
    [InlineData("cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*", "cpe:2.3:a:apache:log4j:2.14.0:*:*:*:*:*:*:*", VexProductKeyType.Cpe, VexProductScope.Component)]
    [InlineData("cpe:/a:apache:log4j:2.14.0", "cpe:/a:apache:log4j:2.14.0", VexProductKeyType.Cpe, VexProductScope.Component)]
    [InlineData("CPE:2.3:a:vendor:product:1.0", "cpe:2.3:a:vendor:product:1.0", VexProductKeyType.Cpe, VexProductScope.Component)]
    public void Canonicalize_Cpe_ReturnsComponentScope(string input, string expectedKey, VexProductKeyType expectedType, VexProductScope expectedScope)
    {
        var result = _canonicalizer.Canonicalize(input);
        Assert.Equal(expectedKey, result.ProductKey);
        Assert.Equal(expectedType, result.KeyType);
        Assert.Equal(expectedScope, result.Scope);
        Assert.Equal("cpe", result.Links[0].Type);
    }

    /// <summary>RPM NEVRA strings get an "rpm:"-prefixed key, RpmNevra type, and OsPackage scope.</summary>
    [Theory]
    [InlineData("openssl-3.0.9-1.el9.x86_64", VexProductKeyType.RpmNevra, VexProductScope.OsPackage)]
    [InlineData("kernel-5.14.0-284.25.1.el9_2.x86_64", VexProductKeyType.RpmNevra, VexProductScope.OsPackage)]
    public void Canonicalize_RpmNevra_ReturnsOsPackageScope(string input, VexProductKeyType expectedType, VexProductScope expectedScope)
    {
        var result = _canonicalizer.Canonicalize(input);
        Assert.StartsWith("rpm:", result.ProductKey);
        Assert.Equal(expectedType, result.KeyType);
        Assert.Equal(expectedScope, result.Scope);
        Assert.Equal("rpmnevra", result.Links[0].Type);
    }

    /// <summary>"oci:" references (digest- or tag-addressed) pass through unchanged with Container scope.</summary>
    [Theory]
    [InlineData("oci:ghcr.io/example/app@sha256:abc123", VexProductKeyType.OciImage, VexProductScope.Container)]
    [InlineData("oci:docker.io/library/nginx:1.25", VexProductKeyType.OciImage, VexProductScope.Container)]
    public void Canonicalize_OciImage_ReturnsContainerScope(string input, VexProductKeyType expectedType, VexProductScope expectedScope)
    {
        var result = _canonicalizer.Canonicalize(input);
        Assert.Equal(input, result.ProductKey);
        Assert.Equal(expectedType, result.KeyType);
        Assert.Equal(expectedScope, result.Scope);
        Assert.Equal("ociimage", result.Links[0].Type);
    }

    /// <summary>"platform:" identifiers pass through unchanged with Platform type and scope.</summary>
    [Theory]
    [InlineData("platform:redhat:rhel:9", VexProductKeyType.Platform, VexProductScope.Platform)]
    [InlineData("platform:ubuntu:jammy:22.04", VexProductKeyType.Platform, VexProductScope.Platform)]
    public void Canonicalize_Platform_ReturnsPlatformScope(string input, VexProductKeyType expectedType, VexProductScope expectedScope)
    {
        var result = _canonicalizer.Canonicalize(input);
        Assert.Equal(input, result.ProductKey);
        Assert.Equal(expectedType, result.KeyType);
        Assert.Equal(expectedScope, result.Scope);
        Assert.Equal("platform", result.Links[0].Type);
    }

    /// <summary>An explicit purl wins as the canonical key; the raw original key is kept as the IsOriginal link.</summary>
    [Fact]
    public void Canonicalize_WithPurl_PrefersPurlAsCanonicalKey()
    {
        var result = _canonicalizer.Canonicalize(
            originalKey: "openssl-3.0.9",
            purl: "pkg:rpm/redhat/openssl@3.0.9");
        Assert.Equal("pkg:rpm/redhat/openssl@3.0.9", result.ProductKey);
        Assert.Equal(VexProductScope.Package, result.Scope);
        Assert.Equal(2, result.Links.Length);
        var original = result.Links.Single(l => l.IsOriginal);
        Assert.Equal("openssl-3.0.9", original.Identifier);
        var purlLink = result.Links.Single(l => l.Type == "purl");
        Assert.Equal("pkg:rpm/redhat/openssl@3.0.9", purlLink.Identifier);
    }

    /// <summary>When no purl is supplied, an explicit CPE becomes the canonical key.</summary>
    [Fact]
    public void Canonicalize_WithCpe_PrefersCpeWhenNoPurl()
    {
        var result = _canonicalizer.Canonicalize(
            originalKey: "openssl",
            cpe: "cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*", result.ProductKey);
        Assert.Equal(VexProductScope.Component, result.Scope);
        Assert.Equal(2, result.Links.Length);
    }

    /// <summary>Mixed component identifiers all become links; the purl among them is chosen as canonical key.</summary>
    [Fact]
    public void Canonicalize_WithComponentIdentifiers_PreservesAllLinks()
    {
        var componentIds = new[]
        {
            "pkg:rpm/redhat/openssl@3.0.9",
            "cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*"
        };
        var result = _canonicalizer.Canonicalize(
            originalKey: "openssl-3.0.9",
            componentIdentifiers: componentIds);
        // PURL should be chosen as canonical key
        Assert.Equal("pkg:rpm/redhat/openssl@3.0.9", result.ProductKey);
        Assert.Equal(3, result.Links.Length);
        var original = result.Links.Single(l => l.IsOriginal);
        Assert.Equal("openssl-3.0.9", original.Identifier);
    }

    /// <summary>A component identifier that duplicates the original key collapses to one link.</summary>
    [Fact]
    public void Canonicalize_WithDuplicates_DeduplicatesLinks()
    {
        var componentIds = new[]
        {
            "pkg:npm/leftpad@1.0.0",
            "pkg:npm/leftpad@1.0.0", // Duplicate
        };
        var result = _canonicalizer.Canonicalize(
            originalKey: "pkg:npm/leftpad@1.0.0",
            componentIdentifiers: componentIds);
        Assert.Single(result.Links);
    }

    /// <summary>Unrecognized formats get a "product:" prefix, Other type, and Unknown scope.</summary>
    [Fact]
    public void Canonicalize_UnknownFormat_ReturnsOtherType()
    {
        var result = _canonicalizer.Canonicalize("some-custom-product-id");
        Assert.Equal("product:some-custom-product-id", result.ProductKey);
        Assert.Equal(VexProductKeyType.Other, result.KeyType);
        Assert.Equal(VexProductScope.Unknown, result.Scope);
        Assert.Equal("other", result.Links[0].Type);
    }

    /// <summary>Null input is rejected with an ArgumentException (or subtype).</summary>
    [Fact]
    public void Canonicalize_NullInput_ThrowsArgumentException()
    {
        Assert.ThrowsAny<ArgumentException>(() => _canonicalizer.Canonicalize(null!));
    }

    /// <summary>Empty input is rejected with an ArgumentException (or subtype).</summary>
    [Fact]
    public void Canonicalize_EmptyInput_ThrowsArgumentException()
    {
        Assert.ThrowsAny<ArgumentException>(() => _canonicalizer.Canonicalize(""));
    }

    /// <summary>The first "pkg:"-style identifier is extracted from a mixed list.</summary>
    [Fact]
    public void ExtractPurlFromIdentifiers_WithPurl_ReturnsPurl()
    {
        var identifiers = new[]
        {
            "cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*",
            "pkg:rpm/redhat/openssl@3.0.9",
            "openssl-3.0.9"
        };
        var purl = _canonicalizer.ExtractPurlFromIdentifiers(identifiers);
        Assert.Equal("pkg:rpm/redhat/openssl@3.0.9", purl);
    }

    /// <summary>No purl in the list yields null rather than a fabricated value.</summary>
    [Fact]
    public void ExtractPurlFromIdentifiers_WithoutPurl_ReturnsNull()
    {
        var identifiers = new[]
        {
            "cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*",
            "openssl-3.0.9"
        };
        var purl = _canonicalizer.ExtractPurlFromIdentifiers(identifiers);
        Assert.Null(purl);
    }

    /// <summary>A null identifier list yields null instead of throwing.</summary>
    [Fact]
    public void ExtractPurlFromIdentifiers_NullInput_ReturnsNull()
    {
        var purl = _canonicalizer.ExtractPurlFromIdentifiers(null);
        Assert.Null(purl);
    }

    /// <summary>OriginalKey round-trips the identifier that was canonicalized.</summary>
    [Fact]
    public void OriginalKey_ReturnsOriginalIdentifier()
    {
        var result = _canonicalizer.Canonicalize("pkg:npm/leftpad@1.0.0");
        Assert.Equal("pkg:npm/leftpad@1.0.0", result.OriginalKey);
    }

    /// <summary>The Purl convenience property surfaces the purl link's value.</summary>
    [Fact]
    public void Purl_ReturnsPurlLink()
    {
        var result = _canonicalizer.Canonicalize(
            originalKey: "openssl",
            purl: "pkg:rpm/redhat/openssl@3.0.9");
        Assert.Equal("pkg:rpm/redhat/openssl@3.0.9", result.Purl);
    }

    /// <summary>The Cpe convenience property surfaces the CPE link's value.</summary>
    [Fact]
    public void Cpe_ReturnsCpeLink()
    {
        var result = _canonicalizer.Canonicalize(
            originalKey: "openssl",
            cpe: "cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*");
        Assert.Equal("cpe:2.3:a:openssl:openssl:3.0.9:*:*:*:*:*:*:*", result.Cpe);
    }
}

View File

@@ -11,6 +11,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />

View File

@@ -0,0 +1,156 @@
using System;
using System.Collections.Immutable;
using StellaOps.Excititor.Core.Observations;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests;
/// <summary>
/// Unit tests for <c>TimelineEvent</c> construction: string-field trimming,
/// tenant lower-casing, required-field validation, optional-field defaults,
/// attribute filtering, and the event-type / attribute-key constants.
/// </summary>
public class TimelineEventTests
{
    /// <summary>All string fields are trimmed, the tenant is lower-cased, and timestamps/hashes/attributes survive unchanged.</summary>
    [Fact]
    public void Constructor_NormalizesFields_AndPreservesValues()
    {
        var now = DateTimeOffset.UtcNow;
        var attributes = ImmutableDictionary<string, string>.Empty
            .Add("key1", "value1")
            .Add("key2", "value2");
        // Every string argument carries surrounding whitespace to exercise trimming.
        var evt = new TimelineEvent(
            eventId: " evt-001 ",
            tenant: " TENANT-A ",
            providerId: " provider-x ",
            streamId: " csaf ",
            eventType: " vex.observation.ingested ",
            traceId: " trace-abc-123 ",
            justificationSummary: " Component not present in runtime ",
            createdAt: now,
            evidenceHash: " sha256:deadbeef ",
            payloadHash: " sha256:cafebabe ",
            attributes: attributes);
        Assert.Equal("evt-001", evt.EventId);
        Assert.Equal("tenant-a", evt.Tenant); // lowercase
        Assert.Equal("provider-x", evt.ProviderId);
        Assert.Equal("csaf", evt.StreamId);
        Assert.Equal("vex.observation.ingested", evt.EventType);
        Assert.Equal("trace-abc-123", evt.TraceId);
        Assert.Equal("Component not present in runtime", evt.JustificationSummary);
        Assert.Equal(now, evt.CreatedAt);
        Assert.Equal("sha256:deadbeef", evt.EvidenceHash);
        Assert.Equal("sha256:cafebabe", evt.PayloadHash);
        Assert.Equal(2, evt.Attributes.Count);
        Assert.Equal("value1", evt.Attributes["key1"]);
    }

    /// <summary>Null or whitespace-only eventId/tenant are rejected with ArgumentException.</summary>
    [Fact]
    public void Constructor_ThrowsOnNullOrWhiteSpaceRequiredFields()
    {
        var now = DateTimeOffset.UtcNow;
        // Null eventId.
        Assert.Throws<ArgumentException>(() => new TimelineEvent(
            eventId: null!,
            tenant: "tenant",
            providerId: "provider",
            streamId: "stream",
            eventType: "type",
            traceId: "trace",
            justificationSummary: "summary",
            createdAt: now));
        // Whitespace-only eventId.
        Assert.Throws<ArgumentException>(() => new TimelineEvent(
            eventId: " ",
            tenant: "tenant",
            providerId: "provider",
            streamId: "stream",
            eventType: "type",
            traceId: "trace",
            justificationSummary: "summary",
            createdAt: now));
        // Null tenant.
        Assert.Throws<ArgumentException>(() => new TimelineEvent(
            eventId: "evt-001",
            tenant: null!,
            providerId: "provider",
            streamId: "stream",
            eventType: "type",
            traceId: "trace",
            justificationSummary: "summary",
            createdAt: now));
    }

    /// <summary>Optional arguments default safely: empty summary, null hashes, empty attribute map.</summary>
    [Fact]
    public void Constructor_HandlesNullOptionalFields()
    {
        var now = DateTimeOffset.UtcNow;
        var evt = new TimelineEvent(
            eventId: "evt-001",
            tenant: "tenant-a",
            providerId: "provider-x",
            streamId: "csaf",
            eventType: "vex.observation.ingested",
            traceId: "trace-abc-123",
            justificationSummary: null!,
            createdAt: now,
            evidenceHash: null,
            payloadHash: null,
            attributes: null);
        Assert.Equal(string.Empty, evt.JustificationSummary);
        Assert.Null(evt.EvidenceHash);
        Assert.Null(evt.PayloadHash);
        Assert.Empty(evt.Attributes);
    }

    /// <summary>Entries with blank keys or null values are dropped; valid pairs are kept.</summary>
    [Fact]
    public void Constructor_FiltersNullAttributeKeysAndValues()
    {
        var now = DateTimeOffset.UtcNow;
        var attributes = ImmutableDictionary<string, string>.Empty
            .Add("valid-key", "valid-value")
            .Add(" ", "bad-key")
            .Add("null-value", null!);
        var evt = new TimelineEvent(
            eventId: "evt-001",
            tenant: "tenant-a",
            providerId: "provider-x",
            streamId: "csaf",
            eventType: "vex.observation.ingested",
            traceId: "trace-abc-123",
            justificationSummary: "summary",
            createdAt: now,
            attributes: attributes);
        // Only valid key-value pair should remain
        Assert.Single(evt.Attributes);
        Assert.True(evt.Attributes.ContainsKey("valid-key"));
    }

    /// <summary>Pins the wire values of the VexTimelineEventTypes constants (consumed by downstream stores/streams).</summary>
    [Fact]
    public void EventTypes_Constants_AreCorrect()
    {
        Assert.Equal("vex.observation.ingested", VexTimelineEventTypes.ObservationIngested);
        Assert.Equal("vex.observation.updated", VexTimelineEventTypes.ObservationUpdated);
        Assert.Equal("vex.observation.superseded", VexTimelineEventTypes.ObservationSuperseded);
        Assert.Equal("vex.linkset.created", VexTimelineEventTypes.LinksetCreated);
        Assert.Equal("vex.linkset.updated", VexTimelineEventTypes.LinksetUpdated);
        Assert.Equal("vex.linkset.conflict_detected", VexTimelineEventTypes.LinksetConflictDetected);
        Assert.Equal("vex.linkset.conflict_resolved", VexTimelineEventTypes.LinksetConflictResolved);
        Assert.Equal("vex.evidence.sealed", VexTimelineEventTypes.EvidenceSealed);
        Assert.Equal("vex.attestation.attached", VexTimelineEventTypes.AttestationAttached);
        Assert.Equal("vex.attestation.verified", VexTimelineEventTypes.AttestationVerified);
    }

    /// <summary>Pins the wire values of the VexTimelineEventAttributes key constants.</summary>
    [Fact]
    public void AttributeKeys_Constants_AreCorrect()
    {
        Assert.Equal("observation_id", VexTimelineEventAttributes.ObservationId);
        Assert.Equal("linkset_id", VexTimelineEventAttributes.LinksetId);
        Assert.Equal("vulnerability_id", VexTimelineEventAttributes.VulnerabilityId);
        Assert.Equal("product_key", VexTimelineEventAttributes.ProductKey);
        Assert.Equal("status", VexTimelineEventAttributes.Status);
        Assert.Equal("conflict_type", VexTimelineEventAttributes.ConflictType);
        Assert.Equal("attestation_id", VexTimelineEventAttributes.AttestationId);
    }
}

View File

@@ -0,0 +1,209 @@
using System;
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Attestation.Evidence;
using StellaOps.Excititor.Attestation.Signing;
using StellaOps.Excititor.Core.Evidence;
using StellaOps.Excititor.Core.Observations;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests;
/// <summary>
/// Unit tests for <c>VexEvidenceAttestor</c>: DSSE attestation creation over a
/// locker manifest, in-toto envelope payload structure, verification success
/// and failure paths, and the attestation predicate projection.
/// </summary>
public class VexEvidenceAttestorTests
{
    /// <summary>Attesting a manifest yields a signed manifest, a DSSE envelope, a sha256 envelope hash, and a tenant-scoped attestation id.</summary>
    [Fact]
    public async Task AttestManifestAsync_CreatesValidAttestation()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:0000000000000000000000000000000000000000000000000000000000000001",
            "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "locker:excititor:test-tenant:2025-11-27:0001",
            createdAt: DateTimeOffset.Parse("2025-11-27T10:00:00Z"),
            items: new[] { item });
        var result = await attestor.AttestManifestAsync(manifest);
        Assert.NotNull(result);
        Assert.NotNull(result.SignedManifest);
        Assert.NotNull(result.DsseEnvelopeJson);
        Assert.StartsWith("sha256:", result.DsseEnvelopeHash);
        Assert.StartsWith("attest:evidence:test-tenant:", result.AttestationId);
        Assert.NotNull(result.SignedManifest.Signature);
    }

    /// <summary>The DSSE envelope carries the in-toto payload type and a base64 payload whose statement has the expected _type/predicateType.</summary>
    [Fact]
    public async Task AttestManifestAsync_EnvelopeContainsCorrectPayload()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var result = await attestor.AttestManifestAsync(manifest);
        // Decode envelope -> base64 payload -> in-toto statement to verify structure.
        var envelope = JsonSerializer.Deserialize<JsonObject>(result.DsseEnvelopeJson);
        Assert.NotNull(envelope);
        Assert.Equal("application/vnd.in-toto+json", envelope["payloadType"]?.GetValue<string>());
        var payload = Convert.FromBase64String(envelope["payload"]?.GetValue<string>() ?? "");
        var statement = JsonSerializer.Deserialize<JsonObject>(payload);
        Assert.NotNull(statement);
        Assert.Equal(VexEvidenceInTotoStatement.InTotoStatementType, statement["_type"]?.GetValue<string>());
        Assert.Equal(VexEvidenceInTotoStatement.EvidenceLockerPredicateType, statement["predicateType"]?.GetValue<string>());
    }

    /// <summary>Round-trip: an attestation produced for a manifest verifies against that same manifest and reports an envelope hash diagnostic.</summary>
    [Fact]
    public async Task VerifyAttestationAsync_ReturnsValidForCorrectAttestation()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var attestation = await attestor.AttestManifestAsync(manifest);
        var verification = await attestor.VerifyAttestationAsync(manifest, attestation.DsseEnvelopeJson);
        Assert.True(verification.IsValid);
        Assert.Null(verification.FailureReason);
        Assert.True(verification.Diagnostics.ContainsKey("envelope_hash"));
    }

    /// <summary>Verifying against a manifest with a different id fails with a "Manifest ID mismatch" reason.</summary>
    [Fact]
    public async Task VerifyAttestationAsync_ReturnsInvalidForWrongManifest()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var manifest1 = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "manifest-1",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var manifest2 = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "manifest-2",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var attestation = await attestor.AttestManifestAsync(manifest1);
        var verification = await attestor.VerifyAttestationAsync(manifest2, attestation.DsseEnvelopeJson);
        Assert.False(verification.IsValid);
        Assert.Contains("Manifest ID mismatch", verification.FailureReason);
    }

    /// <summary>Malformed envelope JSON fails verification with a "JSON parse error" reason rather than throwing.</summary>
    [Fact]
    public async Task VerifyAttestationAsync_ReturnsInvalidForInvalidJson()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var verification = await attestor.VerifyAttestationAsync(manifest, "not valid json");
        Assert.False(verification.IsValid);
        Assert.Contains("JSON parse error", verification.FailureReason);
    }

    /// <summary>An empty envelope string is rejected with the exact required-envelope failure reason.</summary>
    [Fact]
    public async Task VerifyAttestationAsync_ReturnsInvalidForEmptyEnvelope()
    {
        var signer = new FakeSigner();
        var attestor = new VexEvidenceAttestor(signer, NullLogger<VexEvidenceAttestor>.Instance);
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var verification = await attestor.VerifyAttestationAsync(manifest, "");
        Assert.False(verification.IsValid);
        Assert.Equal("DSSE envelope is required.", verification.FailureReason);
    }

    /// <summary>FromManifest copies id, tenant, merkle root, item count, and metadata into the predicate.</summary>
    [Fact]
    public void VexEvidenceAttestationPredicate_FromManifest_CapturesAllFields()
    {
        var item = new VexEvidenceSnapshotItem(
            "obs-001",
            "provider-a",
            "sha256:abc123",
            "linkset-1");
        var metadata = ImmutableDictionary<string, string>.Empty.Add("sealed", "true");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.Parse("2025-11-27T10:00:00Z"),
            items: new[] { item },
            metadata: metadata);
        var predicate = VexEvidenceAttestationPredicate.FromManifest(manifest);
        Assert.Equal("test-manifest", predicate.ManifestId);
        Assert.Equal("test-tenant", predicate.Tenant);
        Assert.Equal(manifest.MerkleRoot, predicate.MerkleRoot);
        Assert.Equal(1, predicate.ItemCount);
        Assert.Equal("true", predicate.Metadata["sealed"]);
    }

    /// <summary>
    /// Deterministic test double for <see cref="IVexSigner"/>: the "signature"
    /// is simply the base64 of the payload, with a fixed key id "fake-key-001".
    /// </summary>
    private sealed class FakeSigner : IVexSigner
    {
        public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
        {
            var signature = Convert.ToBase64String(payload.Span.ToArray());
            return ValueTask.FromResult(new VexSignedPayload(signature, "fake-key-001"));
        }
    }
}

View File

@@ -0,0 +1,199 @@
using System;
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using StellaOps.Excititor.Core.Evidence;
using StellaOps.Excititor.Core.Observations;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests;
/// <summary>
/// Unit tests for the evidence-locker model and service: snapshot-item
/// normalization, provenance, deterministic manifest ordering, merkle-root
/// computation, manifest-id formatting, signing, and manifest verification.
/// </summary>
public class VexEvidenceLockerTests
{
    /// <summary>Snapshot items trim all fields and lower-case the provider id; linkset id keeps its original casing.</summary>
    [Fact]
    public void VexEvidenceSnapshotItem_NormalizesFields()
    {
        var item = new VexEvidenceSnapshotItem(
            observationId: " obs-001 ",
            providerId: " PROVIDER-A ",
            contentHash: " sha256:abc123 ",
            linksetId: " CVE-2024-0001:pkg:npm/lodash ");
        Assert.Equal("obs-001", item.ObservationId);
        Assert.Equal("provider-a", item.ProviderId);
        Assert.Equal("sha256:abc123", item.ContentHash);
        Assert.Equal("CVE-2024-0001:pkg:npm/lodash", item.LinksetId);
        Assert.Null(item.DsseEnvelopeHash);
        // Default provenance source is "ingest" when none is supplied.
        Assert.Equal("ingest", item.Provenance.Source);
    }

    /// <summary>Provenance stores source, mirror generation, and export-center manifest hash as given.</summary>
    [Fact]
    public void VexEvidenceProvenance_CreatesCorrectProvenance()
    {
        var provenance = new VexEvidenceProvenance("mirror", 5, "sha256:manifest123");
        Assert.Equal("mirror", provenance.Source);
        Assert.Equal(5, provenance.MirrorGeneration);
        Assert.Equal("sha256:manifest123", provenance.ExportCenterManifest);
    }

    /// <summary>Manifest items are ordered by observation id, then provider id, regardless of input order.</summary>
    [Fact]
    public void VexLockerManifest_SortsItemsDeterministically()
    {
        var item1 = new VexEvidenceSnapshotItem("obs-002", "provider-b", "sha256:bbb", "linkset-1");
        var item2 = new VexEvidenceSnapshotItem("obs-001", "provider-a", "sha256:aaa", "linkset-1");
        var item3 = new VexEvidenceSnapshotItem("obs-001", "provider-b", "sha256:ccc", "linkset-2");
        var manifest = new VexLockerManifest(
            tenant: "test-tenant",
            manifestId: "locker:excititor:test:2025-11-27:0001",
            createdAt: DateTimeOffset.Parse("2025-11-27T10:00:00Z"),
            items: new[] { item1, item2, item3 });
        // Should be sorted by observationId, then providerId
        Assert.Equal(3, manifest.Items.Length);
        Assert.Equal("obs-001", manifest.Items[0].ObservationId);
        Assert.Equal("provider-a", manifest.Items[0].ProviderId);
        Assert.Equal("obs-001", manifest.Items[1].ObservationId);
        Assert.Equal("provider-b", manifest.Items[1].ProviderId);
        Assert.Equal("obs-002", manifest.Items[2].ObservationId);
    }

    /// <summary>The merkle root is a "sha256:"-prefixed 64-hex-character digest.</summary>
    [Fact]
    public void VexLockerManifest_ComputesMerkleRoot()
    {
        var item1 = new VexEvidenceSnapshotItem("obs-001", "provider-a", "sha256:0000000000000000000000000000000000000000000000000000000000000001", "linkset-1");
        var item2 = new VexEvidenceSnapshotItem("obs-002", "provider-a", "sha256:0000000000000000000000000000000000000000000000000000000000000002", "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item1, item2 });
        Assert.StartsWith("sha256:", manifest.MerkleRoot);
        Assert.Equal(71, manifest.MerkleRoot.Length); // "sha256:" + 64 hex chars
    }

    /// <summary>Manifest ids follow locker:excititor:&lt;tenant-lowercased&gt;:&lt;yyyy-MM-dd&gt;:&lt;4-digit sequence&gt;.</summary>
    [Fact]
    public void VexLockerManifest_CreateManifestId_GeneratesCorrectFormat()
    {
        var id = VexLockerManifest.CreateManifestId("TestTenant", DateTimeOffset.Parse("2025-11-27T15:30:00Z"), 42);
        Assert.Equal("locker:excititor:testtenant:2025-11-27:0042", id);
    }

    /// <summary>WithSignature returns a signed copy without mutating the original, preserving root and items.</summary>
    [Fact]
    public void VexLockerManifest_WithSignature_PreservesData()
    {
        var item = new VexEvidenceSnapshotItem("obs-001", "provider-a", "sha256:abc123", "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var signed = manifest.WithSignature("dsse-signature-base64");
        Assert.Null(manifest.Signature);
        Assert.Equal("dsse-signature-base64", signed.Signature);
        Assert.Equal(manifest.MerkleRoot, signed.MerkleRoot);
        Assert.Equal(manifest.Items.Length, signed.Items.Length);
    }

    /// <summary>CreateSnapshotItem copies observation id, provider, and content hash, and attaches the given linkset id.</summary>
    [Fact]
    public void VexEvidenceLockerService_CreateSnapshotItem_FromObservation()
    {
        var observation = BuildTestObservation("obs-001", "provider-a", "sha256:content123");
        var service = new VexEvidenceLockerService();
        var item = service.CreateSnapshotItem(observation, "linkset-001");
        Assert.Equal("obs-001", item.ObservationId);
        Assert.Equal("provider-a", item.ProviderId);
        Assert.Equal("sha256:content123", item.ContentHash);
        Assert.Equal("linkset-001", item.LinksetId);
    }

    /// <summary>BuildManifest formats the id from tenant/date/sequence, sorts items, and records the sealed flag in metadata.</summary>
    [Fact]
    public void VexEvidenceLockerService_BuildManifest_CreatesValidManifest()
    {
        var obs1 = BuildTestObservation("obs-001", "provider-a", "sha256:aaa");
        var obs2 = BuildTestObservation("obs-002", "provider-b", "sha256:bbb");
        var service = new VexEvidenceLockerService();
        var manifest = service.BuildManifest(
            tenant: "test-tenant",
            observations: new[] { obs2, obs1 },
            linksetIdSelector: o => $"linkset:{o.ObservationId}",
            timestamp: DateTimeOffset.Parse("2025-11-27T10:00:00Z"),
            sequence: 1,
            isSealed: true);
        Assert.Equal("test-tenant", manifest.Tenant);
        Assert.Equal("locker:excititor:test-tenant:2025-11-27:0001", manifest.ManifestId);
        Assert.Equal(2, manifest.Items.Length);
        Assert.Equal("obs-001", manifest.Items[0].ObservationId); // sorted
        Assert.Equal("true", manifest.Metadata["sealed"]);
    }

    /// <summary>A freshly built manifest verifies against its own merkle root.</summary>
    [Fact]
    public void VexEvidenceLockerService_VerifyManifest_ReturnsTrueForValidManifest()
    {
        var item = new VexEvidenceSnapshotItem("obs-001", "provider-a", "sha256:0000000000000000000000000000000000000000000000000000000000000001", "linkset-1");
        var manifest = new VexLockerManifest(
            tenant: "test",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: new[] { item });
        var service = new VexEvidenceLockerService();
        Assert.True(service.VerifyManifest(manifest));
    }

    /// <summary>An item-less manifest still produces a well-formed sha256 merkle root.</summary>
    [Fact]
    public void VexLockerManifest_EmptyItems_ProducesEmptyMerkleRoot()
    {
        var manifest = new VexLockerManifest(
            tenant: "test",
            manifestId: "test-manifest",
            createdAt: DateTimeOffset.UtcNow,
            items: Array.Empty<VexEvidenceSnapshotItem>());
        Assert.StartsWith("sha256:", manifest.MerkleRoot);
        Assert.Empty(manifest.Items);
    }

    /// <summary>
    /// Builds a minimal <see cref="VexObservation"/> fixture: unsigned upstream
    /// with the given content hash, empty openvex content, no statements, and
    /// an empty linkset — just enough for locker-service tests.
    /// </summary>
    private static VexObservation BuildTestObservation(string id, string provider, string contentHash)
    {
        var upstream = new VexObservationUpstream(
            upstreamId: $"upstream-{id}",
            documentVersion: "1",
            fetchedAt: DateTimeOffset.UtcNow,
            receivedAt: DateTimeOffset.UtcNow,
            contentHash: contentHash,
            signature: new VexObservationSignature(false, null, null, null));
        var content = new VexObservationContent(
            format: "openvex",
            specVersion: "1.0.0",
            raw: JsonNode.Parse("{}")!,
            metadata: null);
        var linkset = new VexObservationLinkset(
            aliases: Array.Empty<string>(),
            purls: Array.Empty<string>(),
            cpes: Array.Empty<string>(),
            references: Array.Empty<VexObservationReference>());
        return new VexObservation(
            observationId: id,
            tenant: "test",
            providerId: provider,
            streamId: "ingest",
            upstream: upstream,
            statements: ImmutableArray<VexObservationStatement>.Empty,
            content: content,
            linkset: linkset,
            createdAt: DateTimeOffset.UtcNow);
    }
}

View File

@@ -0,0 +1,199 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using EphemeralMongo;
using MongoRunner = EphemeralMongo.MongoRunner;
using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions;
using StellaOps.Excititor.Attestation.Signing;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Policy;
using StellaOps.Excititor.Core;
namespace StellaOps.Excititor.WebService.Tests;
/// <summary>
/// Tests for OpenAPI discovery endpoints (WEB-OAS-61-001).
/// Validates /.well-known/openapi and /openapi/excititor.json endpoints.
/// </summary>
public sealed class OpenApiDiscoveryEndpointTests : IDisposable
{
private readonly TestWebApplicationFactory _factory;
private readonly IMongoRunner _runner;
/// <summary>
/// Starts an ephemeral single-node MongoDB replica set and builds the test web
/// application factory with in-memory configuration (Mongo storage settings,
/// a temp-dir artifact root) plus fake signer/policy/connector registrations.
/// </summary>
public OpenApiDiscoveryEndpointTests()
{
    _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true });
    _factory = new TestWebApplicationFactory(
        configureConfiguration: config =>
        {
            // Artifact root under the OS temp dir; created up front so the service can write there.
            var rootPath = Path.Combine(Path.GetTempPath(), "excititor-openapi-tests");
            Directory.CreateDirectory(rootPath);
            var settings = new Dictionary<string, string?>
            {
                ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString,
                ["Excititor:Storage:Mongo:DatabaseName"] = "excititor-openapi-tests",
                ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw",
                ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256",
                ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath,
            };
            config.AddInMemoryCollection(settings!);
        },
        configureServices: services =>
        {
            // Replace real signing/policy/connector services with test doubles.
            TestServiceOverrides.Apply(services);
            services.AddSingleton<IVexSigner, FakeSigner>();
            services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>();
            services.AddSingleton(new VexConnectorDescriptor("excititor:test", VexProviderKind.Distro, "Test Connector"));
        });
}
/// <summary>
/// GET /.well-known/openapi returns 200 with the service discovery document:
/// service name, OpenAPI spec version/format, spec URL, error-envelope schema
/// pointer, and a version field.
/// </summary>
[Fact]
public async Task WellKnownOpenApi_ReturnsServiceMetadata()
{
    var client = _factory.CreateClient();
    var response = await client.GetAsync("/.well-known/openapi");
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    var json = await response.Content.ReadAsStringAsync();
    var doc = JsonDocument.Parse(json);
    var root = doc.RootElement;
    Assert.Equal("excititor", root.GetProperty("service").GetString());
    Assert.Equal("3.1.0", root.GetProperty("specVersion").GetString());
    Assert.Equal("application/json", root.GetProperty("format").GetString());
    Assert.Equal("/openapi/excititor.json", root.GetProperty("url").GetString());
    Assert.Equal("#/components/schemas/Error", root.GetProperty("errorEnvelopeSchema").GetString());
    Assert.True(root.TryGetProperty("version", out _), "Response should include version");
}
/// <summary>
/// GET /openapi/excititor.json returns 200 with an OpenAPI 3.1.0 document:
/// correct title, version/description present in info, and a paths object
/// containing at least /excititor/status.
/// </summary>
[Fact]
public async Task OpenApiSpec_ReturnsValidOpenApi31Document()
{
    var client = _factory.CreateClient();
    var response = await client.GetAsync("/openapi/excititor.json");
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    var json = await response.Content.ReadAsStringAsync();
    var doc = JsonDocument.Parse(json);
    var root = doc.RootElement;
    // Verify OpenAPI version
    Assert.Equal("3.1.0", root.GetProperty("openapi").GetString());
    // Verify info object
    var info = root.GetProperty("info");
    Assert.Equal("StellaOps Excititor API", info.GetProperty("title").GetString());
    Assert.True(info.TryGetProperty("version", out _), "Info should include version");
    Assert.True(info.TryGetProperty("description", out _), "Info should include description");
    // Verify paths exist
    Assert.True(root.TryGetProperty("paths", out var paths), "Spec should include paths");
    Assert.True(paths.TryGetProperty("/excititor/status", out _), "Paths should include /excititor/status");
}
[Fact]
public async Task OpenApiSpec_IncludesErrorSchemaComponent()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/openapi/excititor.json");
var json = await response.Content.ReadAsStringAsync();
var doc = JsonDocument.Parse(json);
var root = doc.RootElement;
// Verify components/schemas/Error exists
Assert.True(root.TryGetProperty("components", out var components), "Spec should include components");
Assert.True(components.TryGetProperty("schemas", out var schemas), "Components should include schemas");
Assert.True(schemas.TryGetProperty("Error", out var errorSchema), "Schemas should include Error");
// Verify Error schema structure
Assert.Equal("object", errorSchema.GetProperty("type").GetString());
Assert.True(errorSchema.TryGetProperty("properties", out var props), "Error schema should have properties");
Assert.True(props.TryGetProperty("error", out _), "Error schema should have error property");
}
[Fact]
public async Task OpenApiSpec_IncludesTimelineEndpoint()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/openapi/excititor.json");
var json = await response.Content.ReadAsStringAsync();
var doc = JsonDocument.Parse(json);
var root = doc.RootElement;
var paths = root.GetProperty("paths");
Assert.True(paths.TryGetProperty("/obs/excititor/timeline", out var timelinePath),
"Paths should include /obs/excititor/timeline");
// Verify it has a GET operation
Assert.True(timelinePath.TryGetProperty("get", out var getOp), "Timeline path should have GET operation");
Assert.True(getOp.TryGetProperty("summary", out _), "GET operation should have summary");
}
[Fact]
public async Task OpenApiSpec_IncludesLinkHeaderExample()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/openapi/excititor.json");
var json = await response.Content.ReadAsStringAsync();
// Verify the spec contains a Link header reference for OpenAPI describedby
// JSON escapes quotes, so check for the essential parts
Assert.Contains("/openapi/excititor.json", json);
Assert.Contains("describedby", json);
}
[Fact]
public async Task WellKnownOpenApi_ContentTypeIsJson()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/.well-known/openapi");
Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType);
}
[Fact]
public async Task OpenApiSpec_ContentTypeIsJson()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/openapi/excititor.json");
Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType);
}
public void Dispose()
{
_factory.Dispose();
_runner.Dispose();
}
private sealed class FakeSigner : IVexSigner
{
public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexSignedPayload("signature", "key"));
}
private sealed class FakePolicyEvaluator : IVexPolicyEvaluator
{
public string Version => "test";
public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default;
public double GetProviderWeight(VexProvider provider) => 1.0;
public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason)
{
rejectionReason = null;
return true;
}
}
}

View File

@@ -40,5 +40,6 @@
<Compile Include="GraphStatusFactoryTests.cs" />
<Compile Include="GraphTooltipFactoryTests.cs" />
<Compile Include="AttestationVerifyEndpointTests.cs" />
<Compile Include="OpenApiDiscoveryEndpointTests.cs" />
</ItemGroup>
</Project>

View File

@@ -16,7 +16,9 @@ using StellaOps.Aoc;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Aoc;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Orchestration;
using StellaOps.Excititor.Worker.Scheduling;
using StellaOps.Excititor.Worker.Signature;
using StellaOps.Plugin;
@@ -115,7 +117,8 @@ public sealed class DefaultVexProviderRunnerIntegrationTests : IAsyncLifetime
storedCount.Should().Be(9); // documents before the failing digest persist
guard.FailDigest = null;
time.Advance(TimeSpan.FromMinutes(10));
// Advance past the quarantine duration (30 mins) since AOC guard failures are non-retryable
time.Advance(TimeSpan.FromMinutes(35));
await runner.RunAsync(schedule, CancellationToken.None);
var finalCount = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
@@ -177,12 +180,23 @@ public sealed class DefaultVexProviderRunnerIntegrationTests : IAsyncLifetime
},
};
var orchestratorOptions = Microsoft.Extensions.Options.Options.Create(new VexWorkerOrchestratorOptions { Enabled = false });
var orchestratorClient = new NoopOrchestratorClient();
var heartbeatService = new VexWorkerHeartbeatService(
orchestratorClient,
orchestratorOptions,
timeProvider,
NullLogger<VexWorkerHeartbeatService>.Instance);
return new DefaultVexProviderRunner(
services,
new PluginCatalog(),
orchestratorClient,
heartbeatService,
NullLogger<DefaultVexProviderRunner>.Instance,
timeProvider,
Microsoft.Extensions.Options.Options.Create(options));
Microsoft.Extensions.Options.Options.Create(options),
orchestratorOptions);
}
private static List<DocumentSpec> CreateDocumentSpecs(int count)
@@ -330,6 +344,39 @@ public sealed class DefaultVexProviderRunnerIntegrationTests : IAsyncLifetime
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
/// <summary>
/// Orchestrator client stub: StartJobAsync fabricates a local job context and every
/// other operation is a no-op, so runner tests execute without an orchestrator service.
/// </summary>
private sealed class NoopOrchestratorClient : IVexWorkerOrchestratorClient
{
    // Echoes the caller-supplied checkpoint back in a fresh context with a new run id.
    public ValueTask<VexWorkerJobContext> StartJobAsync(string tenant, string connectorId, string? checkpoint, CancellationToken cancellationToken = default)
        => ValueTask.FromResult(new VexWorkerJobContext(tenant, connectorId, Guid.NewGuid(), checkpoint, DateTimeOffset.UtcNow));

    public ValueTask SendHeartbeatAsync(VexWorkerJobContext context, VexWorkerHeartbeat heartbeat, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    public ValueTask RecordArtifactAsync(VexWorkerJobContext context, VexWorkerArtifact artifact, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    public ValueTask CompleteJobAsync(VexWorkerJobContext context, VexWorkerJobResult result, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    public ValueTask FailJobAsync(VexWorkerJobContext context, string errorCode, string? errorMessage, int? retryAfterSeconds, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    public ValueTask FailJobAsync(VexWorkerJobContext context, VexWorkerError error, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    // No commands are ever pending from this stub.
    public ValueTask<VexWorkerCommand?> GetPendingCommandAsync(VexWorkerJobContext context, CancellationToken cancellationToken = default)
        => ValueTask.FromResult<VexWorkerCommand?>(null);

    public ValueTask AcknowledgeCommandAsync(VexWorkerJobContext context, long commandSequence, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    public ValueTask SaveCheckpointAsync(VexWorkerJobContext context, VexWorkerCheckpoint checkpoint, CancellationToken cancellationToken = default)
        => ValueTask.CompletedTask;

    // Checkpoints are never persisted, so none can be loaded.
    public ValueTask<VexWorkerCheckpoint?> LoadCheckpointAsync(string connectorId, CancellationToken cancellationToken = default)
        => ValueTask.FromResult<VexWorkerCheckpoint?>(null);
}
private sealed class DirectSessionProvider : IVexMongoSessionProvider
{
private readonly IMongoClient _client;

View File

@@ -19,13 +19,15 @@ using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Aoc;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Scheduling;
using StellaOps.Excititor.Worker.Signature;
using StellaOps.Aoc;
using Xunit;
using System.Runtime.CompilerServices;
using StellaOps.IssuerDirectory.Client;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Orchestration;
using StellaOps.Excititor.Worker.Scheduling;
using StellaOps.Excititor.Worker.Signature;
using StellaOps.Aoc;
using Xunit;
using System.Runtime.CompilerServices;
using StellaOps.IssuerDirectory.Client;
namespace StellaOps.Excititor.Worker.Tests;
@@ -286,12 +288,12 @@ public sealed class DefaultVexProviderRunnerTests
.Add("verification.issuer", "issuer-from-verifier")
.Add("verification.keyId", "key-from-verifier");
var attestationVerifier = new StubAttestationVerifier(true, diagnostics);
var signatureVerifier = new WorkerSignatureVerifier(
NullLogger<WorkerSignatureVerifier>.Instance,
attestationVerifier,
time,
TestIssuerDirectoryClient.Instance);
var attestationVerifier = new StubAttestationVerifier(true, diagnostics);
var signatureVerifier = new WorkerSignatureVerifier(
NullLogger<WorkerSignatureVerifier>.Instance,
attestationVerifier,
time,
TestIssuerDirectoryClient.Instance);
var connector = TestConnector.WithDocuments("excititor:test", document);
var stateRepository = new InMemoryStateRepository();
@@ -332,6 +334,45 @@ public sealed class DefaultVexProviderRunnerTests
{
var now = new DateTimeOffset(2025, 10, 21, 17, 0, 0, TimeSpan.Zero);
var time = new FixedTimeProvider(now);
// Use a network exception which is classified as retryable
var connector = TestConnector.Failure("excititor:test", new System.Net.Http.HttpRequestException("network failure"));
var stateRepository = new InMemoryStateRepository();
stateRepository.Save(new VexConnectorState(
"excititor:test",
LastUpdated: now.AddDays(-2),
DocumentDigests: ImmutableArray<string>.Empty,
ResumeTokens: ImmutableDictionary<string, string>.Empty,
LastSuccessAt: now.AddDays(-1),
FailureCount: 1,
NextEligibleRun: null,
LastFailureReason: null));
var services = CreateServiceProvider(connector, stateRepository);
var runner = CreateRunner(services, time, options =>
{
options.Retry.BaseDelay = TimeSpan.FromMinutes(5);
options.Retry.MaxDelay = TimeSpan.FromMinutes(60);
options.Retry.FailureThreshold = 3;
options.Retry.QuarantineDuration = TimeSpan.FromHours(12);
options.Retry.JitterRatio = 0;
});
await Assert.ThrowsAsync<System.Net.Http.HttpRequestException>(async () => await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None).AsTask());
var state = stateRepository.Get("excititor:test");
state.Should().NotBeNull();
state!.FailureCount.Should().Be(2);
state.LastFailureReason.Should().Be("network failure");
// Exponential backoff: 5 mins * 2^(2-1) = 10 mins
state.NextEligibleRun.Should().Be(now + TimeSpan.FromMinutes(10));
}
[Fact]
public async Task RunAsync_NonRetryableFailure_AppliesQuarantine()
{
var now = new DateTimeOffset(2025, 10, 21, 17, 0, 0, TimeSpan.Zero);
var time = new FixedTimeProvider(now);
// InvalidOperationException is classified as non-retryable
var connector = TestConnector.Failure("excititor:test", new InvalidOperationException("boom"));
var stateRepository = new InMemoryStateRepository();
stateRepository.Save(new VexConnectorState(
@@ -360,7 +401,8 @@ public sealed class DefaultVexProviderRunnerTests
state.Should().NotBeNull();
state!.FailureCount.Should().Be(2);
state.LastFailureReason.Should().Be("boom");
state.NextEligibleRun.Should().Be(now + TimeSpan.FromMinutes(10));
// Non-retryable errors apply quarantine immediately
state.NextEligibleRun.Should().Be(now + TimeSpan.FromHours(12));
}
private static ServiceProvider CreateServiceProvider(
@@ -390,12 +432,22 @@ public sealed class DefaultVexProviderRunnerTests
{
var options = new VexWorkerOptions();
configure(options);
var orchestratorOptions = Microsoft.Extensions.Options.Options.Create(new VexWorkerOrchestratorOptions { Enabled = false });
var orchestratorClient = new NoopOrchestratorClient();
var heartbeatService = new VexWorkerHeartbeatService(
orchestratorClient,
orchestratorOptions,
timeProvider,
NullLogger<VexWorkerHeartbeatService>.Instance);
return new DefaultVexProviderRunner(
serviceProvider,
new PluginCatalog(),
orchestratorClient,
heartbeatService,
NullLogger<DefaultVexProviderRunner>.Instance,
timeProvider,
Microsoft.Extensions.Options.Options.Create(options));
Microsoft.Extensions.Options.Options.Create(options),
orchestratorOptions);
}
private sealed class FixedTimeProvider : TimeProvider
@@ -467,64 +519,97 @@ public sealed class DefaultVexProviderRunnerTests
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
private sealed class StubNormalizerRouter : IVexNormalizerRouter
{
private readonly ImmutableArray<VexClaim> _claims;
public StubNormalizerRouter(IEnumerable<VexClaim> claims)
{
_claims = claims.ToImmutableArray();
}
public int CallCount { get; private set; }
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
{
CallCount++;
return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary<string, string>.Empty));
}
}
private sealed class TestIssuerDirectoryClient : IIssuerDirectoryClient
{
public static TestIssuerDirectoryClient Instance { get; } = new();
private static readonly IssuerTrustResponseModel DefaultTrust = new(null, null, 1m);
public ValueTask<IReadOnlyList<IssuerKeyModel>> GetIssuerKeysAsync(
string tenantId,
string issuerId,
bool includeGlobal,
CancellationToken cancellationToken)
=> ValueTask.FromResult<IReadOnlyList<IssuerKeyModel>>(Array.Empty<IssuerKeyModel>());
public ValueTask<IssuerTrustResponseModel> GetIssuerTrustAsync(
string tenantId,
string issuerId,
bool includeGlobal,
CancellationToken cancellationToken)
=> ValueTask.FromResult(DefaultTrust);
public ValueTask<IssuerTrustResponseModel> SetIssuerTrustAsync(
string tenantId,
string issuerId,
decimal weight,
string? reason,
CancellationToken cancellationToken)
=> ValueTask.FromResult(DefaultTrust);
public ValueTask DeleteIssuerTrustAsync(
string tenantId,
string issuerId,
string? reason,
CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
private sealed class StubNormalizerRouter : IVexNormalizerRouter
{
private readonly ImmutableArray<VexClaim> _claims;
public StubNormalizerRouter(IEnumerable<VexClaim> claims)
{
_claims = claims.ToImmutableArray();
}
public int CallCount { get; private set; }
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
{
CallCount++;
return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary<string, string>.Empty));
}
}
private sealed class TestIssuerDirectoryClient : IIssuerDirectoryClient
{
public static TestIssuerDirectoryClient Instance { get; } = new();
private static readonly IssuerTrustResponseModel DefaultTrust = new(null, null, 1m);
public ValueTask<IReadOnlyList<IssuerKeyModel>> GetIssuerKeysAsync(
string tenantId,
string issuerId,
bool includeGlobal,
CancellationToken cancellationToken)
=> ValueTask.FromResult<IReadOnlyList<IssuerKeyModel>>(Array.Empty<IssuerKeyModel>());
public ValueTask<IssuerTrustResponseModel> GetIssuerTrustAsync(
string tenantId,
string issuerId,
bool includeGlobal,
CancellationToken cancellationToken)
=> ValueTask.FromResult(DefaultTrust);
public ValueTask<IssuerTrustResponseModel> SetIssuerTrustAsync(
string tenantId,
string issuerId,
decimal weight,
string? reason,
CancellationToken cancellationToken)
=> ValueTask.FromResult(DefaultTrust);
public ValueTask DeleteIssuerTrustAsync(
string tenantId,
string issuerId,
string? reason,
CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
private sealed class NoopOrchestratorClient : IVexWorkerOrchestratorClient
{
public ValueTask<VexWorkerJobContext> StartJobAsync(string tenant, string connectorId, string? checkpoint, CancellationToken cancellationToken = default)
=> ValueTask.FromResult(new VexWorkerJobContext(tenant, connectorId, Guid.NewGuid(), checkpoint, DateTimeOffset.UtcNow));
public ValueTask SendHeartbeatAsync(VexWorkerJobContext context, VexWorkerHeartbeat heartbeat, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask RecordArtifactAsync(VexWorkerJobContext context, VexWorkerArtifact artifact, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask CompleteJobAsync(VexWorkerJobContext context, VexWorkerJobResult result, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask FailJobAsync(VexWorkerJobContext context, string errorCode, string? errorMessage, int? retryAfterSeconds, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask FailJobAsync(VexWorkerJobContext context, VexWorkerError error, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask<VexWorkerCommand?> GetPendingCommandAsync(VexWorkerJobContext context, CancellationToken cancellationToken = default)
=> ValueTask.FromResult<VexWorkerCommand?>(null);
public ValueTask AcknowledgeCommandAsync(VexWorkerJobContext context, long commandSequence, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask SaveCheckpointAsync(VexWorkerJobContext context, VexWorkerCheckpoint checkpoint, CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
public ValueTask<VexWorkerCheckpoint?> LoadCheckpointAsync(string connectorId, CancellationToken cancellationToken = default)
=> ValueTask.FromResult<VexWorkerCheckpoint?>(null);
}
private sealed class InMemoryStateRepository : IVexConnectorStateRepository
@@ -545,6 +630,9 @@ public sealed class DefaultVexProviderRunnerTests
Save(state);
return ValueTask.CompletedTask;
}
public ValueTask<IReadOnlyCollection<VexConnectorState>> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> ValueTask.FromResult<IReadOnlyCollection<VexConnectorState>>(_states.Values.ToList());
}
private sealed class TestConnector : IVexConnector
@@ -670,25 +758,25 @@ public sealed class DefaultVexProviderRunnerTests
}
}
private sealed class StubAttestationVerifier : IVexAttestationVerifier
{
private readonly bool _isValid;
private readonly VexAttestationDiagnostics _diagnostics;
public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string> diagnostics)
{
_isValid = isValid;
_diagnostics = VexAttestationDiagnostics.FromBuilder(diagnostics.ToBuilder());
}
public int Invocations { get; private set; }
public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
{
Invocations++;
return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics));
}
}
private sealed class StubAttestationVerifier : IVexAttestationVerifier
{
private readonly bool _isValid;
private readonly VexAttestationDiagnostics _diagnostics;
public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string> diagnostics)
{
_isValid = isValid;
_diagnostics = VexAttestationDiagnostics.FromBuilder(diagnostics.ToBuilder());
}
public int Invocations { get; private set; }
public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
{
Invocations++;
return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics));
}
}
private static VexRawDocument CreateAttestationRawDocument(DateTimeOffset observedAt)
{

View File

@@ -0,0 +1,178 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Worker.Options;
using StellaOps.Excititor.Worker.Orchestration;
using Xunit;
namespace StellaOps.Excititor.Worker.Tests.Orchestration;
/// <summary>
/// Unit tests for VexWorkerOrchestratorClient covering the job lifecycle
/// (start, heartbeat, artifact recording, completion, failure) against an
/// in-memory connector-state repository and a fixed clock.
/// </summary>
public class VexWorkerOrchestratorClientTests
{
    private readonly InMemoryConnectorStateRepository _stateRepository = new();
    private readonly FakeTimeProvider _timeProvider = new();
    // Orchestration enabled so client calls actually persist connector state.
    private readonly IOptions<VexWorkerOrchestratorOptions> _options = Microsoft.Extensions.Options.Options.Create(new VexWorkerOrchestratorOptions
    {
        Enabled = true,
        DefaultTenant = "test-tenant"
    });

    [Fact]
    public async Task StartJobAsync_CreatesJobContext()
    {
        var client = CreateClient();

        var context = await client.StartJobAsync("tenant-a", "connector-001", "checkpoint-123");

        // The context carries through tenant/connector/checkpoint and mints a run id.
        Assert.NotNull(context);
        Assert.Equal("tenant-a", context.Tenant);
        Assert.Equal("connector-001", context.ConnectorId);
        Assert.Equal("checkpoint-123", context.Checkpoint);
        Assert.NotEqual(Guid.Empty, context.RunId);
    }

    [Fact]
    public async Task SendHeartbeatAsync_UpdatesConnectorState()
    {
        var client = CreateClient();
        var context = await client.StartJobAsync("tenant-a", "connector-001", null);
        var heartbeat = new VexWorkerHeartbeat(
            VexWorkerHeartbeatStatus.Running,
            Progress: 50,
            QueueDepth: null,
            LastArtifactHash: "sha256:abc123",
            LastArtifactKind: "vex-document",
            ErrorCode: null,
            RetryAfterSeconds: null);

        await client.SendHeartbeatAsync(context, heartbeat);

        // Heartbeat status and timestamp are written to the connector state record.
        var state = await _stateRepository.GetAsync("connector-001", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal("Running", state.LastHeartbeatStatus);
        Assert.NotNull(state.LastHeartbeatAt);
    }

    [Fact]
    public async Task RecordArtifactAsync_TracksArtifactHash()
    {
        var client = CreateClient();
        var context = await client.StartJobAsync("tenant-a", "connector-001", null);
        var artifact = new VexWorkerArtifact(
            "sha256:deadbeef",
            "vex-raw-document",
            "provider-001",
            "doc-001",
            _timeProvider.GetUtcNow());

        await client.RecordArtifactAsync(context, artifact);

        // Hash/kind are surfaced on the state and the digest joins the document set.
        var state = await _stateRepository.GetAsync("connector-001", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal("sha256:deadbeef", state.LastArtifactHash);
        Assert.Equal("vex-raw-document", state.LastArtifactKind);
        Assert.Contains("sha256:deadbeef", state.DocumentDigests);
    }

    [Fact]
    public async Task CompleteJobAsync_UpdatesStateWithResults()
    {
        var client = CreateClient();
        var context = await client.StartJobAsync("tenant-a", "connector-001", null);
        var completedAt = _timeProvider.GetUtcNow();
        var result = new VexWorkerJobResult(
            DocumentsProcessed: 10,
            ClaimsGenerated: 25,
            LastCheckpoint: "checkpoint-new",
            LastArtifactHash: "sha256:final",
            CompletedAt: completedAt);

        await client.CompleteJobAsync(context, result);

        // Success resets failure bookkeeping and records checkpoint/artifact.
        var state = await _stateRepository.GetAsync("connector-001", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal("Succeeded", state.LastHeartbeatStatus);
        Assert.Equal("checkpoint-new", state.LastCheckpoint);
        Assert.Equal("sha256:final", state.LastArtifactHash);
        Assert.Equal(0, state.FailureCount);
        Assert.Null(state.NextEligibleRun);
    }

    [Fact]
    public async Task FailJobAsync_UpdatesStateWithError()
    {
        var client = CreateClient();
        var context = await client.StartJobAsync("tenant-a", "connector-001", null);

        await client.FailJobAsync(context, "CONN_ERROR", "Connection failed", retryAfterSeconds: 60);

        // Failure bumps the counter, records the error code, and schedules a retry.
        var state = await _stateRepository.GetAsync("connector-001", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal("Failed", state.LastHeartbeatStatus);
        Assert.Equal(1, state.FailureCount);
        Assert.Contains("CONN_ERROR", state.LastFailureReason);
        Assert.NotNull(state.NextEligibleRun);
    }

    [Fact]
    public void VexWorkerJobContext_SequenceIncrements()
    {
        var context = new VexWorkerJobContext(
            "tenant-a",
            "connector-001",
            Guid.NewGuid(),
            null,
            DateTimeOffset.UtcNow);

        // Sequence starts at zero and NextSequence is strictly monotonic.
        Assert.Equal(0, context.Sequence);
        Assert.Equal(1, context.NextSequence());
        Assert.Equal(2, context.NextSequence());
        Assert.Equal(3, context.NextSequence());
    }

    private VexWorkerOrchestratorClient CreateClient()
        => new(
            _stateRepository,
            _timeProvider,
            _options,
            NullLogger<VexWorkerOrchestratorClient>.Instance);

    // Deterministic clock; Advance moves time forward manually.
    private sealed class FakeTimeProvider : TimeProvider
    {
        private DateTimeOffset _now = new(2025, 11, 27, 12, 0, 0, TimeSpan.Zero);
        public override DateTimeOffset GetUtcNow() => _now;
        public void Advance(TimeSpan duration) => _now = _now.Add(duration);
    }

    // Dictionary-backed state store; case-insensitive on connector id.
    private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
    {
        private readonly Dictionary<string, VexConnectorState> _states = new(StringComparer.OrdinalIgnoreCase);

        public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
        {
            _states.TryGetValue(connectorId, out var state);
            return ValueTask.FromResult(state);
        }

        public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
        {
            _states[state.ConnectorId] = state;
            return ValueTask.CompletedTask;
        }

        public ValueTask<IReadOnlyCollection<VexConnectorState>> ListAsync(CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
            => ValueTask.FromResult<IReadOnlyCollection<VexConnectorState>>(_states.Values.ToList());
    }
}

View File

@@ -15,7 +15,7 @@ public sealed class TenantAuthorityClientFactoryTests
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
var factory = new TenantAuthorityClientFactory(Microsoft.Extensions.Options.Options.Create(options));
using var client = factory.Create("tenant-a");
@@ -29,7 +29,7 @@ public sealed class TenantAuthorityClientFactoryTests
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
var factory = new TenantAuthorityClientFactory(Microsoft.Extensions.Options.Options.Create(options));
FluentActions.Invoking(() => factory.Create(string.Empty))
.Should().Throw<ArgumentException>();
@@ -40,7 +40,7 @@ public sealed class TenantAuthorityClientFactoryTests
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
var factory = new TenantAuthorityClientFactory(Microsoft.Extensions.Options.Options.Create(options));
FluentActions.Invoking(() => factory.Create("tenant-b"))
.Should().Throw<InvalidOperationException>();

View File

@@ -0,0 +1,352 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Service that enforces determinism constraints during policy evaluation.
/// Combines static analysis and runtime monitoring.
/// </summary>
public sealed class DeterminismGuardService
{
    private readonly ProhibitedPatternAnalyzer _analyzer = new();
    private readonly DeterminismGuardOptions _options;
    private readonly RuntimeDeterminismMonitor _runtimeMonitor;

    public DeterminismGuardService(DeterminismGuardOptions? options = null)
    {
        _options = options ?? DeterminismGuardOptions.Default;
        _runtimeMonitor = new RuntimeDeterminismMonitor(_options);
    }

    /// <summary>
    /// Runs static analysis over <paramref name="sourceCode"/> and reports any
    /// determinism violations found.
    /// </summary>
    public DeterminismAnalysisResult AnalyzeSource(string sourceCode, string? fileName = null)
        => _analyzer.AnalyzeSource(sourceCode, fileName, _options);

    /// <summary>
    /// Creates a guarded execution scope for a single policy evaluation.
    /// </summary>
    public EvaluationScope CreateScope(string scopeId, DateTimeOffset evaluationTimestamp)
        => new(scopeId, evaluationTimestamp, _options, _runtimeMonitor);

    /// <summary>
    /// Validates a policy evaluation context; currently flags a null context as an error.
    /// </summary>
    public DeterminismAnalysisResult ValidateContext<TContext>(TContext context, string contextName)
    {
        var timer = Stopwatch.StartNew();
        var found = new List<DeterminismViolation>();

        if (context is null)
        {
            found.Add(new DeterminismViolation
            {
                Category = DeterminismViolationCategory.Other,
                ViolationType = "NullContext",
                Message = $"Evaluation context '{contextName}' is null",
                Severity = DeterminismViolationSeverity.Error,
                Remediation = "Provide a valid evaluation context"
            });
        }

        timer.Stop();

        var bySeverity = found
            .GroupBy(v => v.Severity)
            .ToImmutableDictionary(g => g.Key, g => g.Count());
        // A result only fails when enforcement is on and a violation meets the threshold.
        var blocking = found.Any(v => v.Severity >= _options.FailOnSeverity);

        return new DeterminismAnalysisResult
        {
            Passed = !_options.EnforcementEnabled || !blocking,
            Violations = found.ToImmutableArray(),
            CountBySeverity = bySeverity,
            AnalysisDurationMs = timer.ElapsedMilliseconds,
            EnforcementEnabled = _options.EnforcementEnabled
        };
    }

    /// <summary>
    /// Returns a time provider pinned to <paramref name="fixedTimestamp"/>.
    /// </summary>
    public DeterministicTimeProvider GetTimeProvider(DateTimeOffset fixedTimestamp)
        => new(fixedTimestamp);
}
/// <summary>
/// A guarded scope for policy evaluation that tracks determinism violations.
/// </summary>
public sealed class EvaluationScope : IDisposable
{
    private readonly string _scopeId;
    private readonly DateTimeOffset _evaluationTimestamp;
    private readonly DeterminismGuardOptions _options;
    private readonly RuntimeDeterminismMonitor _monitor;
    private readonly Stopwatch _stopwatch;
    private readonly List<DeterminismViolation> _violations;
    // Dedicated lock target per .NET guidance: never lock on the guarded collection itself.
    private readonly object _gate = new();
    private bool _disposed;

    internal EvaluationScope(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        DeterminismGuardOptions options,
        RuntimeDeterminismMonitor monitor)
    {
        _scopeId = scopeId ?? throw new ArgumentNullException(nameof(scopeId));
        _evaluationTimestamp = evaluationTimestamp;
        _options = options;
        _monitor = monitor;
        _stopwatch = Stopwatch.StartNew();
        _violations = new List<DeterminismViolation>();

        if (_options.EnableRuntimeMonitoring)
        {
            _monitor.EnterScope(scopeId);
        }
    }

    /// <summary>
    /// Scope identifier for tracing.
    /// </summary>
    public string ScopeId => _scopeId;

    /// <summary>
    /// The fixed evaluation timestamp for this scope.
    /// </summary>
    public DateTimeOffset EvaluationTimestamp => _evaluationTimestamp;

    /// <summary>
    /// Reports a runtime violation detected during evaluation. The violation is
    /// always recorded; with enforcement enabled, a violation at or above the
    /// configured severity then throws <see cref="DeterminismViolationException"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="violation"/> is null.</exception>
    /// <exception cref="DeterminismViolationException">Thrown when enforcement is on and the severity meets the threshold.</exception>
    public void ReportViolation(DeterminismViolation violation)
    {
        ArgumentNullException.ThrowIfNull(violation);

        lock (_gate)
        {
            _violations.Add(violation);
        }

        // Throw outside the lock so an enforcement failure cannot stall other reporters.
        if (_options.EnforcementEnabled && violation.Severity >= _options.FailOnSeverity)
        {
            throw new DeterminismViolationException(violation);
        }
    }

    /// <summary>
    /// Gets the current timestamp (always returns the fixed evaluation timestamp).
    /// </summary>
    public DateTimeOffset GetTimestamp() => _evaluationTimestamp;

    /// <summary>
    /// Gets a snapshot of all violations recorded in this scope so far.
    /// </summary>
    public IReadOnlyList<DeterminismViolation> GetViolations()
    {
        lock (_gate)
        {
            // Copy under the lock so callers never observe concurrent mutation.
            return _violations.ToList();
        }
    }

    /// <summary>
    /// Completes the scope and returns analysis results covering every violation
    /// recorded since the scope was created.
    /// </summary>
    public DeterminismAnalysisResult Complete()
    {
        _stopwatch.Stop();

        IReadOnlyList<DeterminismViolation> allViolations;
        lock (_gate)
        {
            allViolations = _violations.ToList();
        }

        var countBySeverity = allViolations
            .GroupBy(v => v.Severity)
            .ToImmutableDictionary(g => g.Key, g => g.Count());
        var hasBlockingViolation = allViolations.Any(v => v.Severity >= _options.FailOnSeverity);
        // Without enforcement the result always passes; violations are still reported.
        var passed = !_options.EnforcementEnabled || !hasBlockingViolation;

        return new DeterminismAnalysisResult
        {
            Passed = passed,
            Violations = allViolations.ToImmutableArray(),
            CountBySeverity = countBySeverity,
            AnalysisDurationMs = _stopwatch.ElapsedMilliseconds,
            EnforcementEnabled = _options.EnforcementEnabled
        };
    }

    public void Dispose()
    {
        // Idempotent: only the first call exits the runtime-monitor scope.
        if (_disposed)
        {
            return;
        }

        _disposed = true;

        if (_options.EnableRuntimeMonitoring)
        {
            _monitor.ExitScope(_scopeId);
        }
    }
}
/// <summary>
/// Exception thrown when a determinism violation is detected with enforcement enabled.
/// Thrown by <see cref="EvaluationScope.ReportViolation"/> when the violation severity
/// meets the configured blocking threshold.
/// </summary>
public sealed class DeterminismViolationException : Exception
{
    /// <summary>
    /// Creates the exception; the message embeds the violation's message text.
    /// </summary>
    /// <param name="violation">The violation that triggered enforcement.</param>
    public DeterminismViolationException(DeterminismViolation violation)
        : base($"Determinism violation: {violation.Message}")
    {
        Violation = violation;
    }
    /// <summary>
    /// The violation that caused this exception to be thrown.
    /// </summary>
    public DeterminismViolation Violation { get; }
}
/// <summary>
/// Time provider that always returns a fixed timestamp, regardless of when it is queried.
/// </summary>
public sealed class DeterministicTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _frozen;

    /// <summary>
    /// Creates a provider pinned to <paramref name="fixedTimestamp"/>.
    /// </summary>
    public DeterministicTimeProvider(DateTimeOffset fixedTimestamp) => _frozen = fixedTimestamp;

    /// <inheritdoc />
    public override DateTimeOffset GetUtcNow() => _frozen;

    /// <summary>
    /// Always UTC, so evaluation never depends on the host machine's time zone.
    /// </summary>
    public override TimeZoneInfo LocalTimeZone => TimeZoneInfo.Utc;
}
/// <summary>
/// Runtime monitor for detecting non-deterministic operations.
/// Tracks which guarded scopes are currently active; all access to the scope set
/// is serialized on a private gate so Enter/Exit/IsInScope are safe to call concurrently.
/// </summary>
internal sealed class RuntimeDeterminismMonitor
{
    private readonly DeterminismGuardOptions _options;
    private readonly HashSet<string> _activeScopes = new(StringComparer.Ordinal);
    private readonly object _lock = new();

    public RuntimeDeterminismMonitor(DeterminismGuardOptions options)
    {
        _options = options;
    }

    /// <summary>
    /// Registers a scope as active. Idempotent for the same scope id (set semantics).
    /// </summary>
    public void EnterScope(string scopeId)
    {
        lock (_lock)
        {
            _activeScopes.Add(scopeId);
        }
    }

    /// <summary>
    /// Removes a scope from the active set. No-op if the scope was never registered.
    /// </summary>
    public void ExitScope(string scopeId)
    {
        lock (_lock)
        {
            _activeScopes.Remove(scopeId);
        }
    }

    /// <summary>
    /// Whether any guarded scope is currently active.
    /// Fix: the count is read under the same lock used by EnterScope/ExitScope;
    /// previously it was an unsynchronized read racing with concurrent mutation.
    /// </summary>
    public bool IsInScope
    {
        get
        {
            lock (_lock)
            {
                return _activeScopes.Count > 0;
            }
        }
    }

    /// <summary>
    /// Checks if we're in a guarded scope and should intercept operations.
    /// </summary>
    public bool ShouldIntercept()
    {
        return _options.EnableRuntimeMonitoring && IsInScope;
    }
}
/// <summary>
/// Extension methods for integrating determinism guard with evaluation.
/// </summary>
public static class DeterminismGuardExtensions
{
    /// <summary>
    /// Executes an evaluation function within a determinism-guarded scope.
    /// The scope is completed (and its violations logged) even when the
    /// evaluation throws.
    /// </summary>
    public static TResult ExecuteGuarded<TResult>(
        this DeterminismGuardService guard,
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, TResult> evaluation)
    {
        ArgumentNullException.ThrowIfNull(guard);
        ArgumentNullException.ThrowIfNull(evaluation);

        using var scope = guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            return evaluation(scope);
        }
        finally
        {
            LogViolations(scope.Complete());
        }
    }

    /// <summary>
    /// Executes an async evaluation function within a determinism-guarded scope.
    /// The scope is completed (and its violations logged) even when the
    /// evaluation throws.
    /// </summary>
    public static async Task<TResult> ExecuteGuardedAsync<TResult>(
        this DeterminismGuardService guard,
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, Task<TResult>> evaluation)
    {
        ArgumentNullException.ThrowIfNull(guard);
        ArgumentNullException.ThrowIfNull(evaluation);

        using var scope = guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            return await evaluation(scope).ConfigureAwait(false);
        }
        finally
        {
            LogViolations(scope.Complete());
        }
    }

    // Emits every recorded violation to the debug listener. Fix: previously logging was
    // gated on !result.Passed, so with enforcement disabled (Passed always true) violations
    // were never logged, contradicting the "log even if not throwing" intent.
    // In production, this would log to structured logging.
    private static void LogViolations(DeterminismAnalysisResult result)
    {
        foreach (var violation in result.Violations)
        {
            System.Diagnostics.Debug.WriteLine(
                $"[DeterminismGuard] {violation.Severity}: {violation.Message}");
        }
    }
}

View File

@@ -0,0 +1,197 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Represents a determinism violation detected during static analysis or runtime.
/// Immutable data record; instances are produced by the pattern analyzer and by
/// runtime reporting.
/// </summary>
public sealed record DeterminismViolation
{
    /// <summary>
    /// Category of the violation.
    /// </summary>
    public required DeterminismViolationCategory Category { get; init; }
    /// <summary>
    /// Specific violation type (e.g. the prohibited API name that was matched).
    /// </summary>
    public required string ViolationType { get; init; }
    /// <summary>
    /// Human-readable description of the violation.
    /// </summary>
    public required string Message { get; init; }
    /// <summary>
    /// Source location (file path, if known).
    /// </summary>
    public string? SourceFile { get; init; }
    /// <summary>
    /// 1-based line number (if known from static analysis).
    /// </summary>
    public int? LineNumber { get; init; }
    /// <summary>
    /// Member or method name where violation occurred (best-effort; may be null).
    /// </summary>
    public string? MemberName { get; init; }
    /// <summary>
    /// Severity of the violation; compared against the configured blocking threshold.
    /// </summary>
    public required DeterminismViolationSeverity Severity { get; init; }
    /// <summary>
    /// Suggested remediation.
    /// </summary>
    public string? Remediation { get; init; }
}
/// <summary>
/// Category of determinism violation. Groups related prohibited patterns for reporting.
/// </summary>
public enum DeterminismViolationCategory
{
    /// <summary>Wall-clock time access (DateTime.Now, etc.).</summary>
    WallClock,
    /// <summary>Random number generation.</summary>
    RandomNumber,
    /// <summary>Network access (HttpClient, sockets, etc.).</summary>
    NetworkAccess,
    /// <summary>Filesystem access.</summary>
    FileSystemAccess,
    /// <summary>Environment variable access.</summary>
    EnvironmentAccess,
    /// <summary>GUID generation.</summary>
    GuidGeneration,
    /// <summary>Thread/Task operations that may introduce non-determinism.</summary>
    ConcurrencyHazard,
    /// <summary>Floating-point operations that may have platform variance.</summary>
    FloatingPointHazard,
    /// <summary>Dictionary iteration without stable ordering.</summary>
    UnstableIteration,
    /// <summary>Other non-deterministic operation (e.g. unexpected evaluation exceptions).</summary>
    Other
}
/// <summary>
/// Severity level of a determinism violation.
/// NOTE: declaration order is load-bearing — enforcement compares
/// <c>violation.Severity &gt;= FailOnSeverity</c>, so values must remain
/// in ascending order of severity.
/// </summary>
public enum DeterminismViolationSeverity
{
    /// <summary>Informational - may not cause issues.</summary>
    Info,
    /// <summary>Warning - potential non-determinism.</summary>
    Warning,
    /// <summary>Error - definite non-determinism source.</summary>
    Error,
    /// <summary>Critical - must be fixed before deployment.</summary>
    Critical
}
/// <summary>
/// Result of determinism analysis (static or runtime).
/// </summary>
public sealed record DeterminismAnalysisResult
{
    /// <summary>
    /// Whether the analysis passed. Always true when enforcement is disabled;
    /// otherwise true only if no violation meets the blocking severity threshold.
    /// </summary>
    public required bool Passed { get; init; }
    /// <summary>
    /// All violations found (including non-blocking ones).
    /// </summary>
    public required ImmutableArray<DeterminismViolation> Violations { get; init; }
    /// <summary>
    /// Count of violations by severity.
    /// </summary>
    public required ImmutableDictionary<DeterminismViolationSeverity, int> CountBySeverity { get; init; }
    /// <summary>
    /// Analysis duration in milliseconds.
    /// </summary>
    public required long AnalysisDurationMs { get; init; }
    /// <summary>
    /// Whether the guard is currently enforcing (blocking on violations).
    /// </summary>
    public required bool EnforcementEnabled { get; init; }
    /// <summary>
    /// Creates a passing result with no violations (e.g. for excluded files).
    /// </summary>
    public static DeterminismAnalysisResult Pass(long durationMs, bool enforcementEnabled) => new()
    {
        Passed = true,
        Violations = ImmutableArray<DeterminismViolation>.Empty,
        CountBySeverity = ImmutableDictionary<DeterminismViolationSeverity, int>.Empty,
        AnalysisDurationMs = durationMs,
        EnforcementEnabled = enforcementEnabled
    };
}
/// <summary>
/// Configuration for determinism guard behavior.
/// </summary>
public sealed record DeterminismGuardOptions
{
    /// <summary>
    /// Whether enforcement is enabled (blocks on violations).
    /// </summary>
    public bool EnforcementEnabled { get; init; } = true;
    /// <summary>
    /// Minimum severity level to fail enforcement (inclusive comparison).
    /// </summary>
    public DeterminismViolationSeverity FailOnSeverity { get; init; } = DeterminismViolationSeverity.Error;
    /// <summary>
    /// Whether to log all violations regardless of enforcement.
    /// </summary>
    public bool LogAllViolations { get; init; } = true;
    /// <summary>
    /// Whether to analyze code statically before execution.
    /// </summary>
    public bool EnableStaticAnalysis { get; init; } = true;
    /// <summary>
    /// Whether to monitor runtime behavior.
    /// </summary>
    public bool EnableRuntimeMonitoring { get; init; } = true;
    /// <summary>
    /// Patterns to exclude from analysis (e.g. test code); matched as
    /// case-insensitive substrings of the file name.
    /// </summary>
    public ImmutableArray<string> ExcludePatterns { get; init; } = ImmutableArray<string>.Empty;
    /// <summary>
    /// Default options for production use (full enforcement at Error severity).
    /// </summary>
    public static DeterminismGuardOptions Default { get; } = new();
    /// <summary>
    /// Options for development/testing: no blocking, log-only; the Critical
    /// threshold is effectively moot because enforcement is off.
    /// </summary>
    public static DeterminismGuardOptions Development { get; } = new()
    {
        EnforcementEnabled = false,
        FailOnSeverity = DeterminismViolationSeverity.Critical,
        LogAllViolations = true
    };
}

View File

@@ -0,0 +1,375 @@
using System.Collections.Immutable;
using System.Diagnostics;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Wraps policy evaluation with determinism guard protection.
/// Enforces static analysis and runtime monitoring during evaluation.
/// </summary>
public sealed class GuardedPolicyEvaluator
{
    private readonly DeterminismGuardService _guard;
    private readonly ProhibitedPatternAnalyzer _analyzer;

    public GuardedPolicyEvaluator(DeterminismGuardOptions? options = null)
    {
        var opts = options ?? DeterminismGuardOptions.Default;
        _guard = new DeterminismGuardService(opts);
        _analyzer = new ProhibitedPatternAnalyzer();
    }

    /// <summary>
    /// Pre-validates policy source code for determinism violations.
    /// Should be called during policy compilation/registration.
    /// Fix: the <paramref name="options"/> override is now honored (it was
    /// previously accepted but silently ignored); when null, the guard's
    /// configured options apply.
    /// </summary>
    public DeterminismAnalysisResult ValidatePolicySource(
        string sourceCode,
        string? fileName = null,
        DeterminismGuardOptions? options = null)
    {
        return options is null
            ? _guard.AnalyzeSource(sourceCode, fileName)
            : _analyzer.AnalyzeSource(sourceCode, fileName, options);
    }

    /// <summary>
    /// Pre-validates multiple policy source files, aggregating all violations.
    /// </summary>
    public DeterminismAnalysisResult ValidatePolicySources(
        IEnumerable<(string SourceCode, string FileName)> sources,
        DeterminismGuardOptions? options = null)
    {
        var opts = options ?? DeterminismGuardOptions.Default;
        return _analyzer.AnalyzeMultiple(sources, opts);
    }

    /// <summary>
    /// Evaluates a policy within a determinism-guarded scope.
    /// Never throws for evaluation failures; errors are surfaced on the result.
    /// </summary>
    public GuardedEvaluationResult<TResult> Evaluate<TResult>(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, TResult> evaluation)
    {
        ArgumentNullException.ThrowIfNull(evaluation);
        var stopwatch = Stopwatch.StartNew();
        using var scope = _guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            var result = evaluation(scope);
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = guardResult.Passed,
                Result = result,
                Violations = guardResult.Violations,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (DeterminismViolationException ex)
        {
            // Enforcement blocked the evaluation; report the triggering violation.
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = false,
                Result = default,
                Violations = guardResult.Violations,
                BlockingViolation = ex.Violation,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return BuildExceptionResult<TResult>(
                scope, ex, stopwatch.ElapsedMilliseconds, scopeId, evaluationTimestamp);
        }
    }

    /// <summary>
    /// Evaluates a policy asynchronously within a determinism-guarded scope.
    /// Cancellation propagates as <see cref="OperationCanceledException"/>;
    /// all other failures are surfaced on the result.
    /// </summary>
    public async Task<GuardedEvaluationResult<TResult>> EvaluateAsync<TResult>(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, Task<TResult>> evaluation,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evaluation);
        var stopwatch = Stopwatch.StartNew();
        using var scope = _guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            var result = await evaluation(scope).ConfigureAwait(false);
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = guardResult.Passed,
                Result = result,
                Violations = guardResult.Violations,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Caller-requested cancellation is not an evaluation failure.
            throw;
        }
        catch (DeterminismViolationException ex)
        {
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = false,
                Result = default,
                Violations = guardResult.Violations,
                BlockingViolation = ex.Violation,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return BuildExceptionResult<TResult>(
                scope, ex, stopwatch.ElapsedMilliseconds, scopeId, evaluationTimestamp);
        }
    }

    /// <summary>
    /// Gets the determinism guard service for advanced usage.
    /// </summary>
    public DeterminismGuardService Guard => _guard;

    // Records an unexpected evaluation exception as a critical "Other" violation so callers
    // get a uniform failure result. Shared by the sync and async paths (was duplicated).
    private static GuardedEvaluationResult<TResult> BuildExceptionResult<TResult>(
        EvaluationScope scope,
        Exception exception,
        long durationMs,
        string scopeId,
        DateTimeOffset evaluationTimestamp)
    {
        var exceptionViolation = new DeterminismViolation
        {
            Category = DeterminismViolationCategory.Other,
            ViolationType = "EvaluationException",
            Message = $"Unexpected exception during evaluation: {exception.Message}",
            Severity = DeterminismViolationSeverity.Critical,
            Remediation = "Review policy logic for errors"
        };

        return new GuardedEvaluationResult<TResult>
        {
            Succeeded = false,
            Result = default,
            Violations = scope.GetViolations().Append(exceptionViolation).ToImmutableArray(),
            BlockingViolation = exceptionViolation,
            EvaluationDurationMs = durationMs,
            ScopeId = scopeId,
            EvaluationTimestamp = evaluationTimestamp,
            Exception = exception
        };
    }
}
/// <summary>
/// Result of a guarded policy evaluation.
/// </summary>
public sealed record GuardedEvaluationResult<TResult>
{
    /// <summary>
    /// Whether the evaluation succeeded without blocking violations.
    /// </summary>
    public required bool Succeeded { get; init; }
    /// <summary>
    /// The evaluation result (may be default if failed).
    /// </summary>
    public TResult? Result { get; init; }
    /// <summary>
    /// All violations detected during evaluation.
    /// </summary>
    public required ImmutableArray<DeterminismViolation> Violations { get; init; }
    /// <summary>
    /// The violation that caused evaluation to be blocked (if any).
    /// </summary>
    public DeterminismViolation? BlockingViolation { get; init; }
    /// <summary>
    /// Evaluation duration in milliseconds.
    /// </summary>
    public required long EvaluationDurationMs { get; init; }
    /// <summary>
    /// Scope identifier for tracing.
    /// </summary>
    public required string ScopeId { get; init; }
    /// <summary>
    /// The fixed evaluation timestamp used.
    /// </summary>
    public required DateTimeOffset EvaluationTimestamp { get; init; }
    /// <summary>
    /// Exception that occurred during evaluation (if any).
    /// </summary>
    public Exception? Exception { get; init; }
    /// <summary>
    /// Number of violations by severity. Computed on each access (not cached).
    /// </summary>
    public ImmutableDictionary<DeterminismViolationSeverity, int> ViolationCountBySeverity =>
        Violations
            .GroupBy(v => v.Severity)
            .ToImmutableDictionary(g => g.Key, g => g.Count());
    /// <summary>
    /// Whether there are any violations (blocking or not).
    /// </summary>
    public bool HasViolations => !Violations.IsDefaultOrEmpty;
    /// <summary>
    /// Whether the evaluation was blocked by a violation.
    /// </summary>
    public bool WasBlocked => BlockingViolation is not null;
}
/// <summary>
/// Builder for creating guarded policy evaluator with custom configuration.
/// </summary>
public sealed class GuardedPolicyEvaluatorBuilder
{
    // Configuration is accumulated immutably via `with` expressions, starting from the
    // production defaults (which match the builder's documented defaults exactly).
    private DeterminismGuardOptions _options = DeterminismGuardOptions.Default;

    /// <summary>
    /// Enables or disables enforcement (blocking on violations).
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithEnforcement(bool enabled)
    {
        _options = _options with { EnforcementEnabled = enabled };
        return this;
    }

    /// <summary>
    /// Sets the minimum severity level to block evaluation.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder FailOnSeverity(DeterminismViolationSeverity severity)
    {
        _options = _options with { FailOnSeverity = severity };
        return this;
    }

    /// <summary>
    /// Enables or disables static code analysis.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithStaticAnalysis(bool enabled)
    {
        _options = _options with { EnableStaticAnalysis = enabled };
        return this;
    }

    /// <summary>
    /// Enables or disables runtime monitoring.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithRuntimeMonitoring(bool enabled)
    {
        _options = _options with { EnableRuntimeMonitoring = enabled };
        return this;
    }

    /// <summary>
    /// Enables or disables logging of all violations.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithViolationLogging(bool enabled)
    {
        _options = _options with { LogAllViolations = enabled };
        return this;
    }

    /// <summary>
    /// Adds patterns to exclude from analysis (cumulative across calls).
    /// </summary>
    public GuardedPolicyEvaluatorBuilder ExcludePatterns(params string[] patterns)
    {
        _options = _options with { ExcludePatterns = _options.ExcludePatterns.AddRange(patterns) };
        return this;
    }

    /// <summary>
    /// Creates the configured GuardedPolicyEvaluator.
    /// </summary>
    public GuardedPolicyEvaluator Build() => new(_options);

    /// <summary>
    /// Creates a development-mode evaluator (warnings only, no blocking).
    /// </summary>
    public static GuardedPolicyEvaluator CreateDevelopment()
        => new(DeterminismGuardOptions.Development);

    /// <summary>
    /// Creates a production-mode evaluator (full enforcement).
    /// </summary>
    public static GuardedPolicyEvaluator CreateProduction()
        => new(DeterminismGuardOptions.Default);
}

View File

@@ -0,0 +1,412 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.RegularExpressions;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Static analyzer that detects prohibited non-deterministic patterns in source code.
/// </summary>
public sealed partial class ProhibitedPatternAnalyzer
{
private static readonly ImmutableArray<ProhibitedPattern> Patterns = CreatePatterns();
/// <summary>
/// Analyzes source code for prohibited patterns.
/// </summary>
public DeterminismAnalysisResult AnalyzeSource(
string sourceCode,
string? fileName,
DeterminismGuardOptions options)
{
ArgumentNullException.ThrowIfNull(sourceCode);
options ??= DeterminismGuardOptions.Default;
var stopwatch = Stopwatch.StartNew();
var violations = new List<DeterminismViolation>();
// Check exclusions
if (fileName is not null && IsExcluded(fileName, options.ExcludePatterns))
{
return DeterminismAnalysisResult.Pass(stopwatch.ElapsedMilliseconds, options.EnforcementEnabled);
}
// Split into lines for line number tracking
var lines = sourceCode.Split('\n');
for (var lineIndex = 0; lineIndex < lines.Length; lineIndex++)
{
var line = lines[lineIndex];
var lineNumber = lineIndex + 1;
// Skip comments
var trimmedLine = line.TrimStart();
if (trimmedLine.StartsWith("//") || trimmedLine.StartsWith("/*") || trimmedLine.StartsWith("*"))
{
continue;
}
foreach (var pattern in Patterns)
{
if (pattern.Regex.IsMatch(line))
{
violations.Add(new DeterminismViolation
{
Category = pattern.Category,
ViolationType = pattern.ViolationType,
Message = pattern.Message,
SourceFile = fileName,
LineNumber = lineNumber,
MemberName = ExtractMemberContext(lines, lineIndex),
Severity = pattern.Severity,
Remediation = pattern.Remediation
});
}
}
}
stopwatch.Stop();
var countBySeverity = violations
.GroupBy(v => v.Severity)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var hasBlockingViolation = violations.Any(v => v.Severity >= options.FailOnSeverity);
var passed = !options.EnforcementEnabled || !hasBlockingViolation;
return new DeterminismAnalysisResult
{
Passed = passed,
Violations = violations.ToImmutableArray(),
CountBySeverity = countBySeverity,
AnalysisDurationMs = stopwatch.ElapsedMilliseconds,
EnforcementEnabled = options.EnforcementEnabled
};
}
/// <summary>
/// Analyzes multiple source files.
/// </summary>
public DeterminismAnalysisResult AnalyzeMultiple(
IEnumerable<(string SourceCode, string FileName)> sources,
DeterminismGuardOptions options)
{
ArgumentNullException.ThrowIfNull(sources);
options ??= DeterminismGuardOptions.Default;
var stopwatch = Stopwatch.StartNew();
var allViolations = new List<DeterminismViolation>();
foreach (var (sourceCode, fileName) in sources)
{
var result = AnalyzeSource(sourceCode, fileName, options with { EnforcementEnabled = false });
allViolations.AddRange(result.Violations);
}
stopwatch.Stop();
var countBySeverity = allViolations
.GroupBy(v => v.Severity)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var hasBlockingViolation = allViolations.Any(v => v.Severity >= options.FailOnSeverity);
var passed = !options.EnforcementEnabled || !hasBlockingViolation;
return new DeterminismAnalysisResult
{
Passed = passed,
Violations = allViolations.ToImmutableArray(),
CountBySeverity = countBySeverity,
AnalysisDurationMs = stopwatch.ElapsedMilliseconds,
EnforcementEnabled = options.EnforcementEnabled
};
}
private static bool IsExcluded(string fileName, ImmutableArray<string> excludePatterns)
{
if (excludePatterns.IsDefaultOrEmpty)
{
return false;
}
return excludePatterns.Any(pattern =>
fileName.Contains(pattern, StringComparison.OrdinalIgnoreCase));
}
private static string? ExtractMemberContext(string[] lines, int lineIndex)
{
// Look backwards for method/property/class declaration
for (var i = lineIndex; i >= 0 && i > lineIndex - 20; i--)
{
var line = lines[i].Trim();
// Method pattern
var methodMatch = MethodDeclarationRegex().Match(line);
if (methodMatch.Success)
{
return methodMatch.Groups[1].Value;
}
// Property pattern
var propertyMatch = PropertyDeclarationRegex().Match(line);
if (propertyMatch.Success)
{
return propertyMatch.Groups[1].Value;
}
// Class pattern
var classMatch = ClassDeclarationRegex().Match(line);
if (classMatch.Success)
{
return classMatch.Groups[1].Value;
}
}
return null;
}
[GeneratedRegex(@"(?:public|private|protected|internal)\s+.*?\s+(\w+)\s*\(")]
private static partial Regex MethodDeclarationRegex();
[GeneratedRegex(@"(?:public|private|protected|internal)\s+.*?\s+(\w+)\s*\{")]
private static partial Regex PropertyDeclarationRegex();
[GeneratedRegex(@"(?:class|struct|record)\s+(\w+)")]
private static partial Regex ClassDeclarationRegex();
private static ImmutableArray<ProhibitedPattern> CreatePatterns()
{
return ImmutableArray.Create(
// Wall-clock violations
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTime.Now",
Regex = DateTimeNowRegex(),
Message = "DateTime.Now usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTime.UtcNow",
Regex = DateTimeUtcNowRegex(),
Message = "DateTime.UtcNow usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTimeOffset.Now",
Regex = DateTimeOffsetNowRegex(),
Message = "DateTimeOffset.Now usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTimeOffset.UtcNow",
Regex = DateTimeOffsetUtcNowRegex(),
Message = "DateTimeOffset.UtcNow usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
// Random number violations
new ProhibitedPattern
{
Category = DeterminismViolationCategory.RandomNumber,
ViolationType = "Random",
Regex = RandomClassRegex(),
Message = "Random class usage detected - non-deterministic random number generation",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use deterministic seeded random if needed, or remove randomness"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.RandomNumber,
ViolationType = "RandomNumberGenerator",
Regex = CryptoRandomRegex(),
Message = "Cryptographic random usage detected - non-deterministic",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Remove cryptographic random from evaluation path"
},
// GUID generation
new ProhibitedPattern
{
Category = DeterminismViolationCategory.GuidGeneration,
ViolationType = "Guid.NewGuid",
Regex = GuidNewGuidRegex(),
Message = "Guid.NewGuid() usage detected - non-deterministic identifier generation",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use deterministic ID generation based on content hash"
},
// Network access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "HttpClient",
Regex = HttpClientRegex(),
Message = "HttpClient usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove network access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "WebClient",
Regex = WebClientRegex(),
Message = "WebClient usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove network access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "Socket",
Regex = SocketRegex(),
Message = "Socket usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove socket access from evaluation path"
},
// Environment access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.EnvironmentAccess,
ViolationType = "Environment.GetEnvironmentVariable",
Regex = EnvironmentGetEnvRegex(),
Message = "Environment variable access detected - host-dependent",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use evaluation context environment properties instead"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.EnvironmentAccess,
ViolationType = "Environment.MachineName",
Regex = EnvironmentMachineNameRegex(),
Message = "Environment.MachineName access detected - host-dependent",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Remove host-specific information from evaluation"
},
// Filesystem access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FileSystemAccess,
ViolationType = "File.Read",
Regex = FileReadRegex(),
Message = "File read operation detected - filesystem access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove file access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FileSystemAccess,
ViolationType = "File.Write",
Regex = FileWriteRegex(),
Message = "File write operation detected - filesystem access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove file access from evaluation path"
},
// Floating-point hazards
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FloatingPointHazard,
ViolationType = "double comparison",
Regex = DoubleComparisonRegex(),
Message = "Direct double comparison detected - may have platform variance",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use decimal type for precise comparisons"
},
// Unstable iteration
new ProhibitedPattern
{
Category = DeterminismViolationCategory.UnstableIteration,
ViolationType = "Dictionary iteration",
Regex = DictionaryIterationRegex(),
Message = "Dictionary iteration detected - may have unstable ordering",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use SortedDictionary or OrderBy before iteration"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.UnstableIteration,
ViolationType = "HashSet iteration",
Regex = HashSetIterationRegex(),
Message = "HashSet iteration detected - may have unstable ordering",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use SortedSet or OrderBy before iteration"
}
);
}
// Generated regex patterns for prohibited patterns.
// Each [GeneratedRegex] partial method is implemented by the Regex source
// generator at compile time, yielding a cached, compiled pattern with no
// per-call allocation.

// Wall-clock reads: non-deterministic across runs. (?!\w) avoids matching
// longer identifiers such as DateTime.NowProvider.
[GeneratedRegex(@"DateTime\.Now(?!\w)")]
private static partial Regex DateTimeNowRegex();
[GeneratedRegex(@"DateTime\.UtcNow(?!\w)")]
private static partial Regex DateTimeUtcNowRegex();
[GeneratedRegex(@"DateTimeOffset\.Now(?!\w)")]
private static partial Regex DateTimeOffsetNowRegex();
[GeneratedRegex(@"DateTimeOffset\.UtcNow(?!\w)")]
private static partial Regex DateTimeOffsetUtcNowRegex();
// Randomness sources.
[GeneratedRegex(@"new\s+Random\s*\(")]
private static partial Regex RandomClassRegex();
// NOTE: bare name match — also flags mentions inside longer identifiers.
[GeneratedRegex(@"RandomNumberGenerator")]
private static partial Regex CryptoRandomRegex();
[GeneratedRegex(@"Guid\.NewGuid\s*\(")]
private static partial Regex GuidNewGuidRegex();
// Network access. NOTE(review): "HttpClient" also matches IHttpClientFactory
// and HttpClientHandler occurrences — confirm that is intended.
[GeneratedRegex(@"HttpClient")]
private static partial Regex HttpClientRegex();
[GeneratedRegex(@"WebClient")]
private static partial Regex WebClientRegex();
[GeneratedRegex(@"(?:TcpClient|UdpClient|Socket)\s*\(")]
private static partial Regex SocketRegex();
// Ambient environment reads.
[GeneratedRegex(@"Environment\.GetEnvironmentVariable")]
private static partial Regex EnvironmentGetEnvRegex();
[GeneratedRegex(@"Environment\.MachineName")]
private static partial Regex EnvironmentMachineNameRegex();
// File-system access (the Read alternative already covers ReadAll* prefixes).
[GeneratedRegex(@"File\.(?:Read|Open|ReadAll)")]
private static partial Regex FileReadRegex();
[GeneratedRegex(@"File\.(?:Write|Create|Append)")]
private static partial Regex FileWriteRegex();
// Exact floating-point comparison (==, !=, <=, >= on double/float declarations).
[GeneratedRegex(@"(?:double|float)\s+\w+\s*[=<>!]=")]
private static partial Regex DoubleComparisonRegex();
// Iteration over unordered collections (matched by variable-name convention).
[GeneratedRegex(@"foreach\s*\([^)]+\s+in\s+\w*[Dd]ictionary")]
private static partial Regex DictionaryIterationRegex();
[GeneratedRegex(@"foreach\s*\([^)]+\s+in\s+\w*[Hh]ashSet")]
private static partial Regex HashSetIterationRegex();
/// <summary>
/// Declarative description of a single source pattern that violates a
/// determinism rule: which regex to scan with, and how to categorize,
/// rate, and remediate a match.
/// </summary>
private sealed record ProhibitedPattern
{
    // Grouping bucket for reporting (e.g. UnstableIteration).
    public required DeterminismViolationCategory Category { get; init; }
    // Short human-readable label for the violation type.
    public required string ViolationType { get; init; }
    // Compiled pattern used to detect the violation in source text.
    public required Regex Regex { get; init; }
    // Message emitted when the pattern matches.
    public required string Message { get; init; }
    // Severity assigned to a match (e.g. Warning vs Error).
    public required DeterminismViolationSeverity Severity { get; init; }
    // Optional suggested fix; null when no canonical remediation exists.
    public string? Remediation { get; init; }
}
}

View File

@@ -0,0 +1,81 @@
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Domain;
/// <summary>
/// Request for a policy decision with source evidence summaries (POLICY-ENGINE-40-003).
/// </summary>
/// <param name="SnapshotId">Identifier of the policy snapshot to evaluate.</param>
/// <param name="TenantId">Optional tenant filter.</param>
/// <param name="ComponentPurl">Optional component purl filter.</param>
/// <param name="AdvisoryId">Optional advisory filter.</param>
/// <param name="IncludeEvidence">Whether evidence summaries should be included per decision.</param>
/// <param name="MaxSources">Upper bound on severity sources returned per decision (default 5).</param>
public sealed record PolicyDecisionRequest(
    [property: JsonPropertyName("snapshot_id")] string SnapshotId,
    [property: JsonPropertyName("tenant_id")] string? TenantId = null,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl = null,
    [property: JsonPropertyName("advisory_id")] string? AdvisoryId = null,
    [property: JsonPropertyName("include_evidence")] bool IncludeEvidence = true,
    [property: JsonPropertyName("max_sources")] int MaxSources = 5);
/// <summary>
/// Response containing policy decisions with source evidence summaries.
/// </summary>
/// <param name="SnapshotId">Snapshot the decisions were evaluated against.</param>
/// <param name="Decisions">Individual decision items.</param>
/// <param name="Summary">Aggregate statistics across all decisions.</param>
public sealed record PolicyDecisionResponse(
    [property: JsonPropertyName("snapshot_id")] string SnapshotId,
    [property: JsonPropertyName("decisions")] IReadOnlyList<PolicyDecisionItem> Decisions,
    [property: JsonPropertyName("summary")] PolicyDecisionSummary Summary);
/// <summary>
/// A single policy decision with associated evidence.
/// </summary>
/// <param name="TenantId">Tenant the decision belongs to.</param>
/// <param name="ComponentPurl">Component (purl) the decision applies to.</param>
/// <param name="AdvisoryId">Advisory the decision applies to.</param>
/// <param name="SeverityFused">Fused severity label after source weighting.</param>
/// <param name="Score">Numeric decision score.</param>
/// <param name="Status">Decision status string.</param>
/// <param name="TopSources">Ranked severity sources contributing to the decision.</param>
/// <param name="Evidence">Evidence summary; null when evidence was not requested.</param>
/// <param name="ConflictCount">Number of conflicting source opinions.</param>
/// <param name="ReasonCodes">Machine-readable reason codes for the decision.</param>
public sealed record PolicyDecisionItem(
    [property: JsonPropertyName("tenant_id")] string TenantId,
    [property: JsonPropertyName("component_purl")] string ComponentPurl,
    [property: JsonPropertyName("advisory_id")] string AdvisoryId,
    [property: JsonPropertyName("severity_fused")] string SeverityFused,
    [property: JsonPropertyName("score")] decimal Score,
    [property: JsonPropertyName("status")] string Status,
    [property: JsonPropertyName("top_sources")] IReadOnlyList<PolicyDecisionSource> TopSources,
    [property: JsonPropertyName("evidence")] PolicyDecisionEvidence? Evidence,
    [property: JsonPropertyName("conflict_count")] int ConflictCount,
    [property: JsonPropertyName("reason_codes")] IReadOnlyList<string> ReasonCodes);
/// <summary>
/// Top severity source information for a decision.
/// </summary>
/// <param name="Source">Source identifier.</param>
/// <param name="Weight">Weight applied to this source during fusion.</param>
/// <param name="Severity">Severity reported by this source.</param>
/// <param name="Score">Score reported by this source.</param>
/// <param name="Rank">1-based rank of this source among the decision's sources.</param>
public sealed record PolicyDecisionSource(
    [property: JsonPropertyName("source")] string Source,
    [property: JsonPropertyName("weight")] decimal Weight,
    [property: JsonPropertyName("severity")] string Severity,
    [property: JsonPropertyName("score")] decimal Score,
    [property: JsonPropertyName("rank")] int Rank);
/// <summary>
/// Evidence summary for a policy decision.
/// </summary>
/// <param name="Headline">One-line evidence headline.</param>
/// <param name="Severity">Severity associated with the evidence.</param>
/// <param name="Locator">Where the evidence was found.</param>
/// <param name="Signals">Supporting signal identifiers.</param>
public sealed record PolicyDecisionEvidence(
    [property: JsonPropertyName("headline")] string Headline,
    [property: JsonPropertyName("severity")] string Severity,
    [property: JsonPropertyName("locator")] PolicyDecisionLocator Locator,
    [property: JsonPropertyName("signals")] IReadOnlyList<string> Signals);
/// <summary>
/// Evidence locator information.
/// </summary>
/// <param name="FilePath">Path of the file the evidence refers to.</param>
/// <param name="Digest">Optional content digest of that file.</param>
public sealed record PolicyDecisionLocator(
    [property: JsonPropertyName("file_path")] string FilePath,
    [property: JsonPropertyName("digest")] string? Digest);
/// <summary>
/// Summary statistics for the decision response.
/// </summary>
/// <param name="TotalDecisions">Total number of decisions returned.</param>
/// <param name="TotalConflicts">Sum of conflicts across all decisions.</param>
/// <param name="SeverityCounts">Decision count keyed by fused severity.</param>
/// <param name="TopSeveritySources">Sources ranked by aggregate contribution.</param>
public sealed record PolicyDecisionSummary(
    [property: JsonPropertyName("total_decisions")] int TotalDecisions,
    [property: JsonPropertyName("total_conflicts")] int TotalConflicts,
    [property: JsonPropertyName("severity_counts")] IReadOnlyDictionary<string, int> SeverityCounts,
    [property: JsonPropertyName("top_severity_sources")] IReadOnlyList<PolicyDecisionSourceRank> TopSeveritySources);
/// <summary>
/// Aggregated source rank across all decisions.
/// </summary>
/// <param name="Source">Source identifier.</param>
/// <param name="TotalWeight">Sum of the source's weights across decisions.</param>
/// <param name="DecisionCount">Number of decisions the source contributed to.</param>
/// <param name="AverageScore">Mean score the source reported across those decisions.</param>
public sealed record PolicyDecisionSourceRank(
    [property: JsonPropertyName("source")] string Source,
    [property: JsonPropertyName("total_weight")] decimal TotalWeight,
    [property: JsonPropertyName("decision_count")] int DecisionCount,
    [property: JsonPropertyName("average_score")] decimal AverageScore);

View File

@@ -0,0 +1,360 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Overrides;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for risk-profile overrides: create, read, delete,
/// list-by-profile, conflict validation, approve/disable transitions, and
/// application history. All routes require authorization plus an explicit
/// StellaOps scope check per handler.
/// </summary>
internal static class OverrideEndpoints
{
    /// <summary>Registers all override routes under <c>/api/risk/overrides</c>.</summary>
    public static IEndpointRouteBuilder MapOverrides(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/overrides")
            .RequireAuthorization()
            .WithTags("Risk Overrides");
        group.MapPost("/", CreateOverride)
            .WithName("CreateOverride")
            .WithSummary("Create a new override with audit metadata.")
            .Produces<OverrideResponse>(StatusCodes.Status201Created)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
        group.MapGet("/{overrideId}", GetOverride)
            .WithName("GetOverride")
            .WithSummary("Get an override by ID.")
            .Produces<OverrideResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapDelete("/{overrideId}", DeleteOverride)
            .WithName("DeleteOverride")
            .WithSummary("Delete an override.")
            .Produces(StatusCodes.Status204NoContent)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapGet("/profile/{profileId}", ListProfileOverrides)
            .WithName("ListProfileOverrides")
            .WithSummary("List all overrides for a risk profile.")
            .Produces<OverrideListResponse>(StatusCodes.Status200OK);
        group.MapPost("/validate", ValidateOverride)
            .WithName("ValidateOverride")
            .WithSummary("Validate an override for conflicts before creating.")
            .Produces<OverrideValidationResponse>(StatusCodes.Status200OK);
        // ":approve"/":disable" use the custom-verb suffix convention on the
        // resource path.
        group.MapPost("/{overrideId}:approve", ApproveOverride)
            .WithName("ApproveOverride")
            .WithSummary("Approve an override that requires review.")
            .Produces<OverrideResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapPost("/{overrideId}:disable", DisableOverride)
            .WithName("DisableOverride")
            .WithSummary("Disable an active override.")
            .Produces<OverrideResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapGet("/{overrideId}/history", GetOverrideHistory)
            .WithName("GetOverrideHistory")
            .WithSummary("Get application history for an override.")
            .Produces<OverrideHistoryResponse>(StatusCodes.Status200OK);
        return endpoints;
    }

    /// <summary>
    /// Creates an override after validating the request body, the target
    /// profile's existence, and conflict rules. Returns 201 with the audited
    /// override (plus any non-blocking warnings) on success.
    /// </summary>
    private static IResult CreateOverride(
        HttpContext context,
        [FromBody] CreateOverrideRequest request,
        OverrideService overrideService,
        RiskProfileConfigurationService profileService)
    {
        // Requires edit scope; RequireScope returns a ready-made failure result.
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "ProfileId is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        if (string.IsNullOrWhiteSpace(request.Reason))
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Reason is required for audit purposes.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        // Verify profile exists
        var profile = profileService.GetProfile(request.ProfileId);
        if (profile == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Profile not found",
                Detail = $"Risk profile '{request.ProfileId}' was not found.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        // Validate for conflicts; hard conflicts block creation, warnings are
        // passed through in the success response.
        var validation = overrideService.ValidateConflicts(request);
        if (validation.HasConflicts)
        {
            var conflictDetails = string.Join("; ", validation.Conflicts.Select(c => c.Description));
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Override conflicts detected",
                Detail = conflictDetails,
                Status = StatusCodes.Status400BadRequest,
                Extensions = { ["conflicts"] = validation.Conflicts }
            });
        }
        var actorId = ResolveActorId(context);
        try
        {
            var auditedOverride = overrideService.Create(request, actorId);
            return Results.Created(
                $"/api/risk/overrides/{auditedOverride.OverrideId}",
                new OverrideResponse(auditedOverride, validation.Warnings));
        }
        catch (ArgumentException ex)
        {
            // Service-level argument validation maps to a 400, not a 500.
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = ex.Message,
                Status = StatusCodes.Status400BadRequest
            });
        }
    }

    /// <summary>Fetches a single override by id; 404 when unknown.</summary>
    private static IResult GetOverride(
        HttpContext context,
        [FromRoute] string overrideId,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var auditedOverride = overrideService.Get(overrideId);
        if (auditedOverride == null)
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Override not found",
                Detail = $"Override '{overrideId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.Ok(new OverrideResponse(auditedOverride, null));
    }

    /// <summary>Deletes an override; 204 on success, 404 when unknown.</summary>
    private static IResult DeleteOverride(
        HttpContext context,
        [FromRoute] string overrideId,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!overrideService.Delete(overrideId))
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Override not found",
                Detail = $"Override '{overrideId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.NoContent();
    }

    /// <summary>
    /// Lists overrides for a profile; <paramref name="includeInactive"/>
    /// controls whether disabled/expired entries are included.
    /// </summary>
    private static IResult ListProfileOverrides(
        HttpContext context,
        [FromRoute] string profileId,
        [FromQuery] bool includeInactive,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var overrides = overrideService.ListByProfile(profileId, includeInactive);
        return Results.Ok(new OverrideListResponse(profileId, overrides));
    }

    /// <summary>
    /// Dry-run conflict validation: always 200 with the validation result,
    /// never persists anything.
    /// </summary>
    private static IResult ValidateOverride(
        HttpContext context,
        [FromBody] CreateOverrideRequest request,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Request body is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var validation = overrideService.ValidateConflicts(request);
        return Results.Ok(new OverrideValidationResponse(validation));
    }

    /// <summary>
    /// Approves a pending override. Requires the activate scope (stronger than
    /// edit). InvalidOperationException from the service (e.g. wrong state)
    /// maps to 400.
    /// </summary>
    private static IResult ApproveOverride(
        HttpContext context,
        [FromRoute] string overrideId,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyActivate);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var actorId = ResolveActorId(context);
        try
        {
            // Falls back to "system" when no actor can be resolved from the request.
            var auditedOverride = overrideService.Approve(overrideId, actorId ?? "system");
            if (auditedOverride == null)
            {
                return Results.NotFound(new ProblemDetails
                {
                    Title = "Override not found",
                    Detail = $"Override '{overrideId}' was not found.",
                    Status = StatusCodes.Status404NotFound
                });
            }
            return Results.Ok(new OverrideResponse(auditedOverride, null));
        }
        catch (InvalidOperationException ex)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Approval failed",
                Detail = ex.Message,
                Status = StatusCodes.Status400BadRequest
            });
        }
    }

    /// <summary>Disables an active override, recording an optional reason.</summary>
    private static IResult DisableOverride(
        HttpContext context,
        [FromRoute] string overrideId,
        [FromQuery] string? reason,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var actorId = ResolveActorId(context);
        var auditedOverride = overrideService.Disable(overrideId, actorId ?? "system", reason);
        if (auditedOverride == null)
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Override not found",
                Detail = $"Override '{overrideId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.Ok(new OverrideResponse(auditedOverride, null));
    }

    /// <summary>
    /// Returns the application history for an override; a non-positive
    /// <paramref name="limit"/> defaults to 100 entries.
    /// </summary>
    private static IResult GetOverrideHistory(
        HttpContext context,
        [FromRoute] string overrideId,
        [FromQuery] int limit,
        OverrideService overrideService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var effectiveLimit = limit > 0 ? limit : 100;
        var history = overrideService.GetApplicationHistory(overrideId, effectiveLimit);
        return Results.Ok(new OverrideHistoryResponse(overrideId, history));
    }

    /// <summary>
    /// Resolves the acting user's id from claims (NameIdentifier, UPN, then
    /// OIDC "sub"), falling back to the X-StellaOps-Actor header; null when
    /// no actor can be determined.
    /// </summary>
    private static string? ResolveActorId(HttpContext context)
    {
        var user = context.User;
        var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
            ?? user?.FindFirst(ClaimTypes.Upn)?.Value
            ?? user?.FindFirst("sub")?.Value;
        if (!string.IsNullOrWhiteSpace(actor))
        {
            return actor;
        }
        if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
        {
            return header.ToString();
        }
        return null;
    }
}
#region Response DTOs
/// <summary>Single-override response, optionally carrying non-blocking validation warnings.</summary>
internal sealed record OverrideResponse(
    AuditedOverride Override,
    IReadOnlyList<string>? Warnings);
/// <summary>Overrides belonging to one profile.</summary>
internal sealed record OverrideListResponse(
    string ProfileId,
    IReadOnlyList<AuditedOverride> Overrides);
/// <summary>Wrapper for a dry-run conflict validation result.</summary>
internal sealed record OverrideValidationResponse(OverrideConflictValidation Validation);
/// <summary>Application history entries for one override.</summary>
internal sealed record OverrideHistoryResponse(
    string OverrideId,
    IReadOnlyList<OverrideApplicationRecord> History);
#endregion

View File

@@ -0,0 +1,77 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Services;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// API endpoint for policy decisions with source evidence summaries (POLICY-ENGINE-40-003).
/// Exposes a POST route taking a full <see cref="PolicyDecisionRequest"/> body and a
/// GET route that assembles the same request from route/query values.
/// </summary>
public static class PolicyDecisionEndpoint
{
    /// <summary>Registers the policy decision routes and returns <paramref name="routes"/> for chaining.</summary>
    public static IEndpointRouteBuilder MapPolicyDecisions(this IEndpointRouteBuilder routes)
    {
        routes.MapPost("/policy/decisions", GetDecisionsAsync)
            .WithName("PolicyEngine.Decisions")
            .WithDescription("Request policy decisions with source evidence summaries, top severity sources, and conflict counts.");
        routes.MapGet("/policy/decisions/{snapshotId}", GetDecisionsBySnapshotAsync)
            .WithName("PolicyEngine.Decisions.BySnapshot")
            .WithDescription("Get policy decisions for a specific snapshot.");
        return routes;
    }

    /// <summary>
    /// POST handler: evaluates the request body via <see cref="PolicyDecisionService"/>.
    /// ArgumentException maps to 400, KeyNotFoundException to 404.
    /// </summary>
    private static async Task<IResult> GetDecisionsAsync(
        [FromBody] PolicyDecisionRequest request,
        PolicyDecisionService service,
        CancellationToken cancellationToken)
    {
        try
        {
            var response = await service.GetDecisionsAsync(request, cancellationToken).ConfigureAwait(false);
            return Results.Ok(response);
        }
        catch (ArgumentException ex)
        {
            return Results.BadRequest(new { message = ex.Message });
        }
        catch (KeyNotFoundException ex)
        {
            return Results.NotFound(new { message = ex.Message });
        }
    }

    /// <summary>
    /// GET handler: builds a <see cref="PolicyDecisionRequest"/> from the route
    /// snapshot id and optional query filters, then delegates to the service.
    /// The DI-injected service and the cancellation token are declared before
    /// the optional query parameters so no "= default!" null-forgiving
    /// placeholder is needed; minimal-API binding resolves parameters by
    /// type/attribute, not by position.
    /// </summary>
    private static async Task<IResult> GetDecisionsBySnapshotAsync(
        [FromRoute] string snapshotId,
        [FromQuery] string? tenantId,
        [FromQuery] string? componentPurl,
        [FromQuery] string? advisoryId,
        PolicyDecisionService service,
        CancellationToken cancellationToken,
        [FromQuery] bool includeEvidence = true,
        [FromQuery] int maxSources = 5)
    {
        try
        {
            var request = new PolicyDecisionRequest(
                SnapshotId: snapshotId,
                TenantId: tenantId,
                ComponentPurl: componentPurl,
                AdvisoryId: advisoryId,
                IncludeEvidence: includeEvidence,
                MaxSources: maxSources);
            var response = await service.GetDecisionsAsync(request, cancellationToken).ConfigureAwait(false);
            return Results.Ok(response);
        }
        catch (ArgumentException ex)
        {
            return Results.BadRequest(new { message = ex.Message });
        }
        catch (KeyNotFoundException ex)
        {
            return Results.NotFound(new { message = ex.Message });
        }
    }
}

View File

@@ -0,0 +1,195 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Events;
using StellaOps.Policy.Engine.Services;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for profile lifecycle events: recent/filtered event
/// queries and poll-based subscriptions. All routes require authorization
/// plus the policy read scope per handler.
/// </summary>
internal static class ProfileEventEndpoints
{
    /// <summary>Registers all event routes under <c>/api/risk/events</c>.</summary>
    public static IEndpointRouteBuilder MapProfileEvents(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/events")
            .RequireAuthorization()
            .WithTags("Profile Events");
        group.MapGet("/", GetRecentEvents)
            .WithName("GetRecentProfileEvents")
            .WithSummary("Get recent profile lifecycle events.")
            .Produces<EventListResponse>(StatusCodes.Status200OK);
        group.MapGet("/filter", GetFilteredEvents)
            .WithName("GetFilteredProfileEvents")
            .WithSummary("Get profile events with optional filtering.")
            .Produces<EventListResponse>(StatusCodes.Status200OK);
        group.MapPost("/subscribe", CreateSubscription)
            .WithName("CreateEventSubscription")
            .WithSummary("Subscribe to profile lifecycle events.")
            .Produces<SubscriptionResponse>(StatusCodes.Status201Created);
        group.MapDelete("/subscribe/{subscriptionId}", DeleteSubscription)
            .WithName("DeleteEventSubscription")
            .WithSummary("Unsubscribe from profile lifecycle events.")
            .Produces(StatusCodes.Status204NoContent)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapGet("/subscribe/{subscriptionId}/poll", PollSubscription)
            .WithName("PollEventSubscription")
            .WithSummary("Poll for events from a subscription.")
            .Produces<EventListResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        return endpoints;
    }

    /// <summary>
    /// Returns the most recent events; a non-positive <paramref name="limit"/>
    /// defaults to 100.
    /// </summary>
    private static IResult GetRecentEvents(
        HttpContext context,
        [FromQuery] int limit,
        ProfileEventPublisher eventPublisher)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var effectiveLimit = limit > 0 ? limit : 100;
        var events = eventPublisher.GetRecentEvents(effectiveLimit);
        return Results.Ok(new EventListResponse(events));
    }

    /// <summary>
    /// Returns events filtered by optional type, profile id, and start time;
    /// limit defaults to 100 when non-positive.
    /// </summary>
    private static IResult GetFilteredEvents(
        HttpContext context,
        [FromQuery] ProfileEventType? eventType,
        [FromQuery] string? profileId,
        [FromQuery] DateTimeOffset? since,
        [FromQuery] int limit,
        ProfileEventPublisher eventPublisher)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var effectiveLimit = limit > 0 ? limit : 100;
        var events = eventPublisher.GetEventsFiltered(eventType, profileId, since, effectiveLimit);
        return Results.Ok(new EventListResponse(events));
    }

    /// <summary>
    /// Creates a subscription for the requested event types; at least one
    /// type is required. Returns 201 with the subscription location.
    /// </summary>
    private static IResult CreateSubscription(
        HttpContext context,
        [FromBody] CreateSubscriptionRequest request,
        ProfileEventPublisher eventPublisher)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || request.EventTypes == null || request.EventTypes.Count == 0)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "At least one event type is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var actorId = ResolveActorId(context);
        var subscription = eventPublisher.Subscribe(
            request.EventTypes,
            request.ProfileFilter,
            request.WebhookUrl,
            actorId);
        return Results.Created(
            $"/api/risk/events/subscribe/{subscription.SubscriptionId}",
            new SubscriptionResponse(subscription));
    }

    /// <summary>Removes a subscription; 204 on success, 404 when unknown.</summary>
    private static IResult DeleteSubscription(
        HttpContext context,
        [FromRoute] string subscriptionId,
        ProfileEventPublisher eventPublisher)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!eventPublisher.Unsubscribe(subscriptionId))
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Subscription not found",
                Detail = $"Subscription '{subscriptionId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.NoContent();
    }

    /// <summary>
    /// Polls a subscription for queued events. Note: an unknown subscription
    /// id is indistinguishable from "no events" here — both return an empty
    /// 200 despite the advertised 404.
    /// </summary>
    private static IResult PollSubscription(
        HttpContext context,
        [FromRoute] string subscriptionId,
        [FromQuery] int limit,
        ProfileEventPublisher eventPublisher)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var effectiveLimit = limit > 0 ? limit : 100;
        var events = eventPublisher.GetEvents(subscriptionId, effectiveLimit);
        // If no events, the subscription might not exist
        // We return empty list either way since the subscription might just have no events
        return Results.Ok(new EventListResponse(events));
    }

    /// <summary>
    /// Resolves the acting user's id from claims (NameIdentifier, UPN, then
    /// OIDC "sub"), falling back to the X-StellaOps-Actor header; null when
    /// no actor can be determined.
    /// </summary>
    private static string? ResolveActorId(HttpContext context)
    {
        var user = context.User;
        var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
            ?? user?.FindFirst(ClaimTypes.Upn)?.Value
            ?? user?.FindFirst("sub")?.Value;
        if (!string.IsNullOrWhiteSpace(actor))
        {
            return actor;
        }
        if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
        {
            return header.ToString();
        }
        return null;
    }
}
#region Request/Response DTOs
/// <summary>Batch of profile lifecycle events returned by query/poll endpoints.</summary>
internal sealed record EventListResponse(IReadOnlyList<ProfileEvent> Events);
/// <summary>Subscription request: event types to receive, optional profile filter, optional webhook target.</summary>
internal sealed record CreateSubscriptionRequest(
    IReadOnlyList<ProfileEventType> EventTypes,
    string? ProfileFilter,
    string? WebhookUrl);
/// <summary>Wrapper for a newly created subscription.</summary>
internal sealed record SubscriptionResponse(EventSubscription Subscription);
#endregion

View File

@@ -0,0 +1,238 @@
using System.Security.Claims;
using System.Text.Json;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Export;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for exporting, downloading, importing, and verifying
/// signed risk-profile bundles.
/// </summary>
internal static class ProfileExportEndpoints
{
    /// <summary>
    /// Registers export routes under <c>/api/risk/profiles/export</c> and the
    /// import/verify routes directly under <c>/api/risk/profiles</c>.
    /// </summary>
    public static IEndpointRouteBuilder MapProfileExport(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/profiles/export")
            .RequireAuthorization()
            .WithTags("Profile Export/Import");
        group.MapPost("/", ExportProfiles)
            .WithName("ExportProfiles")
            .WithSummary("Export risk profiles as a signed bundle.")
            .Produces<ExportResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
        group.MapPost("/download", DownloadBundle)
            .WithName("DownloadProfileBundle")
            .WithSummary("Export and download risk profiles as a JSON file.")
            .Produces<FileContentHttpResult>(StatusCodes.Status200OK, contentType: "application/json");
        endpoints.MapPost("/api/risk/profiles/import", ImportProfiles)
            .RequireAuthorization()
            .WithName("ImportProfiles")
            .WithSummary("Import risk profiles from a signed bundle.")
            .WithTags("Profile Export/Import")
            .Produces<ImportResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
        endpoints.MapPost("/api/risk/profiles/verify", VerifyBundle)
            .RequireAuthorization()
            .WithName("VerifyProfileBundle")
            .WithSummary("Verify the signature of a profile bundle without importing.")
            .WithTags("Profile Export/Import")
            .Produces<VerifyResponse>(StatusCodes.Status200OK);
        return endpoints;
    }

    /// <summary>
    /// Exports the requested profiles as a signed bundle. Rejects the request
    /// (400) if any requested profile id cannot be resolved.
    /// </summary>
    private static IResult ExportProfiles(
        HttpContext context,
        [FromBody] ExportProfilesRequest request,
        RiskProfileConfigurationService profileService,
        ProfileExportService exportService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || request.ProfileIds == null || request.ProfileIds.Count == 0)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "At least one profile ID is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var profiles = new List<StellaOps.Policy.RiskProfile.Models.RiskProfileModel>();
        var notFound = new List<string>();
        foreach (var profileId in request.ProfileIds)
        {
            var profile = profileService.GetProfile(profileId);
            if (profile != null)
            {
                profiles.Add(profile);
            }
            else
            {
                notFound.Add(profileId);
            }
        }
        if (notFound.Count > 0)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Profiles not found",
                Detail = $"The following profiles were not found: {string.Join(", ", notFound)}",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var actorId = ResolveActorId(context);
        var bundle = exportService.Export(profiles, request, actorId);
        return Results.Ok(new ExportResponse(bundle));
    }

    /// <summary>
    /// Exports and streams the bundle as a downloadable JSON file.
    /// NOTE(review): unlike ExportProfiles, unknown profile ids are silently
    /// skipped here rather than rejected — confirm this asymmetry is intended.
    /// </summary>
    private static IResult DownloadBundle(
        HttpContext context,
        [FromBody] ExportProfilesRequest request,
        RiskProfileConfigurationService profileService,
        ProfileExportService exportService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || request.ProfileIds == null || request.ProfileIds.Count == 0)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "At least one profile ID is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var profiles = new List<StellaOps.Policy.RiskProfile.Models.RiskProfileModel>();
        foreach (var profileId in request.ProfileIds)
        {
            var profile = profileService.GetProfile(profileId);
            if (profile != null)
            {
                profiles.Add(profile);
            }
        }
        var actorId = ResolveActorId(context);
        var bundle = exportService.Export(profiles, request, actorId);
        var json = exportService.SerializeBundle(bundle);
        var bytes = System.Text.Encoding.UTF8.GetBytes(json);
        var fileName = $"risk-profiles-{bundle.BundleId}.json";
        return Results.File(bytes, "application/json", fileName);
    }

    /// <summary>
    /// Imports profiles from a signed bundle, registering each imported
    /// profile via the configuration service.
    /// NOTE(review): the DI-injected exportService parameter is unused — a
    /// fresh ProfileExportService is constructed here solely to wire in the
    /// profileSave callback. Consider exposing save support on the registered
    /// service instead.
    /// </summary>
    private static IResult ImportProfiles(
        HttpContext context,
        [FromBody] ImportProfilesRequest request,
        RiskProfileConfigurationService profileService,
        ProfileExportService exportService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || request.Bundle == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Bundle is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var actorId = ResolveActorId(context);
        // Create an export service with save capability
        var importExportService = new ProfileExportService(
            timeProvider: TimeProvider.System,
            profileLookup: id => profileService.GetProfile(id),
            lifecycleLookup: null,
            profileSave: profile => profileService.RegisterProfile(profile),
            keyLookup: null);
        var result = importExportService.Import(request, actorId);
        return Results.Ok(new ImportResponse(result));
    }

    /// <summary>
    /// Verifies a bundle's signature without importing anything; always 200
    /// with the verification outcome (failures are reported in the payload).
    /// </summary>
    private static IResult VerifyBundle(
        HttpContext context,
        [FromBody] RiskProfileBundle bundle,
        ProfileExportService exportService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (bundle == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Bundle is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var verification = exportService.VerifySignature(bundle);
        return Results.Ok(new VerifyResponse(verification, bundle.Metadata));
    }

    /// <summary>
    /// Resolves the acting user's id from claims (NameIdentifier, UPN, then
    /// OIDC "sub"), falling back to the X-StellaOps-Actor header; null when
    /// no actor can be determined.
    /// </summary>
    private static string? ResolveActorId(HttpContext context)
    {
        var user = context.User;
        var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
            ?? user?.FindFirst(ClaimTypes.Upn)?.Value
            ?? user?.FindFirst("sub")?.Value;
        if (!string.IsNullOrWhiteSpace(actor))
        {
            return actor;
        }
        if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
        {
            return header.ToString();
        }
        return null;
    }
}
#region Response DTOs
/// <summary>Wrapper for an exported, signed profile bundle.</summary>
internal sealed record ExportResponse(RiskProfileBundle Bundle);
/// <summary>Wrapper for the outcome of a bundle import.</summary>
internal sealed record ImportResponse(ImportResult Result);
/// <summary>Signature verification outcome plus the bundle's metadata.</summary>
internal sealed record VerifyResponse(
    SignatureVerificationResult Verification,
    BundleMetadata Metadata);
#endregion

View File

@@ -0,0 +1,433 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Simulation;
namespace StellaOps.Policy.Engine.Endpoints;
internal static class RiskSimulationEndpoints
{
/// <summary>
/// Registers the risk-simulation routes (full, quick, compare, what-if)
/// under <c>/api/risk/simulation</c>; all require authorization.
/// </summary>
public static IEndpointRouteBuilder MapRiskSimulation(this IEndpointRouteBuilder endpoints)
{
    var group = endpoints.MapGroup("/api/risk/simulation")
        .RequireAuthorization()
        .WithTags("Risk Simulation");
    group.MapPost("/", RunSimulation)
        .WithName("RunRiskSimulation")
        .WithSummary("Run a risk simulation with score distributions and contribution breakdowns.")
        .Produces<RiskSimulationResponse>(StatusCodes.Status200OK)
        .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
        .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
    group.MapPost("/quick", RunQuickSimulation)
        .WithName("RunQuickRiskSimulation")
        .WithSummary("Run a quick risk simulation without detailed breakdowns.")
        .Produces<QuickSimulationResponse>(StatusCodes.Status200OK)
        .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
        .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
    group.MapPost("/compare", CompareProfiles)
        .WithName("CompareProfileSimulations")
        .WithSummary("Compare risk scoring between two profile configurations.")
        .Produces<ProfileComparisonResponse>(StatusCodes.Status200OK)
        .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
    group.MapPost("/whatif", RunWhatIfSimulation)
        .WithName("RunWhatIfSimulation")
        .WithSummary("Run a what-if simulation with hypothetical signal changes.")
        .Produces<WhatIfSimulationResponse>(StatusCodes.Status200OK)
        .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
    return endpoints;
}
/// <summary>
/// Runs a full simulation for a profile against the supplied findings.
/// Requires ProfileId and at least one finding; maps service "not found"
/// failures to 404.
/// </summary>
private static IResult RunSimulation(
    HttpContext context,
    [FromBody] RiskSimulationRequest request,
    RiskSimulationService simulationService)
{
    var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
    if (scopeResult is not null)
    {
        return scopeResult;
    }
    if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = "ProfileId is required.",
            Status = StatusCodes.Status400BadRequest
        });
    }
    if (request.Findings == null || request.Findings.Count == 0)
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = "At least one finding is required.",
            Status = StatusCodes.Status400BadRequest
        });
    }
    try
    {
        var result = simulationService.Simulate(request);
        return Results.Ok(new RiskSimulationResponse(result));
    }
    // NOTE(review): filtering by exception message text is fragile — a
    // dedicated exception type from the service would be more robust.
    catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
    {
        return Results.NotFound(new ProblemDetails
        {
            Title = "Profile not found",
            Detail = ex.Message,
            Status = StatusCodes.Status404NotFound
        });
    }
}
/// <summary>
/// Runs a lightweight simulation: expands the quick request into a full
/// request with contributions disabled and Quick mode, then projects the
/// result down to aggregate metrics and the score distribution.
/// </summary>
private static IResult RunQuickSimulation(
    HttpContext context,
    [FromBody] QuickSimulationRequest request,
    RiskSimulationService simulationService)
{
    var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
    if (scopeResult is not null)
    {
        return scopeResult;
    }
    if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = "ProfileId is required.",
            Status = StatusCodes.Status400BadRequest
        });
    }
    var fullRequest = new RiskSimulationRequest(
        ProfileId: request.ProfileId,
        ProfileVersion: request.ProfileVersion,
        Findings: request.Findings,
        IncludeContributions: false,
        IncludeDistribution: true,
        Mode: SimulationMode.Quick);
    try
    {
        var result = simulationService.Simulate(fullRequest);
        var quickResponse = new QuickSimulationResponse(
            SimulationId: result.SimulationId,
            ProfileId: result.ProfileId,
            ProfileVersion: result.ProfileVersion,
            Timestamp: result.Timestamp,
            AggregateMetrics: result.AggregateMetrics,
            Distribution: result.Distribution,
            ExecutionTimeMs: result.ExecutionTimeMs);
        return Results.Ok(quickResponse);
    }
    // NOTE(review): message-text exception filtering — see RunSimulation.
    catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
    {
        return Results.NotFound(new ProblemDetails
        {
            Title = "Profile not found",
            Detail = ex.Message,
            Status = StatusCodes.Status404NotFound
        });
    }
}
/// <summary>
/// Runs two full simulations (base and compare profile) over the same
/// findings and returns their per-profile summaries plus computed deltas.
/// A missing profile maps to 400 here (not 404, since either of the two ids
/// may be at fault).
/// </summary>
private static IResult CompareProfiles(
    HttpContext context,
    [FromBody] ProfileComparisonRequest request,
    RiskSimulationService simulationService)
{
    var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
    if (scopeResult is not null)
    {
        return scopeResult;
    }
    if (request == null ||
        string.IsNullOrWhiteSpace(request.BaseProfileId) ||
        string.IsNullOrWhiteSpace(request.CompareProfileId))
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = "Both BaseProfileId and CompareProfileId are required.",
            Status = StatusCodes.Status400BadRequest
        });
    }
    try
    {
        // Both runs use Full mode with contributions and distribution so the
        // delta computation has complete inputs.
        var baseRequest = new RiskSimulationRequest(
            ProfileId: request.BaseProfileId,
            ProfileVersion: request.BaseProfileVersion,
            Findings: request.Findings,
            IncludeContributions: true,
            IncludeDistribution: true,
            Mode: SimulationMode.Full);
        var compareRequest = new RiskSimulationRequest(
            ProfileId: request.CompareProfileId,
            ProfileVersion: request.CompareProfileVersion,
            Findings: request.Findings,
            IncludeContributions: true,
            IncludeDistribution: true,
            Mode: SimulationMode.Full);
        var baseResult = simulationService.Simulate(baseRequest);
        var compareResult = simulationService.Simulate(compareRequest);
        var deltas = ComputeDeltas(baseResult, compareResult);
        return Results.Ok(new ProfileComparisonResponse(
            BaseProfile: new ProfileSimulationSummary(
                baseResult.ProfileId,
                baseResult.ProfileVersion,
                baseResult.AggregateMetrics),
            CompareProfile: new ProfileSimulationSummary(
                compareResult.ProfileId,
                compareResult.ProfileVersion,
                compareResult.AggregateMetrics),
            Deltas: deltas));
    }
    // NOTE(review): message-text exception filtering — see RunSimulation.
    catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Profile not found",
            Detail = ex.Message,
            Status = StatusCodes.Status400BadRequest
        });
    }
}
/// <summary>
/// Runs a baseline simulation, applies the requested hypothetical signal
/// changes to the findings, re-simulates in what-if mode, and returns both
/// results plus a per-finding impact summary. Requires policy:read.
/// </summary>
private static IResult RunWhatIfSimulation(
    HttpContext context,
    [FromBody] WhatIfSimulationRequest request,
    RiskSimulationService simulationService)
{
    var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
    if (scopeResult is not null)
    {
        return scopeResult;
    }
    if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
    {
        return Results.BadRequest(new ProblemDetails
        {
            Title = "Invalid request",
            Detail = "ProfileId is required.",
            Status = StatusCodes.Status400BadRequest
        });
    }
    try
    {
        // Run baseline simulation (full mode, same profile and findings as
        // the what-if run, so the two results are directly comparable).
        var baselineRequest = new RiskSimulationRequest(
            ProfileId: request.ProfileId,
            ProfileVersion: request.ProfileVersion,
            Findings: request.Findings,
            IncludeContributions: true,
            IncludeDistribution: true,
            Mode: SimulationMode.Full);
        var baselineResult = simulationService.Simulate(baselineRequest);
        // Apply hypothetical changes to findings and re-simulate.
        // ApplyHypotheticalChanges copies findings; the request is not mutated.
        var modifiedFindings = ApplyHypotheticalChanges(request.Findings, request.HypotheticalChanges);
        var modifiedRequest = new RiskSimulationRequest(
            ProfileId: request.ProfileId,
            ProfileVersion: request.ProfileVersion,
            Findings: modifiedFindings,
            IncludeContributions: true,
            IncludeDistribution: true,
            Mode: SimulationMode.WhatIf);
        var modifiedResult = simulationService.Simulate(modifiedRequest);
        return Results.Ok(new WhatIfSimulationResponse(
            BaselineResult: baselineResult,
            ModifiedResult: modifiedResult,
            ImpactSummary: ComputeImpactSummary(baselineResult, modifiedResult)));
    }
    catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
    {
        // Missing profile surfaces from the service as InvalidOperationException.
        return Results.NotFound(new ProblemDetails
        {
            Title = "Profile not found",
            Detail = ex.Message,
            Status = StatusCodes.Status404NotFound
        });
    }
}
/// <summary>
/// Computes aggregate metric deltas between two simulation results,
/// expressed as compare minus base.
/// </summary>
private static ComparisonDeltas ComputeDeltas(
    RiskSimulationResult baseResult,
    RiskSimulationResult compareResult)
{
    var before = baseResult.AggregateMetrics;
    var after = compareResult.AggregateMetrics;

    return new ComparisonDeltas(
        MeanScoreDelta: after.MeanScore - before.MeanScore,
        MedianScoreDelta: after.MedianScore - before.MedianScore,
        CriticalCountDelta: after.CriticalCount - before.CriticalCount,
        HighCountDelta: after.HighCount - before.HighCount,
        MediumCountDelta: after.MediumCount - before.MediumCount,
        LowCountDelta: after.LowCount - before.LowCount);
}
/// <summary>
/// Produces a copy of <paramref name="findings"/> with every applicable
/// hypothetical signal change applied. Later changes to the same signal
/// overwrite earlier ones; the input findings are never mutated.
/// </summary>
private static IReadOnlyList<SimulationFinding> ApplyHypotheticalChanges(
    IReadOnlyList<SimulationFinding> findings,
    IReadOnlyList<HypotheticalChange> changes)
{
    var rewritten = new List<SimulationFinding>(findings.Count);

    foreach (var original in findings)
    {
        // Work on a private copy of the signal map so the source stays intact.
        var signals = new Dictionary<string, object?>(original.Signals);

        foreach (var change in changes)
        {
            var targetsThisFinding = change.ApplyToAll
                || change.FindingIds.Contains(original.FindingId);
            if (targetsThisFinding)
            {
                signals[change.SignalName] = change.NewValue;
            }
        }

        rewritten.Add(original with { Signals = signals });
    }

    return rewritten.AsReadOnly();
}
/// <summary>
/// Summarizes the per-finding impact of a what-if run versus the baseline.
/// Findings are matched by id; a score delta with magnitude below 0.1 counts
/// as unchanged, a negative delta as improved, a positive one as worsened.
/// Baseline findings absent from the modified result are excluded entirely.
/// </summary>
private static WhatIfImpactSummary ComputeImpactSummary(
    RiskSimulationResult baseline,
    RiskSimulationResult modified)
{
    var baseScores = baseline.FindingScores.ToDictionary(f => f.FindingId, f => f.NormalizedScore);
    var modScores = modified.FindingScores.ToDictionary(f => f.FindingId, f => f.NormalizedScore);
    var improved = 0;
    var worsened = 0;
    var unchanged = 0;
    var matched = 0;
    var totalDelta = 0.0;
    foreach (var (findingId, baseScore) in baseScores)
    {
        if (!modScores.TryGetValue(findingId, out var modScore))
        {
            continue;
        }
        matched++;
        var delta = modScore - baseScore;
        totalDelta += delta;
        if (Math.Abs(delta) < 0.1)
            unchanged++;
        else if (delta < 0)
            improved++;
        else
            worsened++;
    }
    return new WhatIfImpactSummary(
        FindingsImproved: improved,
        FindingsWorsened: worsened,
        FindingsUnchanged: unchanged,
        // BUGFIX: average over the findings actually matched and accumulated
        // into totalDelta. The previous divisor (baseline.FindingScores.Count)
        // diluted the average whenever the modified result dropped findings.
        AverageScoreDelta: matched > 0
            ? totalDelta / matched
            : 0,
        // NOTE(review): these counts mirror score-delta direction, not actual
        // severity-band transitions — confirm this matches the intended
        // SeverityShifts semantics.
        SeverityShifts: new SeverityShifts(
            ToLower: improved,
            ToHigher: worsened,
            Unchanged: unchanged));
}
}
#region Request/Response DTOs
/// <summary>Envelope returning a full risk simulation result.</summary>
internal sealed record RiskSimulationResponse(RiskSimulationResult Result);
/// <summary>Request body for the quick simulation endpoint.</summary>
/// <param name="ProfileId">Risk profile to simulate against (required).</param>
/// <param name="ProfileVersion">Specific profile version, or null for the default resolution.</param>
/// <param name="Findings">Findings to score.</param>
internal sealed record QuickSimulationRequest(
    string ProfileId,
    string? ProfileVersion,
    IReadOnlyList<SimulationFinding> Findings);
/// <summary>
/// Slim response for quick simulations: aggregate metrics and distribution
/// only, without per-finding contributions.
/// </summary>
internal sealed record QuickSimulationResponse(
    string SimulationId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    AggregateRiskMetrics AggregateMetrics,
    RiskDistribution? Distribution,
    double ExecutionTimeMs);
/// <summary>
/// Request body for comparing two profiles over the same set of findings.
/// Both profile ids are required; versions are optional.
/// </summary>
internal sealed record ProfileComparisonRequest(
    string BaseProfileId,
    string? BaseProfileVersion,
    string CompareProfileId,
    string? CompareProfileVersion,
    IReadOnlyList<SimulationFinding> Findings);
/// <summary>Comparison outcome: per-profile summaries plus metric deltas.</summary>
internal sealed record ProfileComparisonResponse(
    ProfileSimulationSummary BaseProfile,
    ProfileSimulationSummary CompareProfile,
    ComparisonDeltas Deltas);
/// <summary>Identity and aggregate metrics of one simulated profile.</summary>
internal sealed record ProfileSimulationSummary(
    string ProfileId,
    string ProfileVersion,
    AggregateRiskMetrics Metrics);
/// <summary>
/// Aggregate metric deltas between two profiles, computed as compare minus
/// base (see ComputeDeltas).
/// </summary>
internal sealed record ComparisonDeltas(
    double MeanScoreDelta,
    double MedianScoreDelta,
    int CriticalCountDelta,
    int HighCountDelta,
    int MediumCountDelta,
    int LowCountDelta);
/// <summary>
/// Request body for what-if simulations: a profile, the findings, and the
/// hypothetical signal changes to apply before re-simulating.
/// </summary>
internal sealed record WhatIfSimulationRequest(
    string ProfileId,
    string? ProfileVersion,
    IReadOnlyList<SimulationFinding> Findings,
    IReadOnlyList<HypotheticalChange> HypotheticalChanges);
/// <summary>A single hypothetical signal mutation for what-if simulations.</summary>
/// <param name="SignalName">Name of the signal to overwrite.</param>
/// <param name="NewValue">Value the signal is set to.</param>
/// <param name="ApplyToAll">When true, targets every finding; otherwise only the listed finding ids.</param>
/// <param name="FindingIds">Finding ids targeted when <paramref name="ApplyToAll"/> is false.</param>
internal sealed record HypotheticalChange(
    string SignalName,
    object? NewValue,
    bool ApplyToAll = true,
    IReadOnlyList<string>? FindingIds = null)
{
    // Explicit property overrides the positional one so callers that pass
    // null still observe a non-null (empty) list.
    public IReadOnlyList<string> FindingIds { get; init; } = FindingIds ?? Array.Empty<string>();
}
/// <summary>What-if outcome: baseline and modified runs plus an impact summary.</summary>
internal sealed record WhatIfSimulationResponse(
    RiskSimulationResult BaselineResult,
    RiskSimulationResult ModifiedResult,
    WhatIfImpactSummary ImpactSummary);
/// <summary>
/// Per-finding impact counts for a what-if run (see ComputeImpactSummary for
/// how improved/worsened/unchanged are classified).
/// </summary>
internal sealed record WhatIfImpactSummary(
    int FindingsImproved,
    int FindingsWorsened,
    int FindingsUnchanged,
    double AverageScoreDelta,
    SeverityShifts SeverityShifts);
/// <summary>Counts of findings whose scores moved lower, higher, or stayed put.</summary>
internal sealed record SeverityShifts(
    int ToLower,
    int ToHigher,
    int Unchanged);
#endregion

View File

@@ -0,0 +1,290 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Scope;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for attaching risk profiles to scopes (organization,
/// project, environment, component), listing/expiring attachments, and
/// resolving the effective profile for a scope selector.
/// </summary>
internal static class ScopeAttachmentEndpoints
{
    /// <summary>
    /// Registers the /api/risk/scopes endpoint group. The whole group requires
    /// authorization; individual handlers additionally enforce policy scopes.
    /// </summary>
    public static IEndpointRouteBuilder MapScopeAttachments(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/scopes")
            .RequireAuthorization()
            .WithTags("Risk Profile Scopes");
        group.MapPost("/attachments", CreateAttachment)
            .WithName("CreateScopeAttachment")
            .WithSummary("Attach a risk profile to a scope (organization, project, environment, or component).")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status201Created)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
        group.MapGet("/attachments/{attachmentId}", GetAttachment)
            .WithName("GetScopeAttachment")
            .WithSummary("Get a scope attachment by ID.")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapDelete("/attachments/{attachmentId}", DeleteAttachment)
            .WithName("DeleteScopeAttachment")
            .WithSummary("Delete a scope attachment.")
            .Produces(StatusCodes.Status204NoContent)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapPost("/attachments/{attachmentId}:expire", ExpireAttachment)
            .WithName("ExpireScopeAttachment")
            .WithSummary("Expire a scope attachment immediately.")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
        group.MapGet("/attachments", ListAttachments)
            .WithName("ListScopeAttachments")
            .WithSummary("List scope attachments with optional filtering.")
            .Produces<ScopeAttachmentListResponse>(StatusCodes.Status200OK);
        group.MapPost("/resolve", ResolveScope)
            .WithName("ResolveScope")
            .WithSummary("Resolve the effective risk profile for a given scope selector.")
            .Produces<ScopeResolutionResponse>(StatusCodes.Status200OK);
        group.MapGet("/{scopeType}/{scopeId}/attachments", GetScopeAttachments)
            .WithName("GetScopeAttachments")
            .WithSummary("Get all attachments for a specific scope.")
            .Produces<ScopeAttachmentListResponse>(StatusCodes.Status200OK);
        return endpoints;
    }

    /// <summary>
    /// Creates a scope attachment. Requires policy:edit; validates that the
    /// referenced risk profile exists before attaching.
    /// </summary>
    private static IResult CreateAttachment(
        HttpContext context,
        [FromBody] CreateScopeAttachmentRequest request,
        ScopeAttachmentService attachmentService,
        RiskProfileConfigurationService profileService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "ProfileId is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        // Verify profile exists
        // NOTE(review): lookup and create are two separate calls; a profile
        // deleted in between would still get attached — confirm acceptable.
        var profile = profileService.GetProfile(request.ProfileId);
        if (profile == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Profile not found",
                Detail = $"Risk profile '{request.ProfileId}' was not found.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var actorId = ResolveActorId(context);
        try
        {
            var attachment = attachmentService.Create(request, actorId);
            // 201 with a Location header pointing at the new attachment.
            return Results.Created(
                $"/api/risk/scopes/attachments/{attachment.Id}",
                new ScopeAttachmentResponse(attachment));
        }
        catch (ArgumentException ex)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = ex.Message,
                Status = StatusCodes.Status400BadRequest
            });
        }
    }

    /// <summary>Fetches a single attachment by id. Requires policy:read.</summary>
    private static IResult GetAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var attachment = attachmentService.Get(attachmentId);
        if (attachment == null)
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Attachment not found",
                Detail = $"Scope attachment '{attachmentId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.Ok(new ScopeAttachmentResponse(attachment));
    }

    /// <summary>Deletes an attachment; 404 when it does not exist. Requires policy:edit.</summary>
    private static IResult DeleteAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (!attachmentService.Delete(attachmentId))
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Attachment not found",
                Detail = $"Scope attachment '{attachmentId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.NoContent();
    }

    /// <summary>
    /// Expires an attachment immediately and returns its updated state.
    /// Requires policy:edit; the acting user is recorded on the attachment.
    /// </summary>
    private static IResult ExpireAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var actorId = ResolveActorId(context);
        var attachment = attachmentService.Expire(attachmentId, actorId);
        if (attachment == null)
        {
            return Results.NotFound(new ProblemDetails
            {
                Title = "Attachment not found",
                Detail = $"Scope attachment '{attachmentId}' was not found.",
                Status = StatusCodes.Status404NotFound
            });
        }
        return Results.Ok(new ScopeAttachmentResponse(attachment));
    }

    /// <summary>
    /// Lists attachments with optional filters; limit falls back to 100 when
    /// non-positive. Requires policy:read.
    /// </summary>
    private static IResult ListAttachments(
        HttpContext context,
        [FromQuery] ScopeType? scopeType,
        [FromQuery] string? scopeId,
        [FromQuery] string? profileId,
        // NOTE(review): includeExpired and limit are non-nullable value types
        // without defaults, which minimal APIs treat as required query
        // parameters (omitting them yields 400) — confirm they were meant to
        // be optional.
        [FromQuery] bool includeExpired,
        [FromQuery] int limit,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var query = new ScopeAttachmentQuery(
            ScopeType: scopeType,
            ScopeId: scopeId,
            ProfileId: profileId,
            IncludeExpired: includeExpired,
            Limit: limit > 0 ? limit : 100);
        var attachments = attachmentService.Query(query);
        return Results.Ok(new ScopeAttachmentListResponse(attachments));
    }

    /// <summary>
    /// Resolves the effective risk profile for the supplied scope selector.
    /// Requires policy:read.
    /// </summary>
    private static IResult ResolveScope(
        HttpContext context,
        [FromBody] ScopeSelector selector,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        if (selector == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Scope selector is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }
        var result = attachmentService.Resolve(selector);
        return Results.Ok(new ScopeResolutionResponse(result));
    }

    /// <summary>Lists every attachment bound to one scope. Requires policy:read.</summary>
    private static IResult GetScopeAttachments(
        HttpContext context,
        [FromRoute] ScopeType scopeType,
        [FromRoute] string scopeId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }
        var attachments = attachmentService.GetAttachmentsForScope(scopeType, scopeId);
        return Results.Ok(new ScopeAttachmentListResponse(attachments));
    }

    /// <summary>
    /// Resolves the acting user: first from standard identity claims
    /// (NameIdentifier, UPN, "sub"), then from the X-StellaOps-Actor header,
    /// otherwise null.
    /// NOTE(review): the header fallback is client-supplied and spoofable —
    /// ensure it is only trusted behind an authenticated gateway.
    /// </summary>
    private static string? ResolveActorId(HttpContext context)
    {
        var user = context.User;
        var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
            ?? user?.FindFirst(ClaimTypes.Upn)?.Value
            ?? user?.FindFirst("sub")?.Value;
        if (!string.IsNullOrWhiteSpace(actor))
        {
            return actor;
        }
        if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
        {
            return header.ToString();
        }
        return null;
    }
}
#region Response DTOs
/// <summary>Envelope returning a single scope attachment.</summary>
internal sealed record ScopeAttachmentResponse(ScopeAttachment Attachment);
/// <summary>Envelope returning a list of scope attachments.</summary>
internal sealed record ScopeAttachmentListResponse(IReadOnlyList<ScopeAttachment> Attachments);
/// <summary>Envelope returning a scope resolution result.</summary>
internal sealed record ScopeResolutionResponse(ScopeResolutionResult Result);
#endregion

View File

@@ -17,7 +17,32 @@ internal sealed record PolicyEvaluationContext(
PolicyEvaluationAdvisory Advisory,
PolicyEvaluationVexEvidence Vex,
PolicyEvaluationSbom Sbom,
PolicyEvaluationExceptions Exceptions);
PolicyEvaluationExceptions Exceptions,
PolicyEvaluationReachability Reachability,
DateTimeOffset? EvaluationTimestamp = null)
{
/// <summary>
/// Gets the evaluation timestamp for deterministic time-based operations.
/// This value is injected at evaluation time rather than using DateTime.UtcNow
/// to ensure deterministic, reproducible results.
/// </summary>
public DateTimeOffset Now => EvaluationTimestamp ?? DateTimeOffset.MinValue;
/// <summary>
/// Creates a context without reachability data (for backwards compatibility).
/// </summary>
public PolicyEvaluationContext(
PolicyEvaluationSeverity severity,
PolicyEvaluationEnvironment environment,
PolicyEvaluationAdvisory advisory,
PolicyEvaluationVexEvidence vex,
PolicyEvaluationSbom sbom,
PolicyEvaluationExceptions exceptions,
DateTimeOffset? evaluationTimestamp = null)
: this(severity, environment, advisory, vex, sbom, exceptions, PolicyEvaluationReachability.Unknown, evaluationTimestamp)
{
}
}
/// <summary>Normalized severity label with an optional numeric score.</summary>
internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null);
@@ -158,3 +183,96 @@ internal sealed record PolicyExceptionApplication(
string AppliedStatus,
string? AppliedSeverity,
ImmutableDictionary<string, string> Metadata);
/// <summary>
/// Reachability evidence for policy evaluation: a state label plus
/// confidence, score, and optional evidence metadata.
/// </summary>
internal sealed record PolicyEvaluationReachability(
    string State,
    decimal Confidence,
    decimal Score,
    bool HasRuntimeEvidence,
    string? Source,
    string? Method,
    string? EvidenceRef)
{
    /// <summary>Default instance used when no reachability evidence exists.</summary>
    public static readonly PolicyEvaluationReachability Unknown =
        new("unknown", 0m, 0m, false, null, null, null);

    /// <summary>Builds a "reachable" fact with optional evidence metadata.</summary>
    public static PolicyEvaluationReachability Reachable(
        decimal confidence = 1m,
        decimal score = 1m,
        bool hasRuntimeEvidence = false,
        string? source = null,
        string? method = null)
    {
        return new PolicyEvaluationReachability(
            "reachable", confidence, score, hasRuntimeEvidence, source, method, EvidenceRef: null);
    }

    /// <summary>Builds an "unreachable" fact; the score is pinned to zero.</summary>
    public static PolicyEvaluationReachability Unreachable(
        decimal confidence = 1m,
        bool hasRuntimeEvidence = false,
        string? source = null,
        string? method = null)
    {
        return new PolicyEvaluationReachability(
            "unreachable", confidence, 0m, hasRuntimeEvidence, source, method, EvidenceRef: null);
    }

    /// <summary>True when the state is "reachable" (case-insensitive).</summary>
    public bool IsReachable => HasState("reachable");

    /// <summary>True when the state is "unreachable" (case-insensitive).</summary>
    public bool IsUnreachable => HasState("unreachable");

    /// <summary>True when the state is "unknown" (case-insensitive).</summary>
    public bool IsUnknown => HasState("unknown");

    /// <summary>True when the state is "under_investigation" (case-insensitive).</summary>
    public bool IsUnderInvestigation => HasState("under_investigation");

    /// <summary>Confidence at or above 0.8.</summary>
    public bool IsHighConfidence => Confidence >= 0.8m;

    /// <summary>Confidence in [0.5, 0.8).</summary>
    public bool IsMediumConfidence => Confidence >= 0.5m && Confidence < 0.8m;

    /// <summary>Confidence below 0.5.</summary>
    public bool IsLowConfidence => Confidence < 0.5m;

    // Single comparison path shared by all state predicates.
    private bool HasState(string expected) => State.Equals(expected, StringComparison.OrdinalIgnoreCase);
}

View File

@@ -63,6 +63,8 @@ internal sealed class PolicyExpressionEvaluator
"vex" => new EvaluationValue(new VexScope(this, context.Vex)),
"advisory" => new EvaluationValue(new AdvisoryScope(context.Advisory)),
"sbom" => new EvaluationValue(new SbomScope(context.Sbom)),
"reachability" => new EvaluationValue(new ReachabilityScope(context.Reachability)),
"now" => new EvaluationValue(context.Now),
"true" => EvaluationValue.True,
"false" => EvaluationValue.False,
_ => EvaluationValue.Null,
@@ -98,6 +100,11 @@ internal sealed class PolicyExpressionEvaluator
return sbom.Get(member.Member);
}
if (raw is ReachabilityScope reachability)
{
return reachability.Get(member.Member);
}
if (raw is ComponentScope componentScope)
{
return componentScope.Get(member.Member);
@@ -811,4 +818,51 @@ internal sealed class PolicyExpressionEvaluator
return vex.Statements[^1];
}
}
/// <summary>
/// SPL scope for reachability predicates: exposes the reachability state,
/// confidence, score, and evidence fields to policy expressions. Members are
/// matched case-insensitively and accept snake_case or collapsed spellings
/// (e.g. "has_runtime_evidence" or "hasruntimeevidence"); unknown members
/// evaluate to null.
/// </summary>
private sealed class ReachabilityScope
{
    private readonly PolicyEvaluationReachability data;

    public ReachabilityScope(PolicyEvaluationReachability reachability)
        => data = reachability;

    public EvaluationValue Get(string member)
    {
        switch (member.ToLowerInvariant())
        {
            case "state":
                return new EvaluationValue(data.State);
            case "confidence":
                return new EvaluationValue(data.Confidence);
            case "score":
                return new EvaluationValue(data.Score);
            case "has_runtime_evidence":
            case "hasruntimeevidence":
                return new EvaluationValue(data.HasRuntimeEvidence);
            case "source":
                return new EvaluationValue(data.Source);
            case "method":
                return new EvaluationValue(data.Method);
            case "evidence_ref":
            case "evidenceref":
                return new EvaluationValue(data.EvidenceRef);
            case "is_reachable":
            case "isreachable":
                return new EvaluationValue(data.IsReachable);
            case "is_unreachable":
            case "isunreachable":
                return new EvaluationValue(data.IsUnreachable);
            case "is_unknown":
            case "isunknown":
                return new EvaluationValue(data.IsUnknown);
            case "is_under_investigation":
            case "isunderinvestigation":
                return new EvaluationValue(data.IsUnderInvestigation);
            case "is_high_confidence":
            case "ishighconfidence":
                return new EvaluationValue(data.IsHighConfidence);
            case "is_medium_confidence":
            case "ismediumconfidence":
                return new EvaluationValue(data.IsMediumConfidence);
            case "is_low_confidence":
            case "islowconfidence":
                return new EvaluationValue(data.IsLowConfidence);
            default:
                return EvaluationValue.Null;
        }
    }
}
}

View File

@@ -0,0 +1,172 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Lifecycle;
namespace StellaOps.Policy.Engine.Events;
/// <summary>
/// Base class for profile lifecycle events.
/// </summary>
/// <param name="EventId">Unique event identifier.</param>
/// <param name="EventType">Discriminator for the concrete event kind.</param>
/// <param name="Actor">User or system principal that caused the event, when known.</param>
/// <param name="CorrelationId">Optional id linking related events across operations.</param>
public abstract record ProfileEvent(
    [property: JsonPropertyName("event_id")] string EventId,
    [property: JsonPropertyName("event_type")] ProfileEventType EventType,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("actor")] string? Actor,
    [property: JsonPropertyName("correlation_id")] string? CorrelationId);
/// <summary>
/// Type of profile event.
/// </summary>
// NOTE(review): System.Text.Json's JsonStringEnumConverter ignores
// [JsonPropertyName] on enum members, so these values will serialize as the
// C# member names (e.g. "ProfileCreated"), not the snake_case names declared
// below — confirm the intended wire format (a custom converter, or
// [JsonStringEnumMemberName] on .NET 9+, is required for snake_case output).
[JsonConverter(typeof(JsonStringEnumConverter<ProfileEventType>))]
public enum ProfileEventType
{
    [JsonPropertyName("profile_created")]
    ProfileCreated,
    [JsonPropertyName("profile_published")]
    ProfilePublished,
    [JsonPropertyName("profile_activated")]
    ProfileActivated,
    [JsonPropertyName("profile_deprecated")]
    ProfileDeprecated,
    [JsonPropertyName("profile_archived")]
    ProfileArchived,
    [JsonPropertyName("severity_threshold_changed")]
    SeverityThresholdChanged,
    [JsonPropertyName("weight_changed")]
    WeightChanged,
    [JsonPropertyName("override_added")]
    OverrideAdded,
    [JsonPropertyName("override_removed")]
    OverrideRemoved,
    [JsonPropertyName("scope_attached")]
    ScopeAttached,
    [JsonPropertyName("scope_detached")]
    ScopeDetached
}
/// <summary>
/// Event emitted when a profile is created.
/// </summary>
/// <param name="ContentHash">Deterministic hash of the profile content at creation.</param>
/// <param name="Description">Optional human-readable profile description.</param>
public sealed record ProfileCreatedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("content_hash")] string ContentHash,
    [property: JsonPropertyName("description")] string? Description)
    : ProfileEvent(EventId, ProfileEventType.ProfileCreated, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is published/activated.
/// </summary>
/// <param name="ContentHash">Deterministic hash of the published profile content.</param>
/// <param name="PreviousActiveVersion">Version that was active before this publish, if any.</param>
public sealed record ProfilePublishedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("content_hash")] string ContentHash,
    [property: JsonPropertyName("previous_active_version")] string? PreviousActiveVersion)
    : ProfileEvent(EventId, ProfileEventType.ProfilePublished, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is deprecated.
/// </summary>
/// <param name="Reason">Optional explanation for the deprecation.</param>
/// <param name="SuccessorVersion">Version that supersedes this profile, if any.</param>
public sealed record ProfileDeprecatedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("reason")] string? Reason,
    [property: JsonPropertyName("successor_version")] string? SuccessorVersion)
    : ProfileEvent(EventId, ProfileEventType.ProfileDeprecated, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is archived. Carries no payload beyond the
/// base event identity fields.
/// </summary>
public sealed record ProfileArchivedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId)
    : ProfileEvent(EventId, ProfileEventType.ProfileArchived, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when severity thresholds change.
/// </summary>
/// <param name="Changes">One entry per threshold that changed (old/new values).</param>
public sealed record SeverityThresholdChangedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("changes")] IReadOnlyList<ThresholdChange> Changes)
    : ProfileEvent(EventId, ProfileEventType.SeverityThresholdChanged, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Details of a threshold change: null old/new values indicate the threshold
/// was added or removed respectively.
/// </summary>
public sealed record ThresholdChange(
    [property: JsonPropertyName("threshold_name")] string ThresholdName,
    [property: JsonPropertyName("old_value")] double? OldValue,
    [property: JsonPropertyName("new_value")] double? NewValue);
/// <summary>
/// Event emitted when weights change.
/// </summary>
/// <param name="SignalName">Signal whose weight changed.</param>
/// <param name="OldWeight">Weight before the change.</param>
/// <param name="NewWeight">Weight after the change.</param>
public sealed record WeightChangedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("old_weight")] double OldWeight,
    [property: JsonPropertyName("new_weight")] double NewWeight)
    : ProfileEvent(EventId, ProfileEventType.WeightChanged, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a scope is attached.
/// </summary>
/// <param name="ScopeType">Kind of scope the profile was attached to.</param>
/// <param name="ScopeId">Identifier of the scope.</param>
/// <param name="AttachmentId">Identifier of the created attachment record.</param>
public sealed record ScopeAttachedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("scope_type")] string ScopeType,
    [property: JsonPropertyName("scope_id")] string ScopeId,
    [property: JsonPropertyName("attachment_id")] string AttachmentId)
    : ProfileEvent(EventId, ProfileEventType.ScopeAttached, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event subscription request.
/// </summary>
/// <param name="EventTypes">Event kinds this subscription receives.</param>
/// <param name="ProfileFilter">Optional profile id filter; null means all profiles.</param>
/// <param name="WebhookUrl">Optional webhook for push delivery; null implies pull via the queue.</param>
public sealed record EventSubscription(
    [property: JsonPropertyName("subscription_id")] string SubscriptionId,
    [property: JsonPropertyName("event_types")] IReadOnlyList<ProfileEventType> EventTypes,
    [property: JsonPropertyName("profile_filter")] string? ProfileFilter,
    [property: JsonPropertyName("webhook_url")] string? WebhookUrl,
    [property: JsonPropertyName("created_at")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("created_by")] string? CreatedBy);

View File

@@ -0,0 +1,412 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.Events;
/// <summary>
/// Service for publishing and managing profile lifecycle events.
/// </summary>
public sealed class ProfileEventPublisher
{
private readonly ILogger<ProfileEventPublisher> _logger;
// Clock abstraction so event timestamps are deterministic in tests.
private readonly TimeProvider _timeProvider;
// Active subscriptions keyed by subscription id (case-insensitive).
private readonly ConcurrentDictionary<string, EventSubscription> _subscriptions;
// Per-subscription pending-event queues, keyed by subscription id.
private readonly ConcurrentDictionary<string, ConcurrentQueue<ProfileEvent>> _eventQueues;
// Stream of all published events regardless of subscription.
private readonly ConcurrentQueue<ProfileEvent> _globalEventStream;
// In-process handlers invoked on publish; guarded by _handlersLock for mutation.
private readonly List<Func<ProfileEvent, Task>> _eventHandlers;
private readonly object _handlersLock = new();
// Queue size bounds — presumably enforced where events are enqueued
// (trimming logic is not visible in this view; TODO confirm).
private const int MaxEventsPerQueue = 10000;
private const int MaxGlobalEvents = 50000;
/// <summary>
/// Initializes the publisher with its logger and clock; all internal
/// collections start empty.
/// </summary>
/// <exception cref="ArgumentNullException">When logger or timeProvider is null.</exception>
public ProfileEventPublisher(
    ILogger<ProfileEventPublisher> logger,
    TimeProvider timeProvider)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    _subscriptions = new ConcurrentDictionary<string, EventSubscription>(StringComparer.OrdinalIgnoreCase);
    _eventQueues = new ConcurrentDictionary<string, ConcurrentQueue<ProfileEvent>>(StringComparer.OrdinalIgnoreCase);
    _globalEventStream = new ConcurrentQueue<ProfileEvent>();
    _eventHandlers = new List<Func<ProfileEvent, Task>>();
}
/// <summary>
/// Publishes a profile created event. Timestamps come from the injected
/// clock; the event id is generated internally.
/// </summary>
/// <param name="correlationId">Optional id linking this event to a wider operation.</param>
public async Task PublishProfileCreatedAsync(
    string profileId,
    string version,
    string contentHash,
    string? description,
    string? actor,
    string? correlationId = null)
{
    var evt = new ProfileCreatedEvent(
        EventId: GenerateEventId(),
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: _timeProvider.GetUtcNow(),
        Actor: actor,
        CorrelationId: correlationId,
        ContentHash: contentHash,
        Description: description);
    await PublishAsync(evt);
}
/// <summary>
/// Publishes a profile published/activated event.
/// </summary>
/// <param name="previousActiveVersion">Version that was active before this publish, if any.</param>
public async Task PublishProfilePublishedAsync(
    string profileId,
    string version,
    string contentHash,
    string? previousActiveVersion,
    string? actor,
    string? correlationId = null)
{
    var evt = new ProfilePublishedEvent(
        EventId: GenerateEventId(),
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: _timeProvider.GetUtcNow(),
        Actor: actor,
        CorrelationId: correlationId,
        ContentHash: contentHash,
        PreviousActiveVersion: previousActiveVersion);
    await PublishAsync(evt);
}
/// <summary>
/// Publishes a profile deprecated event.
/// </summary>
/// <param name="reason">Optional explanation for the deprecation.</param>
/// <param name="successorVersion">Version superseding this profile, if any.</param>
public async Task PublishProfileDeprecatedAsync(
    string profileId,
    string version,
    string? reason,
    string? successorVersion,
    string? actor,
    string? correlationId = null)
{
    var evt = new ProfileDeprecatedEvent(
        EventId: GenerateEventId(),
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: _timeProvider.GetUtcNow(),
        Actor: actor,
        CorrelationId: correlationId,
        Reason: reason,
        SuccessorVersion: successorVersion);
    await PublishAsync(evt);
}
/// <summary>
/// Publishes a profile archived event (identity fields only, no payload).
/// </summary>
public async Task PublishProfileArchivedAsync(
    string profileId,
    string version,
    string? actor,
    string? correlationId = null)
{
    var evt = new ProfileArchivedEvent(
        EventId: GenerateEventId(),
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: _timeProvider.GetUtcNow(),
        Actor: actor,
        CorrelationId: correlationId);
    await PublishAsync(evt);
}
/// <summary>
/// Publishes a severity threshold changed event.
/// </summary>
/// <param name="changes">One entry per threshold that changed.</param>
public async Task PublishSeverityThresholdChangedAsync(
    string profileId,
    string version,
    IReadOnlyList<ThresholdChange> changes,
    string? actor,
    string? correlationId = null)
{
    var evt = new SeverityThresholdChangedEvent(
        EventId: GenerateEventId(),
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: _timeProvider.GetUtcNow(),
        Actor: actor,
        CorrelationId: correlationId,
        Changes: changes);
    await PublishAsync(evt);
}
/// <summary>
/// Emits a signal weight changed event to all subscribers and handlers.
/// </summary>
/// <param name="profileId">Identifier of the affected profile.</param>
/// <param name="version">Profile version the change applies to.</param>
/// <param name="signalName">Name of the signal whose weight changed.</param>
/// <param name="oldWeight">Weight before the change.</param>
/// <param name="newWeight">Weight after the change.</param>
/// <param name="actor">Who made the change, if known.</param>
/// <param name="correlationId">Optional correlation identifier for tracing.</param>
public async Task PublishWeightChangedAsync(
    string profileId,
    string version,
    string signalName,
    double oldWeight,
    double newWeight,
    string? actor,
    string? correlationId = null)
{
    var eventId = GenerateEventId();
    var occurredAt = _timeProvider.GetUtcNow();

    var weightEvent = new WeightChangedEvent(
        EventId: eventId,
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: occurredAt,
        Actor: actor,
        CorrelationId: correlationId,
        SignalName: signalName,
        OldWeight: oldWeight,
        NewWeight: newWeight);

    await PublishAsync(weightEvent);
}
/// <summary>
/// Emits a scope attached event to all subscribers and handlers.
/// </summary>
/// <param name="profileId">Identifier of the profile the scope was attached to.</param>
/// <param name="version">Profile version the attachment applies to.</param>
/// <param name="scopeType">Type of the attached scope.</param>
/// <param name="scopeId">Identifier of the attached scope.</param>
/// <param name="attachmentId">Identifier of the attachment record.</param>
/// <param name="actor">Who made the attachment, if known.</param>
/// <param name="correlationId">Optional correlation identifier for tracing.</param>
public async Task PublishScopeAttachedAsync(
    string profileId,
    string version,
    string scopeType,
    string scopeId,
    string attachmentId,
    string? actor,
    string? correlationId = null)
{
    var eventId = GenerateEventId();
    var occurredAt = _timeProvider.GetUtcNow();

    var scopeEvent = new ScopeAttachedEvent(
        EventId: eventId,
        ProfileId: profileId,
        ProfileVersion: version,
        Timestamp: occurredAt,
        Actor: actor,
        CorrelationId: correlationId,
        ScopeType: scopeType,
        ScopeId: scopeId,
        AttachmentId: attachmentId);

    await PublishAsync(scopeEvent);
}
/// <summary>
/// Registers an event handler that is awaited for every published event.
/// Handlers are invoked sequentially in registration order; exceptions they
/// throw are logged and swallowed by the publish loop (see PublishAsync).
/// There is no corresponding unregister API in this view of the file.
/// </summary>
/// <param name="handler">Asynchronous callback invoked with each event.</param>
/// <exception cref="ArgumentNullException">If <paramref name="handler"/> is null.</exception>
public void RegisterHandler(Func<ProfileEvent, Task> handler)
{
    ArgumentNullException.ThrowIfNull(handler);
    // Registration is serialized against the snapshot taken in PublishAsync.
    lock (_handlersLock)
    {
        _eventHandlers.Add(handler);
    }
}
/// <summary>
/// Creates a new event subscription and allocates its delivery queue.
/// </summary>
/// <param name="eventTypes">Event types the subscriber wants to receive.</param>
/// <param name="profileFilter">Optional profile filter (trailing '*' = prefix match).</param>
/// <param name="webhookUrl">Optional webhook target recorded on the subscription.</param>
/// <param name="createdBy">Optional creator identity.</param>
/// <returns>The newly created subscription.</returns>
public EventSubscription Subscribe(
    IReadOnlyList<ProfileEventType> eventTypes,
    string? profileFilter,
    string? webhookUrl,
    string? createdBy)
{
    var subscriptionId = GenerateSubscriptionId();

    var subscription = new EventSubscription(
        SubscriptionId: subscriptionId,
        EventTypes: eventTypes,
        ProfileFilter: profileFilter,
        WebhookUrl: webhookUrl,
        CreatedAt: _timeProvider.GetUtcNow(),
        CreatedBy: createdBy);

    _subscriptions[subscriptionId] = subscription;
    _eventQueues[subscriptionId] = new ConcurrentQueue<ProfileEvent>();

    return subscription;
}
/// <summary>
/// Removes a subscription and its pending event queue.
/// </summary>
/// <param name="subscriptionId">Identifier returned by <see cref="Subscribe"/>.</param>
/// <returns>True if a subscription with that id existed; otherwise false.</returns>
public bool Unsubscribe(string subscriptionId)
{
    // Remove the subscription first so publishers stop matching it,
    // then drop any queued-but-undelivered events.
    var existed = _subscriptions.TryRemove(subscriptionId, out _);
    _eventQueues.TryRemove(subscriptionId, out _);
    return existed;
}
/// <summary>
/// Drains up to <paramref name="limit"/> pending events for a subscription.
/// Dequeued events are consumed and will not be returned again.
/// </summary>
/// <param name="subscriptionId">Subscription to drain.</param>
/// <param name="limit">Maximum number of events to return.</param>
/// <returns>The drained events, or an empty list for an unknown subscription.</returns>
public IReadOnlyList<ProfileEvent> GetEvents(string subscriptionId, int limit = 100)
{
    if (!_eventQueues.TryGetValue(subscriptionId, out var queue))
    {
        return Array.Empty<ProfileEvent>();
    }

    var drained = new List<ProfileEvent>();
    while (drained.Count < limit && queue.TryDequeue(out var next))
    {
        drained.Add(next);
    }

    return drained.AsReadOnly();
}
/// <summary>
/// Returns the most recent events from the global stream, newest first.
/// Reading is non-destructive (the stream is snapshotted, not drained).
/// </summary>
/// <param name="limit">Maximum number of events to return.</param>
public IReadOnlyList<ProfileEvent> GetRecentEvents(int limit = 100)
{
    var snapshot = _globalEventStream.ToArray();

    return snapshot
        .OrderByDescending(e => e.Timestamp)
        .Take(limit)
        .ToList()
        .AsReadOnly();
}
/// <summary>
/// Returns events from the global stream matching the given criteria,
/// newest first. All filters are optional and combined with AND.
/// </summary>
/// <param name="eventType">Only events of this type, when provided.</param>
/// <param name="profileId">Only events for this profile (case-insensitive), when provided.</param>
/// <param name="since">Only events at or after this instant, when provided.</param>
/// <param name="limit">Maximum number of events to return.</param>
public IReadOnlyList<ProfileEvent> GetEventsFiltered(
    ProfileEventType? eventType,
    string? profileId,
    DateTimeOffset? since,
    int limit = 100)
{
    // Snapshot the stream once, then layer deferred filters on top.
    IEnumerable<ProfileEvent> candidates = _globalEventStream.ToArray();

    if (eventType is { } type)
    {
        candidates = candidates.Where(e => e.EventType == type);
    }

    if (!string.IsNullOrWhiteSpace(profileId))
    {
        candidates = candidates.Where(
            e => string.Equals(e.ProfileId, profileId, StringComparison.OrdinalIgnoreCase));
    }

    if (since is { } cutoff)
    {
        candidates = candidates.Where(e => e.Timestamp >= cutoff);
    }

    return candidates
        .OrderByDescending(e => e.Timestamp)
        .Take(limit)
        .ToList()
        .AsReadOnly();
}
/// <summary>
/// Core publish pipeline: records the event in the bounded global stream,
/// fans it out to matching subscription queues, then awaits every registered
/// handler sequentially. Handler failures are logged and swallowed so one
/// faulty handler cannot block delivery to the rest.
/// </summary>
private async Task PublishAsync(ProfileEvent evt)
{
    using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("profile_event.publish");
    activity?.SetTag("event.type", evt.EventType.ToString());
    activity?.SetTag("profile.id", evt.ProfileId);
    // Add to global stream
    _globalEventStream.Enqueue(evt);
    // Trim global stream if too large. NOTE(review): Count/TryDequeue is not
    // atomic, so under concurrent publishes the bound is approximate.
    while (_globalEventStream.Count > MaxGlobalEvents)
    {
        _globalEventStream.TryDequeue(out _);
    }
    // Distribute to matching subscriptions
    foreach (var (subscriptionId, subscription) in _subscriptions)
    {
        if (MatchesSubscription(evt, subscription))
        {
            // Queue may already be gone if Unsubscribe raced us; that's fine.
            if (_eventQueues.TryGetValue(subscriptionId, out var queue))
            {
                queue.Enqueue(evt);
                // Trim queue if too large (oldest events are dropped first).
                while (queue.Count > MaxEventsPerQueue)
                {
                    queue.TryDequeue(out _);
                }
            }
        }
    }
    // Invoke registered handlers. Snapshot under the lock so handlers
    // registered mid-publish don't mutate the list we iterate.
    List<Func<ProfileEvent, Task>> handlers;
    lock (_handlersLock)
    {
        handlers = _eventHandlers.ToList();
    }
    foreach (var handler in handlers)
    {
        try
        {
            await handler(evt).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Deliberate swallow: one failing handler must not stop the others.
            _logger.LogError(ex, "Error invoking event handler for {EventType}", evt.EventType);
        }
    }
    PolicyEngineTelemetry.ProfileEventsPublished.Add(1);
    _logger.LogInformation(
        "Published {EventType} event for profile {ProfileId} v{Version}",
        evt.EventType, evt.ProfileId, evt.ProfileVersion);
}
/// <summary>
/// Decides whether an event should be delivered to a subscription:
/// the event type must be listed, and the optional profile filter must
/// match (a trailing '*' makes it a case-insensitive prefix match,
/// otherwise it is a case-insensitive exact match).
/// </summary>
private static bool MatchesSubscription(ProfileEvent evt, EventSubscription subscription)
{
    // Check event type filter
    if (!subscription.EventTypes.Contains(evt.EventType))
    {
        return false;
    }
    // Check profile filter (supports wildcards)
    if (!string.IsNullOrWhiteSpace(subscription.ProfileFilter))
    {
        // Fix: EndsWith(string) without a StringComparison is culture-sensitive
        // (CA1310); the char overload is always an ordinal comparison.
        if (subscription.ProfileFilter.EndsWith('*'))
        {
            var prefix = subscription.ProfileFilter[..^1];
            if (!evt.ProfileId.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
            {
                return false;
            }
        }
        else if (!evt.ProfileId.Equals(subscription.ProfileFilter, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Generates a random event id of the form <c>pev-</c> plus 16 lowercase
/// hex characters (the first 8 bytes of a fresh GUID).
/// </summary>
private static string GenerateEventId()
{
    var guidBytes = Guid.NewGuid().ToByteArray();
    var hex = Convert.ToHexStringLower(guidBytes);
    return "pev-" + hex[..16];
}
/// <summary>
/// Generates a random subscription id of the form <c>psub-</c> plus 16
/// lowercase hex characters (the first 8 bytes of a fresh GUID).
/// </summary>
private static string GenerateSubscriptionId()
{
    var guidBytes = Guid.NewGuid().ToByteArray();
    var hex = Convert.ToHexStringLower(guidBytes);
    return "psub-" + hex[..16];
}
}

View File

@@ -0,0 +1,376 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Materialization;
/// <summary>
/// Represents an effective finding after policy evaluation.
/// Stored in tenant-scoped collections: effective_finding_{policyId}.
/// Instances are immutable; updates produce a new record with an incremented
/// <see cref="HistoryVersion"/> (see EffectiveFindingWriter).
/// </summary>
public sealed record EffectiveFinding
{
    /// <summary>
    /// Unique identifier for this effective finding.
    /// Format: sha256:{hash of tenantId|policyId|componentPurl|advisoryId}
    /// </summary>
    [JsonPropertyName("_id")]
    public required string Id { get; init; }
    /// <summary>
    /// Tenant identifier (normalized to lowercase).
    /// </summary>
    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }
    /// <summary>
    /// Policy identifier that produced this finding.
    /// </summary>
    [JsonPropertyName("policyId")]
    public required string PolicyId { get; init; }
    /// <summary>
    /// Policy version at time of evaluation.
    /// </summary>
    [JsonPropertyName("policyVersion")]
    public required int PolicyVersion { get; init; }
    /// <summary>
    /// Component PURL from the SBOM.
    /// </summary>
    [JsonPropertyName("componentPurl")]
    public required string ComponentPurl { get; init; }
    /// <summary>
    /// Component name.
    /// </summary>
    [JsonPropertyName("componentName")]
    public required string ComponentName { get; init; }
    /// <summary>
    /// Component version.
    /// </summary>
    [JsonPropertyName("componentVersion")]
    public required string ComponentVersion { get; init; }
    /// <summary>
    /// Advisory identifier (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("advisoryId")]
    public required string AdvisoryId { get; init; }
    /// <summary>
    /// Advisory source.
    /// </summary>
    [JsonPropertyName("advisorySource")]
    public required string AdvisorySource { get; init; }
    /// <summary>
    /// Policy evaluation status (affected, blocked, suppressed, etc.).
    /// </summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }
    /// <summary>
    /// Normalized severity (Critical, High, Medium, Low, etc.).
    /// </summary>
    [JsonPropertyName("severity")]
    public string? Severity { get; init; }
    /// <summary>
    /// Rule name that matched (if any).
    /// </summary>
    [JsonPropertyName("ruleName")]
    public string? RuleName { get; init; }
    /// <summary>
    /// VEX status overlay (if VEX was applied).
    /// </summary>
    [JsonPropertyName("vexStatus")]
    public string? VexStatus { get; init; }
    /// <summary>
    /// VEX justification (if VEX was applied).
    /// </summary>
    [JsonPropertyName("vexJustification")]
    public string? VexJustification { get; init; }
    /// <summary>
    /// Policy evaluation annotations. Defaults to empty, never null.
    /// </summary>
    [JsonPropertyName("annotations")]
    public ImmutableDictionary<string, string> Annotations { get; init; } =
        ImmutableDictionary<string, string>.Empty;
    /// <summary>
    /// Current history version (incremented on each update).
    /// </summary>
    [JsonPropertyName("historyVersion")]
    public required long HistoryVersion { get; init; }
    /// <summary>
    /// Reference to the policy run that produced this finding.
    /// </summary>
    [JsonPropertyName("policyRunId")]
    public string? PolicyRunId { get; init; }
    /// <summary>
    /// Trace ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("traceId")]
    public string? TraceId { get; init; }
    /// <summary>
    /// Span ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("spanId")]
    public string? SpanId { get; init; }
    /// <summary>
    /// When this finding was first created.
    /// </summary>
    [JsonPropertyName("createdAt")]
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// When this finding was last updated.
    /// </summary>
    [JsonPropertyName("updatedAt")]
    public required DateTimeOffset UpdatedAt { get; init; }
    /// <summary>
    /// Content hash for deduplication (see <see cref="ComputeContentHash"/>).
    /// </summary>
    [JsonPropertyName("contentHash")]
    public required string ContentHash { get; init; }
    /// <summary>
    /// Creates a deterministic finding ID by hashing the normalized identity
    /// tuple. Tenant and PURL are lowercased; policy and advisory ids are
    /// only trimmed, so their casing is significant.
    /// </summary>
    public static string CreateId(string tenantId, string policyId, string componentPurl, string advisoryId)
    {
        var normalizedTenant = (tenantId ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedPolicy = (policyId ?? string.Empty).Trim();
        var normalizedPurl = (componentPurl ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedAdvisory = (advisoryId ?? string.Empty).Trim();
        var input = $"{normalizedTenant}|{normalizedPolicy}|{normalizedPurl}|{normalizedAdvisory}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
    /// <summary>
    /// Computes a content hash for change detection over the mutable fields.
    /// Annotations are folded in sorted by key (case-insensitive) so the hash
    /// is order-independent.
    /// NOTE(review): fields are joined with '|' and annotations with '=' with
    /// no escaping, so values containing those characters can in principle
    /// collide — acceptable for change detection, not for security.
    /// </summary>
    public static string ComputeContentHash(
        string status,
        string? severity,
        string? ruleName,
        string? vexStatus,
        IReadOnlyDictionary<string, string>? annotations)
    {
        var sb = new StringBuilder();
        sb.Append(status ?? string.Empty);
        sb.Append('|');
        sb.Append(severity ?? string.Empty);
        sb.Append('|');
        sb.Append(ruleName ?? string.Empty);
        sb.Append('|');
        sb.Append(vexStatus ?? string.Empty);
        if (annotations is not null)
        {
            foreach (var kvp in annotations.OrderBy(x => x.Key, StringComparer.OrdinalIgnoreCase))
            {
                sb.Append('|');
                sb.Append(kvp.Key);
                sb.Append('=');
                sb.Append(kvp.Value);
            }
        }
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
/// <summary>
/// Append-only history entry for effective finding changes.
/// Stored in: effective_finding_history_{policyId}.
/// Entries are immutable once written; each carries the before/after state
/// of the change it records.
/// </summary>
public sealed record EffectiveFindingHistoryEntry
{
    /// <summary>
    /// Unique identifier for this history entry ({findingId}:v{version}).
    /// </summary>
    [JsonPropertyName("_id")]
    public required string Id { get; init; }
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }
    /// <summary>
    /// Reference to the effective finding.
    /// </summary>
    [JsonPropertyName("findingId")]
    public required string FindingId { get; init; }
    /// <summary>
    /// Policy identifier.
    /// </summary>
    [JsonPropertyName("policyId")]
    public required string PolicyId { get; init; }
    /// <summary>
    /// History version number (monotonically increasing).
    /// </summary>
    [JsonPropertyName("version")]
    public required long Version { get; init; }
    /// <summary>
    /// Type of change.
    /// </summary>
    [JsonPropertyName("changeType")]
    public required EffectiveFindingChangeType ChangeType { get; init; }
    /// <summary>
    /// Previous status (for status changes).
    /// </summary>
    [JsonPropertyName("previousStatus")]
    public string? PreviousStatus { get; init; }
    /// <summary>
    /// New status.
    /// </summary>
    [JsonPropertyName("newStatus")]
    public required string NewStatus { get; init; }
    /// <summary>
    /// Previous severity (for severity changes).
    /// </summary>
    [JsonPropertyName("previousSeverity")]
    public string? PreviousSeverity { get; init; }
    /// <summary>
    /// New severity.
    /// </summary>
    [JsonPropertyName("newSeverity")]
    public string? NewSeverity { get; init; }
    /// <summary>
    /// Previous content hash.
    /// NOTE(review): property name has a typo ("PreviContentHash"); the
    /// serialized JSON name below is correct. Renaming requires updating
    /// EffectiveFindingWriter.CreateHistoryEntry in the same file.
    /// </summary>
    [JsonPropertyName("previousContentHash")]
    public string? PreviContentHash { get; init; }
    /// <summary>
    /// New content hash.
    /// </summary>
    [JsonPropertyName("newContentHash")]
    public required string NewContentHash { get; init; }
    /// <summary>
    /// Policy run that triggered this change.
    /// </summary>
    [JsonPropertyName("policyRunId")]
    public string? PolicyRunId { get; init; }
    /// <summary>
    /// Trace ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("traceId")]
    public string? TraceId { get; init; }
    /// <summary>
    /// When this change occurred.
    /// </summary>
    [JsonPropertyName("occurredAt")]
    public required DateTimeOffset OccurredAt { get; init; }
    /// <summary>
    /// Creates a deterministic history entry ID from the finding id and the
    /// history version, so one entry exists per (finding, version) pair.
    /// </summary>
    public static string CreateId(string findingId, long version)
    {
        return $"{findingId}:v{version}";
    }
}
/// <summary>
/// Type of change to an effective finding. A single update is classified
/// with one value; when several fields change at once the writer picks the
/// most significant (status > severity > VEX > policy version > annotations).
/// </summary>
public enum EffectiveFindingChangeType
{
    /// <summary>Finding was created.</summary>
    Created,
    /// <summary>Status changed.</summary>
    StatusChanged,
    /// <summary>Severity changed.</summary>
    SeverityChanged,
    /// <summary>VEX overlay applied.</summary>
    VexApplied,
    /// <summary>Annotations changed.</summary>
    AnnotationsChanged,
    /// <summary>Policy version changed.</summary>
    PolicyVersionChanged
}
/// <summary>
/// Input for materializing effective findings. Mirrors the persisted
/// <see cref="EffectiveFinding"/> fields minus the derived values (id,
/// content hash, timestamps, history version), which the writer computes.
/// </summary>
public sealed record MaterializeFindingInput
{
    /// <summary>Tenant identifier (lowercased by the writer before persisting).</summary>
    public required string TenantId { get; init; }
    /// <summary>Policy identifier that produced the finding.</summary>
    public required string PolicyId { get; init; }
    /// <summary>Policy version at time of evaluation.</summary>
    public required int PolicyVersion { get; init; }
    /// <summary>Component PURL from the SBOM.</summary>
    public required string ComponentPurl { get; init; }
    /// <summary>Component name.</summary>
    public required string ComponentName { get; init; }
    /// <summary>Component version.</summary>
    public required string ComponentVersion { get; init; }
    /// <summary>Advisory identifier (CVE, GHSA, etc.).</summary>
    public required string AdvisoryId { get; init; }
    /// <summary>Advisory source.</summary>
    public required string AdvisorySource { get; init; }
    /// <summary>Policy evaluation status.</summary>
    public required string Status { get; init; }
    /// <summary>Normalized severity, if determined.</summary>
    public string? Severity { get; init; }
    /// <summary>Rule name that matched, if any.</summary>
    public string? RuleName { get; init; }
    /// <summary>VEX status overlay, if VEX was applied.</summary>
    public string? VexStatus { get; init; }
    /// <summary>VEX justification, if VEX was applied.</summary>
    public string? VexJustification { get; init; }
    /// <summary>Optional evaluation annotations; null is treated as empty.</summary>
    public ImmutableDictionary<string, string>? Annotations { get; init; }
    /// <summary>Policy run that produced this input, if known.</summary>
    public string? PolicyRunId { get; init; }
    /// <summary>Trace ID for distributed tracing.</summary>
    public string? TraceId { get; init; }
    /// <summary>Span ID for distributed tracing.</summary>
    public string? SpanId { get; init; }
}
/// <summary>
/// Result of a materialization operation for a single finding.
/// </summary>
public sealed record MaterializeFindingResult
{
    /// <summary>Deterministic id of the materialized finding.</summary>
    public required string FindingId { get; init; }
    /// <summary>True when the finding did not exist before and was inserted.</summary>
    public required bool WasCreated { get; init; }
    /// <summary>True when an existing finding's content changed and was updated.</summary>
    public required bool WasUpdated { get; init; }
    /// <summary>History version after the operation (unchanged when no-op).</summary>
    public required long HistoryVersion { get; init; }
    /// <summary>Classification of the change; null when nothing changed.</summary>
    public EffectiveFindingChangeType? ChangeType { get; init; }
}
/// <summary>
/// Result of a batch materialization operation. Counters satisfy
/// Created + Updated + Unchanged + Errors == TotalInputs.
/// </summary>
public sealed record MaterializeBatchResult
{
    /// <summary>Number of (deduplicated, ordered) inputs processed.</summary>
    public required int TotalInputs { get; init; }
    /// <summary>Findings newly inserted.</summary>
    public required int Created { get; init; }
    /// <summary>Findings whose content changed and were updated.</summary>
    public required int Updated { get; init; }
    /// <summary>Findings whose content hash matched (no-op).</summary>
    public required int Unchanged { get; init; }
    /// <summary>Inputs that threw during materialization (exception details are not retained).</summary>
    public required int Errors { get; init; }
    /// <summary>Wall-clock duration of the batch, in milliseconds.</summary>
    public required long ProcessingTimeMs { get; init; }
    /// <summary>Per-finding results for inputs that did not error.</summary>
    public ImmutableArray<MaterializeFindingResult> Results { get; init; } =
        ImmutableArray<MaterializeFindingResult>.Empty;
}

View File

@@ -0,0 +1,412 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.Materialization;
/// <summary>
/// Interface for the effective finding materialization store.
/// Implementations must treat findings as upsertable documents and history
/// entries as strictly append-only.
/// </summary>
public interface IEffectiveFindingStore
{
    /// <summary>
    /// Gets an effective finding by ID, or null when it does not exist.
    /// </summary>
    Task<EffectiveFinding?> GetByIdAsync(
        string tenantId,
        string policyId,
        string findingId,
        CancellationToken cancellationToken);
    /// <summary>
    /// Upserts an effective finding (insert or update).
    /// </summary>
    Task UpsertFindingAsync(
        EffectiveFinding finding,
        CancellationToken cancellationToken);
    /// <summary>
    /// Appends a history entry (insert only, never updates).
    /// </summary>
    Task AppendHistoryAsync(
        EffectiveFindingHistoryEntry entry,
        CancellationToken cancellationToken);
    /// <summary>
    /// Gets the collection name for findings of the given policy.
    /// </summary>
    string GetFindingsCollectionName(string policyId);
    /// <summary>
    /// Gets the collection name for history of the given policy.
    /// </summary>
    string GetHistoryCollectionName(string policyId);
}
/// <summary>
/// Materializes effective findings from policy evaluation results.
/// Implements upsert semantics with append-only history tracking.
/// Not synchronized itself; concurrency guarantees come from the store.
/// </summary>
public sealed class EffectiveFindingWriter
{
    private readonly IEffectiveFindingStore _store;
    private readonly TimeProvider _timeProvider;
    public EffectiveFindingWriter(IEffectiveFindingStore store, TimeProvider? timeProvider = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }
    /// <summary>
    /// Materializes a single effective finding: inserts it when new, updates
    /// it when its content hash changed, and does nothing otherwise. Every
    /// insert/update appends a history entry with the before/after state.
    /// NOTE(review): the finding upsert happens before the history append;
    /// a failure between the two leaves a finding without its history entry.
    /// </summary>
    public async Task<MaterializeFindingResult> MaterializeAsync(
        MaterializeFindingInput input,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(input);
        // Identity and content hash are both deterministic functions of the input.
        var findingId = EffectiveFinding.CreateId(
            input.TenantId,
            input.PolicyId,
            input.ComponentPurl,
            input.AdvisoryId);
        var contentHash = EffectiveFinding.ComputeContentHash(
            input.Status,
            input.Severity,
            input.RuleName,
            input.VexStatus,
            input.Annotations);
        var now = _timeProvider.GetUtcNow();
        // Try to get existing finding
        var existing = await _store.GetByIdAsync(
            input.TenantId,
            input.PolicyId,
            findingId,
            cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            // Create new finding
            var newFinding = CreateFinding(input, findingId, contentHash, now, historyVersion: 1);
            await _store.UpsertFindingAsync(newFinding, cancellationToken).ConfigureAwait(false);
            // Append creation history
            var historyEntry = CreateHistoryEntry(
                findingId,
                input,
                version: 1,
                EffectiveFindingChangeType.Created,
                previousStatus: null,
                previousSeverity: null,
                previousContentHash: null,
                newContentHash: contentHash,
                now);
            await _store.AppendHistoryAsync(historyEntry, cancellationToken).ConfigureAwait(false);
            return new MaterializeFindingResult
            {
                FindingId = findingId,
                WasCreated = true,
                WasUpdated = false,
                HistoryVersion = 1,
                ChangeType = EffectiveFindingChangeType.Created
            };
        }
        // Check if content changed
        if (string.Equals(existing.ContentHash, contentHash, StringComparison.Ordinal))
        {
            // No change - skip update (and append no history entry)
            return new MaterializeFindingResult
            {
                FindingId = findingId,
                WasCreated = false,
                WasUpdated = false,
                HistoryVersion = existing.HistoryVersion,
                ChangeType = null
            };
        }
        // Determine change type
        var changeType = DetermineChangeType(existing, input);
        var newVersion = existing.HistoryVersion + 1;
        // Update finding: rebuild from the input but preserve the original
        // CreatedAt, then stamp the update time.
        var updatedFinding = CreateFinding(input, findingId, contentHash, existing.CreatedAt, newVersion) with
        {
            UpdatedAt = now
        };
        await _store.UpsertFindingAsync(updatedFinding, cancellationToken).ConfigureAwait(false);
        // Append history entry
        var updateHistory = CreateHistoryEntry(
            findingId,
            input,
            newVersion,
            changeType,
            existing.Status,
            existing.Severity,
            existing.ContentHash,
            contentHash,
            now);
        await _store.AppendHistoryAsync(updateHistory, cancellationToken).ConfigureAwait(false);
        return new MaterializeFindingResult
        {
            FindingId = findingId,
            WasCreated = false,
            WasUpdated = true,
            HistoryVersion = newVersion,
            ChangeType = changeType
        };
    }
    /// <summary>
    /// Materializes a batch of effective findings with deterministic ordering
    /// (tenant, policy, PURL, advisory — all case-insensitive). Per-input
    /// failures are counted as errors and do not abort the batch;
    /// cancellation is always propagated.
    /// </summary>
    public async Task<MaterializeBatchResult> MaterializeBatchAsync(
        IEnumerable<MaterializeFindingInput> inputs,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        var stopwatch = Stopwatch.StartNew();
        // Process in deterministic order
        var orderedInputs = inputs
            .OrderBy(i => i.TenantId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.PolicyId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.ComponentPurl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToList();
        var results = new List<MaterializeFindingResult>();
        var created = 0;
        var updated = 0;
        var unchanged = 0;
        var errors = 0;
        foreach (var input in orderedInputs)
        {
            try
            {
                var result = await MaterializeAsync(input, cancellationToken).ConfigureAwait(false);
                results.Add(result);
                if (result.WasCreated)
                {
                    created++;
                }
                else if (result.WasUpdated)
                {
                    updated++;
                }
                else
                {
                    unchanged++;
                }
            }
            catch (OperationCanceledException)
            {
                throw;
            }
            catch
            {
                // Deliberate best-effort: the failing input is only counted.
                // NOTE(review): the exception detail is lost; consider logging.
                errors++;
            }
        }
        stopwatch.Stop();
        return new MaterializeBatchResult
        {
            TotalInputs = orderedInputs.Count,
            Created = created,
            Updated = updated,
            Unchanged = unchanged,
            Errors = errors,
            ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
            Results = results.ToImmutableArray()
        };
    }
    /// <summary>
    /// Builds the persisted finding record from an input; CreatedAt and
    /// UpdatedAt both start at <paramref name="createdAt"/> (callers adjust
    /// UpdatedAt via a with-expression on updates).
    /// </summary>
    private static EffectiveFinding CreateFinding(
        MaterializeFindingInput input,
        string findingId,
        string contentHash,
        DateTimeOffset createdAt,
        long historyVersion)
    {
        return new EffectiveFinding
        {
            Id = findingId,
            TenantId = input.TenantId.ToLowerInvariant(),
            PolicyId = input.PolicyId,
            PolicyVersion = input.PolicyVersion,
            ComponentPurl = input.ComponentPurl,
            ComponentName = input.ComponentName,
            ComponentVersion = input.ComponentVersion,
            AdvisoryId = input.AdvisoryId,
            AdvisorySource = input.AdvisorySource,
            Status = input.Status,
            Severity = input.Severity,
            RuleName = input.RuleName,
            VexStatus = input.VexStatus,
            VexJustification = input.VexJustification,
            Annotations = input.Annotations ?? ImmutableDictionary<string, string>.Empty,
            HistoryVersion = historyVersion,
            PolicyRunId = input.PolicyRunId,
            TraceId = input.TraceId,
            SpanId = input.SpanId,
            CreatedAt = createdAt,
            UpdatedAt = createdAt,
            ContentHash = contentHash
        };
    }
    /// <summary>
    /// Builds the append-only history record for one change, capturing the
    /// previous and new status/severity/content-hash.
    /// </summary>
    private static EffectiveFindingHistoryEntry CreateHistoryEntry(
        string findingId,
        MaterializeFindingInput input,
        long version,
        EffectiveFindingChangeType changeType,
        string? previousStatus,
        string? previousSeverity,
        string? previousContentHash,
        string newContentHash,
        DateTimeOffset occurredAt)
    {
        return new EffectiveFindingHistoryEntry
        {
            Id = EffectiveFindingHistoryEntry.CreateId(findingId, version),
            TenantId = input.TenantId.ToLowerInvariant(),
            FindingId = findingId,
            PolicyId = input.PolicyId,
            Version = version,
            ChangeType = changeType,
            PreviousStatus = previousStatus,
            NewStatus = input.Status,
            PreviousSeverity = previousSeverity,
            NewSeverity = input.Severity,
            // Property name carries an upstream typo; JSON name is correct.
            PreviContentHash = previousContentHash,
            NewContentHash = newContentHash,
            PolicyRunId = input.PolicyRunId,
            TraceId = input.TraceId,
            OccurredAt = occurredAt
        };
    }
    /// <summary>
    /// Classifies an update with a single change type, checked in priority
    /// order: status, severity, VEX status, policy version; anything else
    /// (content hash differed but none of the above) is attributed to
    /// annotations.
    /// </summary>
    private static EffectiveFindingChangeType DetermineChangeType(
        EffectiveFinding existing,
        MaterializeFindingInput input)
    {
        // Check for status change
        if (!string.Equals(existing.Status, input.Status, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.StatusChanged;
        }
        // Check for severity change
        if (!string.Equals(existing.Severity, input.Severity, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.SeverityChanged;
        }
        // Check for VEX change
        if (!string.Equals(existing.VexStatus, input.VexStatus, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.VexApplied;
        }
        // Check for policy version change
        if (existing.PolicyVersion != input.PolicyVersion)
        {
            return EffectiveFindingChangeType.PolicyVersionChanged;
        }
        // Default to annotations changed
        return EffectiveFindingChangeType.AnnotationsChanged;
    }
}
/// <summary>
/// In-memory implementation of effective finding store for testing.
/// Thread-safe via a single private gate. Findings are keyed by
/// "{tenant}:{policyId}:{findingId}" with case-insensitive comparison.
/// </summary>
public sealed class InMemoryEffectiveFindingStore : IEffectiveFindingStore
{
    private readonly Dictionary<string, EffectiveFinding> _findings = new(StringComparer.OrdinalIgnoreCase);
    private readonly List<EffectiveFindingHistoryEntry> _history = new();
    private readonly object _lock = new();

    /// <summary>Looks up a finding by its composite key, or null when absent.</summary>
    public Task<EffectiveFinding?> GetByIdAsync(
        string tenantId,
        string policyId,
        string findingId,
        CancellationToken cancellationToken)
    {
        var key = BuildKey(tenantId.ToLowerInvariant(), policyId, findingId);
        lock (_lock)
        {
            _findings.TryGetValue(key, out var finding);
            return Task.FromResult(finding);
        }
    }

    /// <summary>Inserts or replaces a finding under its composite key.</summary>
    public Task UpsertFindingAsync(EffectiveFinding finding, CancellationToken cancellationToken)
    {
        // finding.TenantId is already lowercased by the writer; the
        // case-insensitive dictionary covers any remaining mismatch.
        var key = BuildKey(finding.TenantId, finding.PolicyId, finding.Id);
        lock (_lock)
        {
            _findings[key] = finding;
        }
        return Task.CompletedTask;
    }

    /// <summary>Appends a history entry; entries are never updated or removed.</summary>
    public Task AppendHistoryAsync(EffectiveFindingHistoryEntry entry, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _history.Add(entry);
        }
        return Task.CompletedTask;
    }

    /// <summary>Collection name for findings of the given policy.</summary>
    public string GetFindingsCollectionName(string policyId) =>
        $"effective_finding_{policyId.ToLowerInvariant()}";

    /// <summary>Collection name for history of the given policy.</summary>
    public string GetHistoryCollectionName(string policyId) =>
        $"effective_finding_history_{policyId.ToLowerInvariant()}";

    /// <summary>Snapshot of all stored findings (test helper).</summary>
    public IReadOnlyList<EffectiveFinding> GetAllFindings()
    {
        lock (_lock)
        {
            return _findings.Values.ToList();
        }
    }

    /// <summary>Snapshot of all history entries in append order (test helper).</summary>
    public IReadOnlyList<EffectiveFindingHistoryEntry> GetAllHistory()
    {
        lock (_lock)
        {
            return _history.ToList();
        }
    }

    /// <summary>History entries for one finding, ordered by version (test helper).</summary>
    public IReadOnlyList<EffectiveFindingHistoryEntry> GetHistoryForFinding(string findingId)
    {
        lock (_lock)
        {
            return _history
                // Fix: match case-insensitively for consistency with the
                // OrdinalIgnoreCase findings dictionary (was ordinal '==').
                .Where(h => string.Equals(h.FindingId, findingId, StringComparison.OrdinalIgnoreCase))
                .OrderBy(h => h.Version)
                .ToList();
        }
    }

    // Single place that defines the composite key format used by both
    // GetByIdAsync and UpsertFindingAsync.
    private static string BuildKey(string tenantId, string policyId, string findingId) =>
        $"{tenantId}:{policyId}:{findingId}";
}

View File

@@ -1,5 +1,6 @@
using System.Collections.ObjectModel;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.Options;
@@ -27,6 +28,8 @@ public sealed class PolicyEngineOptions
public PolicyEngineRiskProfileOptions RiskProfile { get; } = new();
public ReachabilityFactsCacheOptions ReachabilityCache { get; } = new();
public void Validate()
{
Authority.Validate();

View File

@@ -16,6 +16,7 @@ using StellaOps.Policy.Engine.Streaming;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.AirGap.Policy;
using StellaOps.Policy.Engine.Orchestration;
using StellaOps.Policy.Engine.ReachabilityFacts;
var builder = WebApplication.CreateBuilder(args);
@@ -116,8 +117,13 @@ builder.Services.AddSingleton<PolicyEvaluationAttestationService>();
builder.Services.AddSingleton<IncidentModeService>();
builder.Services.AddSingleton<RiskProfileConfigurationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Lifecycle.RiskProfileLifecycleService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Scope.ScopeAttachmentService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Overrides.OverrideService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.IRiskScoringJobStore, StellaOps.Policy.Engine.Scoring.InMemoryRiskScoringJobStore>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.RiskScoringTriggerService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Simulation.RiskSimulationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Export.ProfileExportService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Events.ProfileEventPublisher>();
builder.Services.AddHostedService<IncidentModeExpirationWorker>();
builder.Services.AddHostedService<PolicyEngineBootstrapWorker>();
builder.Services.AddSingleton<StellaOps.PolicyDsl.PolicyCompiler>();
@@ -148,6 +154,10 @@ builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.IViolationEvent
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.ViolationEventService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.SeverityFusionService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.ConflictHandlingService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Services.PolicyDecisionService>();
builder.Services.AddSingleton<IReachabilityFactsStore, InMemoryReachabilityFactsStore>();
builder.Services.AddSingleton<IReachabilityFactsOverlayCache, InMemoryReachabilityFactsOverlayCache>();
builder.Services.AddSingleton<ReachabilityFactsJoiningService>();
builder.Services.AddHttpContextAccessor();
builder.Services.AddRouting(options => options.LowercaseUrls = true);
@@ -205,7 +215,13 @@ app.MapPolicyWorker();
app.MapLedgerExport();
app.MapSnapshots();
app.MapViolations();
app.MapPolicyDecisions();
app.MapRiskProfiles();
app.MapRiskProfileSchema();
app.MapScopeAttachments();
app.MapRiskSimulation();
app.MapOverrides();
app.MapProfileExport();
app.MapProfileEvents();
app.Run();

View File

@@ -0,0 +1,270 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Service for joining reachability facts with policy evaluation inputs.
/// Provides efficient batch lookups with caching and metrics.
/// </summary>
public sealed class ReachabilityFactsJoiningService
{
private readonly IReachabilityFactsStore _store;
private readonly IReachabilityFactsOverlayCache _cache;
private readonly ILogger<ReachabilityFactsJoiningService> _logger;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates the joining service.
/// </summary>
/// <param name="store">Authoritative reachability facts store.</param>
/// <param name="cache">Overlay cache consulted before the store.</param>
/// <param name="logger">Diagnostic logger.</param>
/// <param name="timeProvider">Clock abstraction (kept injectable for tests).</param>
/// <exception cref="ArgumentNullException">If any dependency is null.</exception>
public ReachabilityFactsJoiningService(
    IReachabilityFactsStore store,
    IReachabilityFactsOverlayCache cache,
    ILogger<ReachabilityFactsJoiningService> logger,
    TimeProvider timeProvider)
{
    // .NET 6+ throw helpers: same exception type and parameter name as the
    // previous `?? throw` ternaries, without the repetition.
    ArgumentNullException.ThrowIfNull(store);
    ArgumentNullException.ThrowIfNull(cache);
    ArgumentNullException.ThrowIfNull(logger);
    ArgumentNullException.ThrowIfNull(timeProvider);

    _store = store;
    _cache = cache;
    _logger = logger;
    _timeProvider = timeProvider;
}
/// <summary>
/// Gets reachability facts for a batch of component-advisory pairs.
/// Uses cache-first strategy with store fallback: cache misses are fetched
/// from the store, written back to the cache, and merged into the result.
/// Duplicate pairs in <paramref name="items"/> are collapsed to one lookup.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="items">List of component-advisory pairs.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Batch result with facts and cache statistics.</returns>
public async Task<ReachabilityFactsBatch> GetFactsBatchAsync(
    string tenantId,
    IReadOnlyList<ReachabilityFactsRequest> items,
    CancellationToken cancellationToken = default)
{
    using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
        "reachability_facts.batch_lookup",
        ActivityKind.Internal);
    activity?.SetTag("tenant", tenantId);
    activity?.SetTag("batch_size", items.Count);
    // Distinct() relies on ReachabilityFactKey equality to dedupe requests.
    var keys = items
        .Select(i => new ReachabilityFactKey(tenantId, i.ComponentPurl, i.AdvisoryId))
        .Distinct()
        .ToList();
    // Try cache first
    var cacheResult = await _cache.GetBatchAsync(keys, cancellationToken).ConfigureAwait(false);
    ReachabilityFactsTelemetry.RecordCacheHits(cacheResult.CacheHits);
    ReachabilityFactsTelemetry.RecordCacheMisses(cacheResult.CacheMisses);
    activity?.SetTag("cache_hits", cacheResult.CacheHits);
    activity?.SetTag("cache_misses", cacheResult.CacheMisses);
    if (cacheResult.NotFound.Count == 0)
    {
        // All items found in cache
        return cacheResult;
    }
    // Fetch missing items from store
    var storeResults = await _store.GetBatchAsync(cacheResult.NotFound, cancellationToken)
        .ConfigureAwait(false);
    activity?.SetTag("store_hits", storeResults.Count);
    // Populate cache with store results so the next lookup hits the cache.
    if (storeResults.Count > 0)
    {
        await _cache.SetBatchAsync(storeResults, cancellationToken).ConfigureAwait(false);
    }
    // Merge results (store results win for keys the cache did not have).
    var allFound = new Dictionary<ReachabilityFactKey, ReachabilityFact>(cacheResult.Found);
    foreach (var (key, fact) in storeResults)
    {
        allFound[key] = fact;
    }
    // Keys absent from both cache and store remain reported as NotFound.
    var stillNotFound = cacheResult.NotFound
        .Where(k => !storeResults.ContainsKey(k))
        .ToList();
    _logger.LogDebug(
        "Reachability facts lookup: {Total} requested, {CacheHits} cache hits, {StoreFetched} from store, {NotFound} not found",
        keys.Count,
        cacheResult.CacheHits,
        storeResults.Count,
        stillNotFound.Count);
    return new ReachabilityFactsBatch
    {
        Found = allFound,
        NotFound = stillNotFound,
        CacheHits = cacheResult.CacheHits,
        CacheMisses = cacheResult.CacheMisses,
    };
}
/// <summary>
/// Gets a single reachability fact, consulting the cache before the store.
/// A store hit is written back to the cache for subsequent lookups.
/// </summary>
public async Task<ReachabilityFact?> GetFactAsync(
    string tenantId,
    string componentPurl,
    string advisoryId,
    CancellationToken cancellationToken = default)
{
    var key = new ReachabilityFactKey(tenantId, componentPurl, advisoryId);

    var (cachedFact, hit) = await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
    if (hit)
    {
        ReachabilityFactsTelemetry.RecordCacheHits(1);
        return cachedFact;
    }

    ReachabilityFactsTelemetry.RecordCacheMisses(1);

    var stored = await _store.GetAsync(tenantId, componentPurl, advisoryId, cancellationToken)
        .ConfigureAwait(false);
    if (stored is not null)
    {
        // Write back so the next lookup is a cache hit.
        await _cache.SetAsync(key, stored, cancellationToken).ConfigureAwait(false);
    }

    return stored;
}
/// <summary>
/// Enriches signal context with reachability facts.
/// The "reachability" entry is always (re)written, either with the fact's
/// values or with a stable default "unknown" payload.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="componentPurl">Component PURL.</param>
/// <param name="advisoryId">Advisory ID.</param>
/// <param name="signals">Signal context to enrich.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if reachability fact was found and applied.</returns>
public async Task<bool> EnrichSignalsAsync(
    string tenantId,
    string componentPurl,
    string advisoryId,
    IDictionary<string, object?> signals,
    CancellationToken cancellationToken = default)
{
    var fact = await GetFactAsync(tenantId, componentPurl, advisoryId, cancellationToken)
        .ConfigureAwait(false);
    if (fact == null)
    {
        // No fact available: publish an explicit "unknown" payload so
        // downstream consumers always see the same shape.
        signals["reachability"] = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["state"] = "unknown",
            ["confidence"] = 0m,
            ["score"] = 0m,
            ["has_runtime_evidence"] = false,
        };
        return false;
    }

    // Compute the lower-cased names once; previously the state string was
    // derived twice (payload + telemetry), duplicating work and risking
    // divergence if one call site changed.
    var state = fact.State.ToString().ToLowerInvariant();
    var method = fact.Method.ToString().ToLowerInvariant();

    signals["reachability"] = new Dictionary<string, object?>(StringComparer.Ordinal)
    {
        ["state"] = state,
        ["confidence"] = fact.Confidence,
        ["score"] = fact.Score,
        ["has_runtime_evidence"] = fact.HasRuntimeEvidence,
        ["source"] = fact.Source,
        ["method"] = method,
    };
    ReachabilityFactsTelemetry.RecordFactApplied(state);
    return true;
}
/// <summary>
/// Persists a reachability fact to the store and writes it through to the cache.
/// </summary>
public async Task SaveFactAsync(
    ReachabilityFact fact,
    CancellationToken cancellationToken = default)
{
    await _store.SaveAsync(fact, cancellationToken).ConfigureAwait(false);

    // Write-through: keep the cache coherent with the store.
    var cacheKey = new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId);
    await _cache.SetAsync(cacheKey, fact, cancellationToken).ConfigureAwait(false);

    _logger.LogDebug(
        "Saved reachability fact: {TenantId}/{ComponentPurl}/{AdvisoryId} = {State} ({Confidence:P0})",
        fact.TenantId,
        fact.ComponentPurl,
        fact.AdvisoryId,
        fact.State,
        fact.Confidence);
}
/// <summary>
/// Invalidates the cache entry for a fact that was updated externally.
/// </summary>
public Task InvalidateCacheAsync(
    string tenantId,
    string componentPurl,
    string advisoryId,
    CancellationToken cancellationToken = default)
    => _cache.InvalidateAsync(
        new ReachabilityFactKey(tenantId, componentPurl, advisoryId),
        cancellationToken);
/// <summary>
/// Gets cache statistics (hit/miss counters, item and eviction counts)
/// from the overlay cache.
/// </summary>
public ReachabilityFactsCacheStats GetCacheStats() => _cache.GetStats();
}
/// <summary>
/// Request item for batch reachability facts lookup.
/// </summary>
/// <param name="ComponentPurl">Package URL of the component to look up.</param>
/// <param name="AdvisoryId">Advisory identifier (CVE, GHSA, etc.) paired with the component.</param>
public sealed record ReachabilityFactsRequest(string ComponentPurl, string AdvisoryId);
/// <summary>
/// Telemetry for reachability facts operations.
/// Thin pass-through to PolicyEngineTelemetry so metrics stay centralized.
/// </summary>
public static class ReachabilityFactsTelemetry
{
    /// <summary>
    /// Records cache hits.
    /// </summary>
    public static void RecordCacheHits(int count)
        => PolicyEngineTelemetry.RecordReachabilityCacheHits(count);

    /// <summary>
    /// Records cache misses.
    /// </summary>
    public static void RecordCacheMisses(int count)
        => PolicyEngineTelemetry.RecordReachabilityCacheMisses(count);

    /// <summary>
    /// Records a reachability fact being applied.
    /// </summary>
    public static void RecordFactApplied(string state)
        => PolicyEngineTelemetry.RecordReachabilityApplied(state);

    /// <summary>
    /// Gets the current cache hit ratio from stats.
    /// </summary>
    public static double GetCacheHitRatio(ReachabilityFactsCacheStats stats)
        => stats.HitRatio;
}

View File

@@ -0,0 +1,258 @@
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Represents a reachability fact for a component-vulnerability pair.
/// Immutable snapshot of one analysis result; persisted by the facts store
/// and cached by the overlay cache.
/// </summary>
public sealed record ReachabilityFact
{
    /// <summary>
    /// Unique identifier for this reachability fact.
    /// </summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }
    /// <summary>
    /// Component PURL this fact applies to.
    /// </summary>
    [JsonPropertyName("component_purl")]
    public required string ComponentPurl { get; init; }
    /// <summary>
    /// Vulnerability/advisory identifier (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("advisory_id")]
    public required string AdvisoryId { get; init; }
    /// <summary>
    /// Reachability state (reachable, unreachable, unknown, under_investigation).
    /// </summary>
    [JsonPropertyName("state")]
    public required ReachabilityState State { get; init; }
    /// <summary>
    /// Confidence score (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public required decimal Confidence { get; init; }
    /// <summary>
    /// Reachability score (0.0 to 1.0, higher = more reachable).
    /// </summary>
    [JsonPropertyName("score")]
    public decimal Score { get; init; }
    /// <summary>
    /// Whether this fact has runtime evidence backing it.
    /// </summary>
    [JsonPropertyName("has_runtime_evidence")]
    public bool HasRuntimeEvidence { get; init; }
    /// <summary>
    /// Source of the reachability analysis.
    /// </summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }
    /// <summary>
    /// Analysis method used (static, dynamic, hybrid).
    /// </summary>
    [JsonPropertyName("method")]
    public required AnalysisMethod Method { get; init; }
    /// <summary>
    /// Reference to the call graph or evidence artifact.
    /// </summary>
    [JsonPropertyName("evidence_ref")]
    public string? EvidenceRef { get; init; }
    /// <summary>
    /// Content hash of the analysis evidence.
    /// </summary>
    [JsonPropertyName("evidence_hash")]
    public string? EvidenceHash { get; init; }
    /// <summary>
    /// Timestamp when this fact was computed.
    /// </summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }
    /// <summary>
    /// Timestamp when this fact expires and should be recomputed.
    /// When set and in the future, the in-memory overlay cache uses it to
    /// bound the cached entry's TTL.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }
    /// <summary>
    /// Additional metadata.
    /// NOTE(review): mutable Dictionary on an otherwise immutable record —
    /// callers holding a reference can alter it after construction; consider
    /// IReadOnlyDictionary if that matters.
    /// </summary>
    [JsonPropertyName("metadata")]
    public Dictionary<string, object?>? Metadata { get; init; }
}
/// <summary>
/// Reachability state enumeration aligned with VEX status semantics.
/// </summary>
/// <remarks>
/// NOTE(review): System.Text.Json's string enum converter does not honor
/// [JsonPropertyName] on enum members; without a naming policy these values
/// serialize as their C# member names (e.g. "UnderInvestigation"), not the
/// snake_case names below. Confirm the intended wire format (a naming policy
/// or JsonStringEnumMemberName on .NET 9+ would be needed for snake_case).
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ReachabilityState>))]
public enum ReachabilityState
{
    /// <summary>
    /// The vulnerable code path is reachable from application entry points.
    /// </summary>
    [JsonPropertyName("reachable")]
    Reachable,
    /// <summary>
    /// The vulnerable code path is not reachable from application entry points.
    /// </summary>
    [JsonPropertyName("unreachable")]
    Unreachable,
    /// <summary>
    /// Reachability status is unknown or could not be determined.
    /// </summary>
    [JsonPropertyName("unknown")]
    Unknown,
    /// <summary>
    /// Reachability is under investigation and may change.
    /// </summary>
    [JsonPropertyName("under_investigation")]
    UnderInvestigation,
}
/// <summary>
/// Analysis method enumeration.
/// </summary>
/// <remarks>
/// NOTE(review): as with ReachabilityState, [JsonPropertyName] on enum
/// members is ignored by JsonStringEnumConverter — these serialize as the
/// C# member names ("Static", "Dynamic", ...) unless a naming policy is
/// configured. Confirm the intended wire format.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<AnalysisMethod>))]
public enum AnalysisMethod
{
    /// <summary>
    /// Static analysis (call graph, data flow).
    /// </summary>
    [JsonPropertyName("static")]
    Static,
    /// <summary>
    /// Dynamic analysis (runtime profiling, instrumentation).
    /// </summary>
    [JsonPropertyName("dynamic")]
    Dynamic,
    /// <summary>
    /// Hybrid approach combining static and dynamic analysis.
    /// </summary>
    [JsonPropertyName("hybrid")]
    Hybrid,
    /// <summary>
    /// Manual assessment or expert judgment.
    /// </summary>
    [JsonPropertyName("manual")]
    Manual,
}
/// <summary>
/// Query parameters for fetching reachability facts.
/// All supplied filters are applied conjunctively (AND) by the in-memory
/// store implementation; other implementations should match.
/// </summary>
public sealed record ReachabilityFactsQuery
{
    /// <summary>
    /// Tenant identifier (required).
    /// </summary>
    public required string TenantId { get; init; }
    /// <summary>
    /// Component PURLs to query (optional filter).
    /// </summary>
    public IReadOnlyList<string>? ComponentPurls { get; init; }
    /// <summary>
    /// Advisory IDs to query (optional filter).
    /// </summary>
    public IReadOnlyList<string>? AdvisoryIds { get; init; }
    /// <summary>
    /// Filter by reachability states (optional).
    /// </summary>
    public IReadOnlyList<ReachabilityState>? States { get; init; }
    /// <summary>
    /// Minimum confidence threshold (optional, inclusive).
    /// </summary>
    public decimal? MinConfidence { get; init; }
    /// <summary>
    /// Include expired facts (default: false).
    /// </summary>
    public bool IncludeExpired { get; init; }
    /// <summary>
    /// Maximum number of results (default: 1000).
    /// </summary>
    public int Limit { get; init; } = 1000;
    /// <summary>
    /// Skip count for pagination.
    /// </summary>
    public int Skip { get; init; }
}
/// <summary>
/// Composite key for caching reachability facts.
/// </summary>
public readonly record struct ReachabilityFactKey(string TenantId, string ComponentPurl, string AdvisoryId)
{
    /// <summary>
    /// Creates a cache key string from this composite key ("rf:{tenant}:{purl}:{advisory}").
    /// </summary>
    public string ToCacheKey() => $"rf:{TenantId}:{ComponentPurl}:{AdvisoryId}";

    /// <summary>
    /// Parses a cache key back into a composite key, or null when malformed.
    /// </summary>
    /// <remarks>
    /// PURLs contain ':' (e.g. "pkg:npm/..."), so the purl segment cannot be
    /// recovered with a plain 4-way split — Split(':', 4) stopped inside the
    /// purl and returned "pkg" as the component. Instead, the tenant is taken
    /// up to the first ':' after the "rf:" prefix and the advisory after the
    /// last ':'; everything between is the purl. Assumes tenant ids and
    /// advisory ids themselves contain no ':' (true for CVE/GHSA-style ids).
    /// </remarks>
    public static ReachabilityFactKey? FromCacheKey(string key)
    {
        const string Prefix = "rf:";
        if (string.IsNullOrEmpty(key) || !key.StartsWith(Prefix, StringComparison.Ordinal))
        {
            return null;
        }
        var tenantEnd = key.IndexOf(':', Prefix.Length);
        var advisoryStart = key.LastIndexOf(':');
        if (tenantEnd < 0 || advisoryStart <= tenantEnd)
        {
            return null;
        }
        return new ReachabilityFactKey(
            key[Prefix.Length..tenantEnd],
            key[(tenantEnd + 1)..advisoryStart],
            key[(advisoryStart + 1)..]);
    }
}
/// <summary>
/// Batch lookup result for reachability facts.
/// </summary>
public sealed record ReachabilityFactsBatch
{
    /// <summary>
    /// Facts that were found, keyed by their composite lookup key.
    /// </summary>
    public required IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> Found { get; init; }
    /// <summary>
    /// Keys that were not found.
    /// </summary>
    public required IReadOnlyList<ReachabilityFactKey> NotFound { get; init; }
    /// <summary>
    /// Number of cache hits recorded while serving this batch.
    /// </summary>
    public int CacheHits { get; init; }
    /// <summary>
    /// Number of cache misses that required store lookup.
    /// </summary>
    public int CacheMisses { get; init; }
}

View File

@@ -0,0 +1,333 @@
using System.Collections.Concurrent;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Interface for the reachability facts overlay cache.
/// Provides fast in-memory/Redis caching layer above the persistent store.
/// </summary>
public interface IReachabilityFactsOverlayCache
{
    /// <summary>
    /// Gets a reachability fact from the cache.
    /// </summary>
    /// <returns>The cached fact (null on a miss) plus a flag indicating whether the lookup hit the cache.</returns>
    Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets multiple reachability facts from the cache.
    /// </summary>
    /// <returns>A batch result partitioning the keys into found/not-found with hit/miss counts.</returns>
    Task<ReachabilityFactsBatch> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Sets a reachability fact in the cache.
    /// </summary>
    Task SetAsync(
        ReachabilityFactKey key,
        ReachabilityFact fact,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Sets multiple reachability facts in the cache.
    /// </summary>
    Task SetBatchAsync(
        IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Invalidates a cache entry.
    /// </summary>
    Task InvalidateAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Invalidates all cache entries for a tenant.
    /// </summary>
    Task InvalidateTenantAsync(
        string tenantId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets cache statistics.
    /// </summary>
    ReachabilityFactsCacheStats GetStats();
}
/// <summary>
/// Cache statistics.
/// </summary>
public sealed record ReachabilityFactsCacheStats
{
    /// <summary>Total number of cache lookups observed.</summary>
    public long TotalRequests { get; init; }
    /// <summary>Number of lookups served from the cache.</summary>
    public long CacheHits { get; init; }
    /// <summary>Number of lookups that missed the cache.</summary>
    public long CacheMisses { get; init; }
    /// <summary>Hits divided by total requests; 0 when no requests recorded.</summary>
    public double HitRatio => TotalRequests > 0 ? (double)CacheHits / TotalRequests : 0;
    /// <summary>Current number of cached entries.</summary>
    public long ItemCount { get; init; }
    /// <summary>Number of entries evicted (expired or capacity-driven).</summary>
    public long EvictionCount { get; init; }
}
/// <summary>
/// In-memory implementation of the reachability facts overlay cache.
/// Uses a time-based eviction strategy with configurable TTL and a soft
/// item cap enforced by EnsureCapacity.
/// </summary>
public sealed class InMemoryReachabilityFactsOverlayCache : IReachabilityFactsOverlayCache
{
    private readonly ConcurrentDictionary<string, CacheEntry> _cache;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<InMemoryReachabilityFactsOverlayCache> _logger;
    private readonly TimeSpan _defaultTtl;
    private readonly int _maxItems;

    // Stats counters; updated via Interlocked so GetStats() is safe under concurrency.
    private long _totalRequests;
    private long _cacheHits;
    private long _cacheMisses;
    private long _evictionCount;

    // NOTE: the previous revision declared an unused private static
    // JsonSerializerOptions field here; it was dead code (no serialization
    // happens in this class) and has been removed.

    public InMemoryReachabilityFactsOverlayCache(
        ILogger<InMemoryReachabilityFactsOverlayCache> logger,
        TimeProvider timeProvider,
        IOptions<PolicyEngineOptions> options)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _cache = new ConcurrentDictionary<string, CacheEntry>(StringComparer.Ordinal);
        var cacheOptions = options?.Value.ReachabilityCache ?? new ReachabilityFactsCacheOptions();
        _defaultTtl = TimeSpan.FromMinutes(cacheOptions.DefaultTtlMinutes);
        _maxItems = cacheOptions.MaxItems;
    }

    /// <summary>
    /// Gets a fact from the cache. An expired entry counts as a miss and is removed.
    /// </summary>
    public Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref _totalRequests);
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        if (_cache.TryGetValue(cacheKey, out var entry) && entry.ExpiresAt > now)
        {
            Interlocked.Increment(ref _cacheHits);
            return Task.FromResult<(ReachabilityFact?, bool)>((entry.Fact, true));
        }
        Interlocked.Increment(ref _cacheMisses);
        // Remove expired entry if present so it no longer counts toward capacity.
        if (entry != null)
        {
            _cache.TryRemove(cacheKey, out _);
        }
        return Task.FromResult<(ReachabilityFact?, bool)>((null, false));
    }

    /// <summary>
    /// Looks up each key via GetAsync and partitions the results into
    /// found/not-found with per-batch hit/miss counts.
    /// </summary>
    public async Task<ReachabilityFactsBatch> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        var found = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        var notFound = new List<ReachabilityFactKey>();
        var cacheHits = 0;
        var cacheMisses = 0;
        foreach (var key in keys)
        {
            var (fact, hit) = await GetAsync(key, cancellationToken).ConfigureAwait(false);
            if (fact != null)
            {
                found[key] = fact;
                cacheHits++;
            }
            else
            {
                notFound.Add(key);
                cacheMisses++;
            }
        }
        return new ReachabilityFactsBatch
        {
            Found = found,
            NotFound = notFound,
            CacheHits = cacheHits,
            CacheMisses = cacheMisses,
        };
    }

    /// <summary>
    /// Caches a fact. The TTL is bounded by the fact's own ExpiresAt when it
    /// is set and lies in the future; otherwise the configured default applies.
    /// </summary>
    public Task SetAsync(
        ReachabilityFactKey key,
        ReachabilityFact fact,
        CancellationToken cancellationToken = default)
    {
        EnsureCapacity();
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
            ? fact.ExpiresAt.Value - now
            : _defaultTtl;
        var entry = new CacheEntry(fact, now.Add(ttl));
        _cache[cacheKey] = entry;
        return Task.CompletedTask;
    }

    /// <summary>
    /// Caches a batch of facts with the same TTL rules as SetAsync.
    /// </summary>
    public Task SetBatchAsync(
        IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts,
        CancellationToken cancellationToken = default)
    {
        EnsureCapacity(facts.Count);
        var now = _timeProvider.GetUtcNow();
        foreach (var (key, fact) in facts)
        {
            var cacheKey = key.ToCacheKey();
            var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
                ? fact.ExpiresAt.Value - now
                : _defaultTtl;
            var entry = new CacheEntry(fact, now.Add(ttl));
            _cache[cacheKey] = entry;
        }
        return Task.CompletedTask;
    }

    /// <summary>
    /// Removes a single cache entry; no-op when absent.
    /// </summary>
    public Task InvalidateAsync(ReachabilityFactKey key, CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        _cache.TryRemove(cacheKey, out _);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Removes every cache entry whose key carries the tenant prefix ("rf:{tenant}:").
    /// </summary>
    public Task InvalidateTenantAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        var prefix = $"rf:{tenantId}:";
        var keysToRemove = _cache.Keys.Where(k => k.StartsWith(prefix, StringComparison.Ordinal)).ToList();
        foreach (var key in keysToRemove)
        {
            _cache.TryRemove(key, out _);
        }
        _logger.LogDebug("Invalidated {Count} cache entries for tenant {TenantId}", keysToRemove.Count, tenantId);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Returns a snapshot of the hit/miss/eviction counters and current item count.
    /// </summary>
    public ReachabilityFactsCacheStats GetStats()
    {
        return new ReachabilityFactsCacheStats
        {
            TotalRequests = Interlocked.Read(ref _totalRequests),
            CacheHits = Interlocked.Read(ref _cacheHits),
            CacheMisses = Interlocked.Read(ref _cacheMisses),
            ItemCount = _cache.Count,
            EvictionCount = Interlocked.Read(ref _evictionCount),
        };
    }

    // Best-effort eviction: first drops expired entries, then the entries
    // closest to expiry, aiming to free 10% headroom beyond what the insert
    // needs. Not atomic under concurrency — the cap is a soft limit.
    private void EnsureCapacity(int additionalItems = 1)
    {
        if (_cache.Count + additionalItems <= _maxItems)
        {
            return;
        }
        var now = _timeProvider.GetUtcNow();
        var itemsToRemove = _cache.Count + additionalItems - _maxItems + (_maxItems / 10); // Remove 10% extra
        // First, remove expired items
        var expiredKeys = _cache
            .Where(kvp => kvp.Value.ExpiresAt <= now)
            .Select(kvp => kvp.Key)
            .ToList();
        foreach (var key in expiredKeys)
        {
            if (_cache.TryRemove(key, out _))
            {
                Interlocked.Increment(ref _evictionCount);
                itemsToRemove--;
            }
        }
        if (itemsToRemove <= 0)
        {
            return;
        }
        // Then, remove oldest items by expiration time
        var oldestKeys = _cache
            .OrderBy(kvp => kvp.Value.ExpiresAt)
            .Take(itemsToRemove)
            .Select(kvp => kvp.Key)
            .ToList();
        foreach (var key in oldestKeys)
        {
            if (_cache.TryRemove(key, out _))
            {
                Interlocked.Increment(ref _evictionCount);
            }
        }
        _logger.LogDebug(
            "Evicted {EvictedCount} cache entries (expired: {ExpiredCount}, oldest: {OldestCount})",
            expiredKeys.Count + oldestKeys.Count,
            expiredKeys.Count,
            oldestKeys.Count);
    }

    // Cached fact plus its absolute expiry instant.
    private sealed record CacheEntry(ReachabilityFact Fact, DateTimeOffset ExpiresAt);
}
/// <summary>
/// Configuration options for the reachability facts cache.
/// </summary>
public sealed class ReachabilityFactsCacheOptions
{
    /// <summary>
    /// Default TTL for cache entries in minutes. Used when a fact carries no
    /// usable expiry of its own.
    /// </summary>
    public int DefaultTtlMinutes { get; set; } = 15;
    /// <summary>
    /// Maximum number of items in the cache (soft cap; eviction trims back under it).
    /// </summary>
    public int MaxItems { get; set; } = 100000;
    /// <summary>
    /// Whether to enable Redis as a distributed cache layer.
    /// NOTE(review): not consumed by the in-memory implementation in this file —
    /// confirm a Redis-backed cache implementation exists elsewhere.
    /// </summary>
    public bool EnableRedis { get; set; }
    /// <summary>
    /// Redis connection string.
    /// </summary>
    public string? RedisConnectionString { get; set; }
    /// <summary>
    /// Redis key prefix for reachability facts.
    /// </summary>
    public string RedisKeyPrefix { get; set; } = "stellaops:rf:";
}

View File

@@ -0,0 +1,213 @@
using System.Collections.Concurrent;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Store interface for reachability facts persistence.
/// </summary>
public interface IReachabilityFactsStore
{
/// <summary>
/// Gets a single reachability fact by key.
/// </summary>
Task<ReachabilityFact?> GetAsync(
string tenantId,
string componentPurl,
string advisoryId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets multiple reachability facts by keys.
/// </summary>
Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(
IReadOnlyList<ReachabilityFactKey> keys,
CancellationToken cancellationToken = default);
/// <summary>
/// Queries reachability facts with filtering.
/// </summary>
Task<IReadOnlyList<ReachabilityFact>> QueryAsync(
ReachabilityFactsQuery query,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves or updates a reachability fact.
/// </summary>
Task SaveAsync(
ReachabilityFact fact,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves multiple reachability facts.
/// </summary>
Task SaveBatchAsync(
IReadOnlyList<ReachabilityFact> facts,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a reachability fact.
/// </summary>
Task DeleteAsync(
string tenantId,
string componentPurl,
string advisoryId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the count of facts for a tenant.
/// </summary>
Task<long> CountAsync(
string tenantId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// In-memory implementation of the reachability facts store for development and testing.
/// Backed by a ConcurrentDictionary; not intended as production persistence.
/// </summary>
public sealed class InMemoryReachabilityFactsStore : IReachabilityFactsStore
{
    private readonly ConcurrentDictionary<ReachabilityFactKey, ReachabilityFact> _facts = new();
    private readonly TimeProvider _timeProvider;

    public InMemoryReachabilityFactsStore(TimeProvider? timeProvider = null)
        => _timeProvider = timeProvider ?? TimeProvider.System;

    /// <summary>Gets a single fact, or null when absent.</summary>
    public Task<ReachabilityFact?> GetAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        _facts.TryGetValue(
            new ReachabilityFactKey(tenantId, componentPurl, advisoryId),
            out var fact);
        return Task.FromResult(fact);
    }

    /// <summary>Returns the subset of the requested keys that exist in the store.</summary>
    public Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        var found = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        foreach (var key in keys)
        {
            if (_facts.TryGetValue(key, out var fact))
            {
                found[key] = fact;
            }
        }

        return Task.FromResult<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>>(found);
    }

    /// <summary>Applies all query filters conjunctively; newest facts first.</summary>
    public Task<IReadOnlyList<ReachabilityFact>> QueryAsync(
        ReachabilityFactsQuery query,
        CancellationToken cancellationToken = default)
    {
        var now = _timeProvider.GetUtcNow();

        bool Matches(ReachabilityFact fact) =>
            fact.TenantId == query.TenantId
            && (query.ComponentPurls == null || query.ComponentPurls.Contains(fact.ComponentPurl))
            && (query.AdvisoryIds == null || query.AdvisoryIds.Contains(fact.AdvisoryId))
            && (query.States == null || query.States.Contains(fact.State))
            && (!query.MinConfidence.HasValue || fact.Confidence >= query.MinConfidence.Value)
            && (query.IncludeExpired || !fact.ExpiresAt.HasValue || fact.ExpiresAt > now);

        IReadOnlyList<ReachabilityFact> page = _facts.Values
            .Where(Matches)
            .OrderByDescending(fact => fact.ComputedAt)
            .Skip(query.Skip)
            .Take(query.Limit)
            .ToList();

        return Task.FromResult(page);
    }

    /// <summary>Inserts or replaces a fact keyed by (tenant, purl, advisory).</summary>
    public Task SaveAsync(ReachabilityFact fact, CancellationToken cancellationToken = default)
    {
        _facts[KeyOf(fact)] = fact;
        return Task.CompletedTask;
    }

    /// <summary>Inserts or replaces each fact in the batch.</summary>
    public Task SaveBatchAsync(IReadOnlyList<ReachabilityFact> facts, CancellationToken cancellationToken = default)
    {
        foreach (var fact in facts)
        {
            _facts[KeyOf(fact)] = fact;
        }

        return Task.CompletedTask;
    }

    /// <summary>Removes a fact; no-op when absent.</summary>
    public Task DeleteAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        _facts.TryRemove(new ReachabilityFactKey(tenantId, componentPurl, advisoryId), out _);
        return Task.CompletedTask;
    }

    /// <summary>Counts all facts belonging to the tenant.</summary>
    public Task<long> CountAsync(string tenantId, CancellationToken cancellationToken = default)
        => Task.FromResult(_facts.Values.LongCount(fact => fact.TenantId == tenantId));

    // Builds the composite dictionary key for a fact.
    private static ReachabilityFactKey KeyOf(ReachabilityFact fact)
        => new(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId);
}
/// <summary>
/// Index definitions for MongoDB reachability_facts collection.
/// </summary>
public static class ReachabilityFactsIndexes
{
    /// <summary>
    /// Primary compound index for efficient lookups.
    /// </summary>
    public const string PrimaryIndex = "tenant_component_advisory";

    /// <summary>
    /// Index for querying by tenant and state.
    /// </summary>
    public const string TenantStateIndex = "tenant_state_computed";

    /// <summary>
    /// Index for TTL expiration.
    /// </summary>
    public const string ExpirationIndex = "expires_at_ttl";

    /// <summary>
    /// Gets the index definitions for creating MongoDB indexes.
    /// </summary>
    public static IReadOnlyList<ReachabilityIndexDefinition> GetIndexDefinitions()
    {
        var definitions = new List<ReachabilityIndexDefinition>
        {
            new(
                PrimaryIndex,
                new[] { "tenant_id", "component_purl", "advisory_id" },
                Unique: true),
            new(
                TenantStateIndex,
                new[] { "tenant_id", "state", "computed_at" },
                Unique: false),
            new(
                ExpirationIndex,
                new[] { "expires_at" },
                Unique: false,
                ExpireAfterSeconds: 0),
        };
        return definitions;
    }
}
/// <summary>
/// Index definition for MongoDB collection.
/// </summary>
/// <param name="Name">Index name.</param>
/// <param name="Fields">Ordered list of field names in the (compound) index.</param>
/// <param name="Unique">Whether the index enforces uniqueness.</param>
/// <param name="ExpireAfterSeconds">TTL in seconds for document expiry; null when not a TTL index.</param>
public sealed record ReachabilityIndexDefinition(
    string Name,
    IReadOnlyList<string> Fields,
    bool Unique,
    int? ExpireAfterSeconds = null);

View File

@@ -0,0 +1,308 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// PURL equivalence table for mapping package identifiers across ecosystems.
/// Enables matching when the same package has different identifiers in
/// different sources (e.g., npm vs GitHub advisory database naming).
/// </summary>
public sealed class PurlEquivalenceTable
{
    private readonly ImmutableDictionary<string, ImmutableHashSet<string>> _equivalenceGroups;
    private readonly ImmutableDictionary<string, string> _canonicalMapping;

    private PurlEquivalenceTable(
        ImmutableDictionary<string, ImmutableHashSet<string>> equivalenceGroups,
        ImmutableDictionary<string, string> canonicalMapping)
    {
        _equivalenceGroups = equivalenceGroups;
        _canonicalMapping = canonicalMapping;
    }

    /// <summary>
    /// Creates an empty equivalence table.
    /// </summary>
    public static PurlEquivalenceTable Empty { get; } = new(
        ImmutableDictionary<string, ImmutableHashSet<string>>.Empty,
        ImmutableDictionary<string, string>.Empty);

    /// <summary>
    /// Creates an equivalence table from a list of equivalence groups.
    /// Each group contains PURLs that should be considered equivalent.
    /// Groups with fewer than two distinct members are ignored.
    /// </summary>
    public static PurlEquivalenceTable FromGroups(IEnumerable<IEnumerable<string>> groups)
    {
        var equivalenceBuilder = ImmutableDictionary.CreateBuilder<string, ImmutableHashSet<string>>(
            StringComparer.OrdinalIgnoreCase);
        var canonicalBuilder = ImmutableDictionary.CreateBuilder<string, string>(
            StringComparer.OrdinalIgnoreCase);
        foreach (var group in groups)
        {
            // Normalize to trimmed lower-case and sort into a list first.
            // BUG FIX: the canonical member must be picked from the *sorted
            // list* — ImmutableHashSet enumeration is unordered, so calling
            // First() on the set did not reliably yield the lexicographic
            // minimum and made the canonical choice nondeterministic.
            var ordered = group
                .Where(p => !string.IsNullOrWhiteSpace(p))
                .Select(p => p.Trim().ToLowerInvariant())
                .Distinct()
                .OrderBy(p => p, StringComparer.Ordinal)
                .ToList();
            if (ordered.Count < 2)
            {
                continue;
            }
            // First item (lexicographically, ordinal) is the canonical form.
            var canonical = ordered[0];
            var normalizedGroup = ordered.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);
            foreach (var purl in ordered)
            {
                equivalenceBuilder[purl] = normalizedGroup;
                canonicalBuilder[purl] = canonical;
            }
        }
        return new PurlEquivalenceTable(
            equivalenceBuilder.ToImmutable(),
            canonicalBuilder.ToImmutable());
    }

    /// <summary>
    /// Gets the canonical form of a PURL, or the normalized original if not in the table.
    /// </summary>
    public string GetCanonical(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return string.Empty;
        }
        var normalized = purl.Trim().ToLowerInvariant();
        return _canonicalMapping.TryGetValue(normalized, out var canonical)
            ? canonical
            : normalized;
    }

    /// <summary>
    /// Gets all equivalent PURLs for a given PURL (a singleton set when unknown).
    /// </summary>
    public IReadOnlySet<string> GetEquivalents(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return ImmutableHashSet<string>.Empty;
        }
        var normalized = purl.Trim().ToLowerInvariant();
        return _equivalenceGroups.TryGetValue(normalized, out var group)
            ? group
            : ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, normalized);
    }

    /// <summary>
    /// Checks if two PURLs are equivalent (identical after normalization,
    /// or sharing the same canonical form).
    /// </summary>
    public bool AreEquivalent(string purl1, string purl2)
    {
        if (string.IsNullOrWhiteSpace(purl1) || string.IsNullOrWhiteSpace(purl2))
        {
            return false;
        }
        var norm1 = purl1.Trim().ToLowerInvariant();
        var norm2 = purl2.Trim().ToLowerInvariant();
        if (string.Equals(norm1, norm2, StringComparison.Ordinal))
        {
            return true;
        }
        var canonical1 = GetCanonical(norm1);
        var canonical2 = GetCanonical(norm2);
        return string.Equals(canonical1, canonical2, StringComparison.Ordinal);
    }

    /// <summary>
    /// Number of equivalence groups in the table.
    /// Counted via distinct canonical values, which is deterministic (the
    /// previous Select(g => g.First()) relied on unordered set enumeration).
    /// </summary>
    public int GroupCount => _canonicalMapping
        .Values
        .Distinct(StringComparer.OrdinalIgnoreCase)
        .Count();

    /// <summary>
    /// Total number of PURLs in the table.
    /// </summary>
    public int TotalEntries => _canonicalMapping.Count;
}
/// <summary>
/// Static utilities for PURL equivalence matching.
/// </summary>
public static class PurlEquivalence
{
    /// <summary>
    /// Extracts the package key from a PURL (removes version suffix).
    /// Example: "pkg:npm/lodash@4.17.21" → "pkg:npm/lodash"
    /// </summary>
    public static string ExtractPackageKey(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return string.Empty;
        }

        var candidate = purl.Trim();
        var versionSeparator = candidate.LastIndexOf('@');
        if (versionSeparator <= 0)
        {
            return candidate;
        }

        // A scoped package (pkg:npm/@scope/name@1.0.0) has an earlier '@';
        // then the last '@' must be the version separator.
        if (candidate.IndexOf('@') < versionSeparator)
        {
            return candidate[..versionSeparator];
        }

        // Otherwise only strip when what follows looks like a version
        // (starts with a digit or a leading 'v').
        var suffix = candidate[(versionSeparator + 1)..];
        return suffix.Length > 0 && (char.IsDigit(suffix[0]) || suffix[0] == 'v')
            ? candidate[..versionSeparator]
            : candidate;
    }

    /// <summary>
    /// Extracts the ecosystem from a PURL.
    /// Example: "pkg:npm/lodash@4.17.21" → "npm"
    /// </summary>
    public static string? ExtractEcosystem(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }

        var candidate = purl.Trim();
        if (!candidate.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
        {
            return null;
        }

        var body = candidate[4..]; // Skip "pkg:"
        var ecosystemEnd = body.IndexOf('/');
        return ecosystemEnd > 0 ? body[..ecosystemEnd] : null;
    }

    /// <summary>
    /// Extracts the namespace from a PURL (if present).
    /// Example: "pkg:npm/@scope/package@1.0.0" → "@scope"
    /// </summary>
    public static string? ExtractNamespace(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }

        var candidate = purl.Trim();
        if (!candidate.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
        {
            return null;
        }

        var body = candidate[4..];
        var ecosystemEnd = body.IndexOf('/');
        if (ecosystemEnd < 0)
        {
            return null;
        }

        var remainder = body[(ecosystemEnd + 1)..];
        var namespaceEnd = remainder.IndexOf('/');

        // Only a path with a second '/'-separated segment has a namespace component.
        return namespaceEnd > 0 ? remainder[..namespaceEnd] : null;
    }

    /// <summary>
    /// Extracts the package name from a PURL.
    /// Example: "pkg:npm/@scope/package@1.0.0" → "package"
    /// </summary>
    public static string? ExtractName(string purl)
    {
        var packageKey = ExtractPackageKey(purl);
        if (string.IsNullOrWhiteSpace(packageKey))
        {
            return null;
        }

        var lastSlash = packageKey.LastIndexOf('/');
        return lastSlash < 0 ? null : packageKey[(lastSlash + 1)..];
    }

    /// <summary>
    /// Computes match confidence between two PURLs.
    /// Returns 1.0 for exact match, 0.95 for a known equivalence, 0.8 for a
    /// package-key match (same package, different version), 0.5 for same
    /// ecosystem and name, 0.0 otherwise.
    /// </summary>
    public static double ComputeMatchConfidence(string purl1, string purl2, PurlEquivalenceTable? equivalenceTable = null)
    {
        if (string.IsNullOrWhiteSpace(purl1) || string.IsNullOrWhiteSpace(purl2))
        {
            return 0.0;
        }

        var left = purl1.Trim().ToLowerInvariant();
        var right = purl2.Trim().ToLowerInvariant();

        if (string.Equals(left, right, StringComparison.Ordinal))
        {
            return 1.0;
        }

        if (equivalenceTable is not null && equivalenceTable.AreEquivalent(left, right))
        {
            return 0.95;
        }

        var leftKey = ExtractPackageKey(left);
        if (!string.IsNullOrEmpty(leftKey) &&
            string.Equals(leftKey, ExtractPackageKey(right), StringComparison.OrdinalIgnoreCase))
        {
            return 0.8;
        }

        var leftEcosystem = ExtractEcosystem(left);
        var leftName = ExtractName(left);
        var sameEcosystemAndName =
            !string.IsNullOrEmpty(leftEcosystem) &&
            string.Equals(leftEcosystem, ExtractEcosystem(right), StringComparison.OrdinalIgnoreCase) &&
            !string.IsNullOrEmpty(leftName) &&
            string.Equals(leftName, ExtractName(right), StringComparison.OrdinalIgnoreCase);

        return sameEcosystemAndName ? 0.5 : 0.0;
    }
}

View File

@@ -0,0 +1,192 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// Represents an SBOM component for selection joining.
/// </summary>
/// <param name="Purl">Package URL (e.g., pkg:npm/lodash@4.17.21).</param>
/// <param name="Name">Component name.</param>
/// <param name="Version">Component version.</param>
/// <param name="Ecosystem">Package ecosystem (npm, maven, pypi, etc.); null when unknown.</param>
/// <param name="Metadata">Additional component metadata.</param>
public sealed record SbomComponentInput(
    string Purl,
    string Name,
    string Version,
    string? Ecosystem,
    ImmutableDictionary<string, string> Metadata)
{
    /// <summary>
    /// Extracts the package key from the PURL (removes the version suffix),
    /// e.g. "pkg:npm/lodash@4.17.21" → "pkg:npm/lodash".
    /// Recomputed on each access (not cached).
    /// </summary>
    public string PackageKey => PurlEquivalence.ExtractPackageKey(Purl);
}
/// <summary>
/// Represents an advisory linkset reference for selection joining.
/// </summary>
/// <param name="AdvisoryId">Advisory identifier (CVE, GHSA, etc.).</param>
/// <param name="Source">Advisory source.</param>
/// <param name="Purls">Affected PURLs from the advisory.</param>
/// <param name="Cpes">Affected CPEs from the advisory.</param>
/// <param name="Aliases">Advisory aliases (e.g., CVE-2021-1234, GHSA-xxxx).</param>
/// <param name="Confidence">Linkset confidence score; null is treated as full
/// confidence (1.0) when fused in <see cref="SelectionJoinService"/>.</param>
public sealed record AdvisoryLinksetInput(
    string AdvisoryId,
    string Source,
    ImmutableArray<string> Purls,
    ImmutableArray<string> Cpes,
    ImmutableArray<string> Aliases,
    double? Confidence);
/// <summary>
/// Represents a VEX linkset reference for selection joining.
/// </summary>
/// <param name="LinksetId">VEX linkset identifier.</param>
/// <param name="VulnerabilityId">Vulnerability identifier.</param>
/// <param name="ProductKey">Product key (PURL or CPE).</param>
/// <param name="Status">VEX status (not_affected, affected, fixed, under_investigation).</param>
/// <param name="Justification">VEX justification; null when the status carries none.</param>
/// <param name="Confidence">Linkset confidence level.</param>
public sealed record VexLinksetInput(
    string LinksetId,
    string VulnerabilityId,
    string ProductKey,
    string Status,
    string? Justification,
    VexConfidenceLevel Confidence);
/// <summary>
/// VEX confidence level enumeration (numeric values ordered low → high).
/// </summary>
public enum VexConfidenceLevel
{
    /// <summary>Low confidence.</summary>
    Low = 0,
    /// <summary>Medium confidence.</summary>
    Medium = 1,
    /// <summary>High confidence.</summary>
    High = 2
}
/// <summary>
/// Represents a resolved SBOM↔Advisory↔VEX tuple.
/// </summary>
/// <param name="TupleId">Deterministic identifier for this tuple.</param>
/// <param name="Component">The SBOM component.</param>
/// <param name="Advisory">The matched advisory linkset.</param>
/// <param name="Vex">The matched VEX linkset (if any).</param>
/// <param name="MatchType">How the match was determined.</param>
/// <param name="MatchConfidence">Overall confidence in the match.</param>
public sealed record SelectionJoinTuple(
    string TupleId,
    SbomComponentInput Component,
    AdvisoryLinksetInput Advisory,
    VexLinksetInput? Vex,
    SelectionMatchType MatchType,
    double MatchConfidence)
{
    /// <summary>
    /// Creates a deterministic tuple ID: "tuple:sha256:&lt;hex&gt;" over the
    /// string "tenant|purl|advisory", where tenant and purl are trimmed and
    /// lower-cased and the advisory id is trimmed but keeps its casing.
    /// </summary>
    public static string CreateTupleId(string tenantId, string componentPurl, string advisoryId)
    {
        var tenant = (tenantId ?? string.Empty).Trim().ToLowerInvariant();
        var purl = (componentPurl ?? string.Empty).Trim().ToLowerInvariant();
        var advisory = (advisoryId ?? string.Empty).Trim();
        var payload = string.Join('|', tenant, purl, advisory);
        var digest = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(payload))).ToLowerInvariant();
        return $"tuple:sha256:{digest}";
    }
}
/// <summary>
/// How the selection match was determined.
/// </summary>
public enum SelectionMatchType
{
    /// <summary>Exact PURL match (base confidence 1.0 in <see cref="SelectionJoinService"/>).</summary>
    ExactPurl,
    /// <summary>Package key match (same package, different version; base confidence 0.8).</summary>
    PackageKeyMatch,
    /// <summary>CPE vendor/product match. NOTE(review): not currently emitted by SelectionJoinService.</summary>
    CpeMatch,
    /// <summary>Alias-based match. NOTE(review): not currently emitted by SelectionJoinService.</summary>
    AliasMatch,
    /// <summary>Equivalence table match (base confidence 0.9).</summary>
    EquivalenceMatch,
    /// <summary>No direct match, linked via advisory reference. NOTE(review): not currently emitted by SelectionJoinService.</summary>
    IndirectMatch
}
/// <summary>
/// Input for a selection join batch operation.
/// </summary>
/// <param name="TenantId">Tenant identifier (folded into each tuple id).</param>
/// <param name="BatchId">Unique batch identifier for tracing; echoed in the result.</param>
/// <param name="Components">SBOM components to match.</param>
/// <param name="Advisories">Advisory linksets to match against.</param>
/// <param name="VexLinksets">VEX linksets to include.</param>
/// <param name="EquivalenceTable">Optional PURL equivalence mappings; null falls back to an empty table.</param>
/// <param name="Options">Batch processing options.</param>
public sealed record SelectionJoinBatchInput(
    string TenantId,
    string BatchId,
    ImmutableArray<SbomComponentInput> Components,
    ImmutableArray<AdvisoryLinksetInput> Advisories,
    ImmutableArray<VexLinksetInput> VexLinksets,
    PurlEquivalenceTable? EquivalenceTable,
    SelectionJoinOptions Options);
/// <summary>
/// Options for selection join batch processing.
/// </summary>
/// <param name="MaxBatchSize">Maximum items per batch for deterministic chunking.
/// Values &lt;= 0 fall back to 1000 in <see cref="SelectionJoinService"/>.</param>
/// <param name="IncludeIndirectMatches">Include indirect matches via advisory references.
/// NOTE(review): not currently honored by SelectionJoinService — confirm whether the feature is still planned.</param>
/// <param name="MinConfidenceThreshold">Minimum confidence for a tuple to be included in results.</param>
public sealed record SelectionJoinOptions(
    int MaxBatchSize = 1000,
    bool IncludeIndirectMatches = false,
    double MinConfidenceThreshold = 0.0);
/// <summary>
/// Result of a selection join batch operation.
/// </summary>
/// <param name="BatchId">Batch identifier for tracing (copied from the input).</param>
/// <param name="Tuples">Resolved tuples, sorted by component PURL then advisory id.</param>
/// <param name="UnmatchedComponents">Components with no advisory matches, sorted by PURL.</param>
/// <param name="Statistics">Batch statistics.</param>
public sealed record SelectionJoinBatchResult(
    string BatchId,
    ImmutableArray<SelectionJoinTuple> Tuples,
    ImmutableArray<SbomComponentInput> UnmatchedComponents,
    SelectionJoinStatistics Statistics);
/// <summary>
/// Statistics for a selection join batch.
/// </summary>
/// <param name="TotalComponents">Total components in input.</param>
/// <param name="TotalAdvisories">Total advisories in input.</param>
/// <param name="MatchedTuples">Number of matched tuples emitted (after the confidence threshold filter).</param>
/// <param name="ExactPurlMatches">Exact PURL matches.</param>
/// <param name="PackageKeyMatches">Package key matches.</param>
/// <param name="CpeMatches">CPE matches.</param>
/// <param name="EquivalenceMatches">Equivalence table matches.</param>
/// <param name="VexOverlays">Tuples with VEX overlays.</param>
/// <param name="ProcessingTimeMs">Processing time in milliseconds.</param>
public sealed record SelectionJoinStatistics(
    int TotalComponents,
    int TotalAdvisories,
    int MatchedTuples,
    int ExactPurlMatches,
    int PackageKeyMatches,
    int CpeMatches,
    int EquivalenceMatches,
    int VexOverlays,
    long ProcessingTimeMs);

View File

@@ -0,0 +1,390 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// Service for resolving SBOM↔Advisory↔VEX tuples using linksets and PURL equivalence.
/// All operations are deterministic: given identical inputs, produces identical outputs.
/// </summary>
public sealed class SelectionJoinService
{
    /// <summary>
    /// Resolves SBOM components against advisory and VEX linksets.
    /// Uses deterministic batching for large datasets.
    /// </summary>
    /// <param name="input">Batch input; must not be null.</param>
    /// <returns>Tuples, unmatched components, and statistics, all sorted deterministically.</returns>
    public SelectionJoinBatchResult ResolveTuples(SelectionJoinBatchInput input)
    {
        ArgumentNullException.ThrowIfNull(input);
        var stopwatch = Stopwatch.StartNew();
        var equivalenceTable = input.EquivalenceTable ?? PurlEquivalenceTable.Empty;
        // NOTE(review): options.IncludeIndirectMatches is never consulted below —
        // confirm whether indirect matching is still planned.
        var options = input.Options;
        // Build lookup indexes for deterministic matching
        var advisoryIndex = BuildAdvisoryIndex(input.Advisories);
        var vexIndex = BuildVexIndex(input.VexLinksets);
        // Process components in deterministic order
        var orderedComponents = input.Components
            .OrderBy(c => c.Purl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(c => c.Name, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        var tuples = new List<SelectionJoinTuple>();
        var unmatched = new List<SbomComponentInput>();
        var stats = new SelectionJoinStatsBuilder();
        stats.TotalComponents = orderedComponents.Length;
        stats.TotalAdvisories = input.Advisories.Length;
        // Process in batches for memory efficiency
        var batches = CreateDeterministicBatches(orderedComponents, options.MaxBatchSize);
        foreach (var batch in batches)
        {
            ProcessBatch(
                batch,
                input.TenantId,
                advisoryIndex,
                vexIndex,
                equivalenceTable,
                options,
                tuples,
                unmatched,
                stats);
        }
        stopwatch.Stop();
        stats.ProcessingTimeMs = stopwatch.ElapsedMilliseconds;
        // Sort results for deterministic output
        var sortedTuples = tuples
            .OrderBy(t => t.Component.Purl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(t => t.Advisory.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        var sortedUnmatched = unmatched
            .OrderBy(c => c.Purl, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        return new SelectionJoinBatchResult(
            input.BatchId,
            sortedTuples,
            sortedUnmatched,
            stats.Build());
    }
    /// <summary>
    /// Matches one batch of components and appends tuples/unmatched entries and
    /// statistics into the shared accumulators.
    /// </summary>
    private static void ProcessBatch(
        IReadOnlyList<SbomComponentInput> components,
        string tenantId,
        AdvisoryIndex advisoryIndex,
        VexIndex vexIndex,
        PurlEquivalenceTable equivalenceTable,
        SelectionJoinOptions options,
        List<SelectionJoinTuple> tuples,
        List<SbomComponentInput> unmatched,
        SelectionJoinStatsBuilder stats)
    {
        foreach (var component in components)
        {
            var matches = FindAdvisoryMatches(component, advisoryIndex, equivalenceTable, options);
            if (matches.Count == 0)
            {
                unmatched.Add(component);
                continue;
            }
            // NOTE(review): a component whose every match falls below
            // MinConfidenceThreshold produces no tuples but is also NOT added to
            // the unmatched list — confirm this asymmetry is intended.
            foreach (var (advisory, matchType, confidence) in matches)
            {
                if (confidence < options.MinConfidenceThreshold)
                {
                    continue;
                }
                // Find matching VEX linkset
                var vex = FindVexMatch(component, advisory, vexIndex);
                var tupleId = SelectionJoinTuple.CreateTupleId(
                    tenantId,
                    component.Purl,
                    advisory.AdvisoryId);
                var tuple = new SelectionJoinTuple(
                    tupleId,
                    component,
                    advisory,
                    vex,
                    matchType,
                    confidence);
                tuples.Add(tuple);
                // Update statistics
                stats.MatchedTuples++;
                switch (matchType)
                {
                    case SelectionMatchType.ExactPurl:
                        stats.ExactPurlMatches++;
                        break;
                    case SelectionMatchType.PackageKeyMatch:
                        stats.PackageKeyMatches++;
                        break;
                    case SelectionMatchType.CpeMatch:
                        stats.CpeMatches++;
                        break;
                    case SelectionMatchType.EquivalenceMatch:
                        stats.EquivalenceMatches++;
                        break;
                }
                if (vex is not null)
                {
                    stats.VexOverlays++;
                }
            }
        }
    }
    /// <summary>
    /// Finds advisory candidates for a component in three passes — exact PURL
    /// (base 1.0), package key (base 0.8), equivalence table (base 0.9) — each
    /// deduplicated against earlier passes by AdvisoryId, then sorted by
    /// confidence descending (ties broken by AdvisoryId) for determinism.
    /// NOTE(review): the options parameter is currently unused here, and the
    /// AdvisoryId dedup uses ordinal ==, unlike the OrdinalIgnoreCase used elsewhere.
    /// </summary>
    private static IReadOnlyList<(AdvisoryLinksetInput Advisory, SelectionMatchType MatchType, double Confidence)> FindAdvisoryMatches(
        SbomComponentInput component,
        AdvisoryIndex index,
        PurlEquivalenceTable equivalenceTable,
        SelectionJoinOptions options)
    {
        var matches = new List<(AdvisoryLinksetInput, SelectionMatchType, double)>();
        var componentPurl = component.Purl.ToLowerInvariant();
        var componentKey = component.PackageKey.ToLowerInvariant();
        // 1. Exact PURL match (highest confidence)
        if (index.ByExactPurl.TryGetValue(componentPurl, out var exactMatches))
        {
            foreach (var advisory in exactMatches)
            {
                var confidence = ComputeFinalConfidence(1.0, advisory.Confidence);
                matches.Add((advisory, SelectionMatchType.ExactPurl, confidence));
            }
        }
        // 2. Package key match (same package, possibly different version)
        if (index.ByPackageKey.TryGetValue(componentKey, out var keyMatches))
        {
            foreach (var advisory in keyMatches)
            {
                // Skip if already matched by exact PURL
                if (matches.Any(m => m.Item1.AdvisoryId == advisory.AdvisoryId))
                {
                    continue;
                }
                var confidence = ComputeFinalConfidence(0.8, advisory.Confidence);
                matches.Add((advisory, SelectionMatchType.PackageKeyMatch, confidence));
            }
        }
        // 3. Equivalence table match
        var equivalents = equivalenceTable.GetEquivalents(componentPurl);
        foreach (var equivalent in equivalents)
        {
            if (string.Equals(equivalent, componentPurl, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            var equivalentKey = PurlEquivalence.ExtractPackageKey(equivalent).ToLowerInvariant();
            if (index.ByPackageKey.TryGetValue(equivalentKey, out var equivMatches))
            {
                foreach (var advisory in equivMatches)
                {
                    if (matches.Any(m => m.Item1.AdvisoryId == advisory.AdvisoryId))
                    {
                        continue;
                    }
                    var confidence = ComputeFinalConfidence(0.9, advisory.Confidence);
                    matches.Add((advisory, SelectionMatchType.EquivalenceMatch, confidence));
                }
            }
        }
        // Sort matches by confidence (descending) for deterministic ordering
        return matches
            .OrderByDescending(m => m.Item3)
            .ThenBy(m => m.Item1.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToList();
    }
    /// <summary>
    /// Finds the VEX record for a component/advisory pair, trying each advisory
    /// alias against the full product key and the package key, then the advisory
    /// id itself against the full product key. Returns null when nothing matches.
    /// </summary>
    private static VexLinksetInput? FindVexMatch(
        SbomComponentInput component,
        AdvisoryLinksetInput advisory,
        VexIndex vexIndex)
    {
        // Try exact vulnerability ID + product key match
        foreach (var alias in advisory.Aliases)
        {
            var key = $"{alias.ToLowerInvariant()}|{component.Purl.ToLowerInvariant()}";
            if (vexIndex.ByVulnAndProduct.TryGetValue(key, out var vex))
            {
                return vex;
            }
            // Try package key match
            var pkgKey = $"{alias.ToLowerInvariant()}|{component.PackageKey.ToLowerInvariant()}";
            if (vexIndex.ByVulnAndPackageKey.TryGetValue(pkgKey, out vex))
            {
                return vex;
            }
        }
        // Try advisory ID directly
        var directKey = $"{advisory.AdvisoryId.ToLowerInvariant()}|{component.Purl.ToLowerInvariant()}";
        if (vexIndex.ByVulnAndProduct.TryGetValue(directKey, out var directVex))
        {
            return directVex;
        }
        return null;
    }
    /// <summary>
    /// Combines a match-type base confidence with the linkset's own confidence;
    /// a null linkset confidence counts as full (1.0).
    /// </summary>
    private static double ComputeFinalConfidence(double matchConfidence, double? linksetConfidence)
    {
        var linkset = linksetConfidence ?? 1.0;
        // Geometric mean of match confidence and linkset confidence
        return Math.Sqrt(matchConfidence * linkset);
    }
    /// <summary>
    /// Builds case-insensitive lookup tables keyed by exact (lower-cased) PURL
    /// and by version-stripped package key. The package-key table deduplicates
    /// advisories so one advisory listing several versions appears once per key.
    /// </summary>
    private static AdvisoryIndex BuildAdvisoryIndex(ImmutableArray<AdvisoryLinksetInput> advisories)
    {
        var byExactPurl = new Dictionary<string, List<AdvisoryLinksetInput>>(StringComparer.OrdinalIgnoreCase);
        var byPackageKey = new Dictionary<string, List<AdvisoryLinksetInput>>(StringComparer.OrdinalIgnoreCase);
        foreach (var advisory in advisories)
        {
            foreach (var purl in advisory.Purls)
            {
                var normalizedPurl = purl.ToLowerInvariant();
                var packageKey = PurlEquivalence.ExtractPackageKey(normalizedPurl);
                if (!byExactPurl.TryGetValue(normalizedPurl, out var exactList))
                {
                    exactList = new List<AdvisoryLinksetInput>();
                    byExactPurl[normalizedPurl] = exactList;
                }
                exactList.Add(advisory);
                if (!string.IsNullOrEmpty(packageKey))
                {
                    if (!byPackageKey.TryGetValue(packageKey, out var keyList))
                    {
                        keyList = new List<AdvisoryLinksetInput>();
                        byPackageKey[packageKey] = keyList;
                    }
                    // Avoid duplicates in the same advisory
                    if (!keyList.Any(a => a.AdvisoryId == advisory.AdvisoryId))
                    {
                        keyList.Add(advisory);
                    }
                }
            }
        }
        return new AdvisoryIndex(
            byExactPurl.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray(),
                StringComparer.OrdinalIgnoreCase),
            byPackageKey.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray(),
                StringComparer.OrdinalIgnoreCase));
    }
    /// <summary>
    /// Builds VEX lookups keyed by "vuln|productKey" and "vuln|packageKey"
    /// (all lower-cased). TryAdd means the FIRST VEX record for a key wins;
    /// input order therefore matters for duplicates.
    /// </summary>
    private static VexIndex BuildVexIndex(ImmutableArray<VexLinksetInput> vexLinksets)
    {
        var byVulnAndProduct = new Dictionary<string, VexLinksetInput>(StringComparer.OrdinalIgnoreCase);
        var byVulnAndPackageKey = new Dictionary<string, VexLinksetInput>(StringComparer.OrdinalIgnoreCase);
        foreach (var vex in vexLinksets)
        {
            var vulnKey = vex.VulnerabilityId.ToLowerInvariant();
            var productKey = vex.ProductKey.ToLowerInvariant();
            var packageKey = PurlEquivalence.ExtractPackageKey(productKey);
            var exactKey = $"{vulnKey}|{productKey}";
            byVulnAndProduct.TryAdd(exactKey, vex);
            if (!string.IsNullOrEmpty(packageKey))
            {
                var pkgLookupKey = $"{vulnKey}|{packageKey}";
                byVulnAndPackageKey.TryAdd(pkgLookupKey, vex);
            }
        }
        return new VexIndex(
            byVulnAndProduct.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase),
            byVulnAndPackageKey.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase));
    }
    /// <summary>
    /// Splits the already-sorted component array into consecutive chunks of
    /// <paramref name="batchSize"/>; non-positive sizes fall back to 1000.
    /// </summary>
    private static IReadOnlyList<IReadOnlyList<SbomComponentInput>> CreateDeterministicBatches(
        ImmutableArray<SbomComponentInput> components,
        int batchSize)
    {
        if (batchSize <= 0)
        {
            batchSize = 1000;
        }
        var batches = new List<IReadOnlyList<SbomComponentInput>>();
        for (var i = 0; i < components.Length; i += batchSize)
        {
            var remaining = components.Length - i;
            var count = Math.Min(batchSize, remaining);
            var batch = new List<SbomComponentInput>(count);
            for (var j = 0; j < count; j++)
            {
                batch.Add(components[i + j]);
            }
            batches.Add(batch);
        }
        return batches;
    }
    private sealed record AdvisoryIndex(
        ImmutableDictionary<string, ImmutableArray<AdvisoryLinksetInput>> ByExactPurl,
        ImmutableDictionary<string, ImmutableArray<AdvisoryLinksetInput>> ByPackageKey);
    private sealed record VexIndex(
        ImmutableDictionary<string, VexLinksetInput> ByVulnAndProduct,
        ImmutableDictionary<string, VexLinksetInput> ByVulnAndPackageKey);
    /// <summary>
    /// Mutable accumulator for batch statistics; frozen into an immutable
    /// <see cref="SelectionJoinStatistics"/> via <see cref="Build"/>.
    /// </summary>
    private sealed class SelectionJoinStatsBuilder
    {
        public int TotalComponents { get; set; }
        public int TotalAdvisories { get; set; }
        public int MatchedTuples { get; set; }
        public int ExactPurlMatches { get; set; }
        public int PackageKeyMatches { get; set; }
        public int CpeMatches { get; set; }
        public int EquivalenceMatches { get; set; }
        public int VexOverlays { get; set; }
        public long ProcessingTimeMs { get; set; }
        public SelectionJoinStatistics Build() => new(
            TotalComponents,
            TotalAdvisories,
            MatchedTuples,
            ExactPurlMatches,
            PackageKeyMatches,
            CpeMatches,
            EquivalenceMatches,
            VexOverlays,
            ProcessingTimeMs);
    }
}

View File

@@ -0,0 +1,212 @@
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Violations;
namespace StellaOps.Policy.Engine.Services;
/// <summary>
/// API/SDK utilities for consumers to request policy decisions with source evidence summaries (POLICY-ENGINE-40-003).
/// Combines policy evaluation with severity fusion, conflict detection, and evidence summaries.
/// </summary>
internal sealed class PolicyDecisionService
{
    private readonly ViolationEventService _eventService;
    private readonly SeverityFusionService _fusionService;
    private readonly ConflictHandlingService _conflictService;
    private readonly EvidenceSummaryService _evidenceService;
    /// <summary>
    /// Creates the service; all dependencies are required.
    /// </summary>
    /// <exception cref="ArgumentNullException">When any dependency is null.</exception>
    public PolicyDecisionService(
        ViolationEventService eventService,
        SeverityFusionService fusionService,
        ConflictHandlingService conflictService,
        EvidenceSummaryService evidenceService)
    {
        _eventService = eventService ?? throw new ArgumentNullException(nameof(eventService));
        _fusionService = fusionService ?? throw new ArgumentNullException(nameof(fusionService));
        _conflictService = conflictService ?? throw new ArgumentNullException(nameof(conflictService));
        _evidenceService = evidenceService ?? throw new ArgumentNullException(nameof(evidenceService));
    }
    /// <summary>
    /// Request policy decisions with source evidence summaries for a given snapshot.
    /// </summary>
    /// <param name="request">Decision request; must carry a snapshot id.</param>
    /// <param name="cancellationToken">Cancellation token flowed to downstream services.</param>
    /// <exception cref="ArgumentException">When the request has no snapshot id.</exception>
    public async Task<PolicyDecisionResponse> GetDecisionsAsync(
        PolicyDecisionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        if (string.IsNullOrWhiteSpace(request.SnapshotId))
        {
            throw new ArgumentException("snapshot_id is required", nameof(request));
        }
        // 1. Emit violation events from snapshot
        var eventRequest = new ViolationEventRequest(request.SnapshotId);
        await _eventService.EmitAsync(eventRequest, cancellationToken).ConfigureAwait(false);
        // 2. Get fused severities with sources
        var fused = await _fusionService.FuseAsync(request.SnapshotId, cancellationToken).ConfigureAwait(false);
        // 3. Compute conflicts
        var conflicts = await _conflictService.ComputeAsync(request.SnapshotId, fused, cancellationToken).ConfigureAwait(false);
        // 4. Build decision items with evidence summaries
        var decisions = BuildDecisionItems(request, fused, conflicts);
        // 5. Build summary statistics
        var summary = BuildSummary(decisions, fused);
        return new PolicyDecisionResponse(
            SnapshotId: request.SnapshotId,
            Decisions: decisions,
            Summary: summary);
    }
    /// <summary>
    /// Projects fusion results into decision items, applying the request's
    /// tenant/purl/advisory filters, ranking sources, attaching optional
    /// evidence, and sorting the output deterministically (purl, advisory, tenant).
    /// NOTE(review): the ComponentPurl filter is case-sensitive (Ordinal) while
    /// the tenant and advisory filters ignore case — confirm intended.
    /// </summary>
    private IReadOnlyList<PolicyDecisionItem> BuildDecisionItems(
        PolicyDecisionRequest request,
        IReadOnlyList<SeverityFusionResult> fused,
        IReadOnlyList<ConflictRecord> conflicts)
    {
        var conflictLookup = conflicts
            .GroupBy(c => (c.ComponentPurl, c.AdvisoryId))
            .ToDictionary(
                g => g.Key,
                g => g.Sum(c => c.Conflicts.Count));
        var items = new List<PolicyDecisionItem>(fused.Count);
        foreach (var fusion in fused)
        {
            // Apply filters if specified
            if (!string.IsNullOrWhiteSpace(request.TenantId) &&
                !string.Equals(fusion.TenantId, request.TenantId, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            if (!string.IsNullOrWhiteSpace(request.ComponentPurl) &&
                !string.Equals(fusion.ComponentPurl, request.ComponentPurl, StringComparison.Ordinal))
            {
                continue;
            }
            if (!string.IsNullOrWhiteSpace(request.AdvisoryId) &&
                !string.Equals(fusion.AdvisoryId, request.AdvisoryId, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            // Build top sources (limited by MaxSources)
            var topSources = fusion.Sources
                .OrderByDescending(s => s.Score)
                .ThenByDescending(s => s.Weight)
                .Take(request.MaxSources)
                .Select((s, index) => new PolicyDecisionSource(
                    Source: s.Source,
                    Weight: s.Weight,
                    Severity: s.Severity,
                    Score: s.Score,
                    Rank: index + 1))
                .ToList();
            // Build evidence summary if requested
            PolicyDecisionEvidence? evidence = null;
            if (request.IncludeEvidence)
            {
                evidence = BuildEvidence(fusion);
            }
            // Get conflict count for this component/advisory pair
            var conflictKey = (fusion.ComponentPurl, fusion.AdvisoryId);
            var conflictCount = conflictLookup.GetValueOrDefault(conflictKey, 0);
            // Derive status from severity
            var status = DeriveStatus(fusion.SeverityFused);
            items.Add(new PolicyDecisionItem(
                TenantId: fusion.TenantId,
                ComponentPurl: fusion.ComponentPurl,
                AdvisoryId: fusion.AdvisoryId,
                SeverityFused: fusion.SeverityFused,
                Score: fusion.Score,
                Status: status,
                TopSources: topSources,
                Evidence: evidence,
                ConflictCount: conflictCount,
                ReasonCodes: fusion.ReasonCodes));
        }
        // Return deterministically ordered results
        return items
            .OrderBy(i => i.ComponentPurl, StringComparer.Ordinal)
            .ThenBy(i => i.AdvisoryId, StringComparer.Ordinal)
            .ThenBy(i => i.TenantId, StringComparer.Ordinal)
            .ToList();
    }
    /// <summary>
    /// Builds the evidence overlay for one fusion result via the evidence
    /// summary service. The "hash" is a deterministic purl|advisory|snapshot
    /// locator string, not a cryptographic digest.
    /// </summary>
    private PolicyDecisionEvidence BuildEvidence(SeverityFusionResult fusion)
    {
        // Build a deterministic evidence hash from the fusion result
        var evidenceHash = $"{fusion.ComponentPurl}|{fusion.AdvisoryId}|{fusion.SnapshotId}";
        var evidenceRequest = new EvidenceSummaryRequest(
            EvidenceHash: evidenceHash,
            FilePath: fusion.ComponentPurl,
            Digest: null,
            IngestedAt: null,
            ConnectorId: fusion.Sources.FirstOrDefault()?.Source);
        var response = _evidenceService.Summarize(evidenceRequest);
        return new PolicyDecisionEvidence(
            Headline: response.Summary.Headline,
            Severity: response.Summary.Severity,
            Locator: new PolicyDecisionLocator(
                FilePath: response.Summary.Locator.FilePath,
                Digest: response.Summary.Locator.Digest),
            Signals: response.Summary.Signals);
    }
    /// <summary>
    /// Aggregates per-decision severity counts, total conflicts, and per-source
    /// weight/score rankings. Note source stats are computed over ALL fusion
    /// results, not only the (possibly filtered) decisions.
    /// </summary>
    private static PolicyDecisionSummary BuildSummary(
        IReadOnlyList<PolicyDecisionItem> decisions,
        IReadOnlyList<SeverityFusionResult> fused)
    {
        // Count decisions by severity
        var severityCounts = decisions
            .GroupBy(d => d.SeverityFused, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(
                g => g.Key,
                g => g.Count(),
                StringComparer.OrdinalIgnoreCase);
        // Calculate total conflicts
        var totalConflicts = decisions.Sum(d => d.ConflictCount);
        // Aggregate source ranks across all fused results
        var sourceStats = fused
            .SelectMany(f => f.Sources)
            .GroupBy(s => s.Source, StringComparer.OrdinalIgnoreCase)
            .Select(g => new PolicyDecisionSourceRank(
                Source: g.Key,
                TotalWeight: g.Sum(s => s.Weight),
                DecisionCount: g.Count(),
                AverageScore: g.Average(s => s.Score)))
            .OrderByDescending(r => r.TotalWeight)
            .ThenByDescending(r => r.AverageScore)
            .ToList();
        return new PolicyDecisionSummary(
            TotalDecisions: decisions.Count,
            TotalConflicts: totalConflicts,
            SeverityCounts: severityCounts,
            TopSeveritySources: sourceStats);
    }
    /// <summary>
    /// Maps a fused severity string to a decision status:
    /// critical/high → "violation", medium → "warn", anything else → "ok".
    /// </summary>
    private static string DeriveStatus(string severity) => severity.ToLowerInvariant() switch
    {
        "critical" => "violation",
        "high" => "violation",
        "medium" => "warn",
        _ => "ok"
    };
}

View File

@@ -198,10 +198,11 @@ public sealed class RiskProfileConfigurationService
var validation = _validator.Validate(json);
if (!validation.IsValid)
{
var errorMessages = validation.Errors?.Values ?? Enumerable.Empty<string>();
_logger.LogWarning(
"Risk profile file '{File}' failed validation: {Errors}",
file,
string.Join("; ", validation.Message ?? "Unknown error"));
string.Join("; ", errorMessages.Any() ? errorMessages : new[] { "Unknown error" }));
continue;
}
}

View File

@@ -0,0 +1,140 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Request to run a risk simulation.
/// </summary>
/// <param name="ProfileId">Identifier of the risk profile to score against.</param>
/// <param name="ProfileVersion">Optional profile version pin (null presumably selects the latest — confirm with the profile service).</param>
/// <param name="Findings">Findings to score.</param>
/// <param name="IncludeContributions">When true, per-signal contribution breakdowns and top movers are computed.</param>
/// <param name="IncludeDistribution">When true, the score distribution is computed.</param>
/// <param name="Mode">Simulation mode (full, quick, what-if).</param>
public sealed record RiskSimulationRequest(
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string? ProfileVersion,
    [property: JsonPropertyName("findings")] IReadOnlyList<SimulationFinding> Findings,
    [property: JsonPropertyName("include_contributions")] bool IncludeContributions = true,
    [property: JsonPropertyName("include_distribution")] bool IncludeDistribution = true,
    [property: JsonPropertyName("simulation_mode")] SimulationMode Mode = SimulationMode.Full);
/// <summary>
/// A finding to include in the simulation.
/// </summary>
/// <param name="FindingId">Caller-supplied finding identifier, echoed in the resulting <see cref="FindingScore"/>.</param>
/// <param name="ComponentPurl">Optional component PURL for reporting.</param>
/// <param name="AdvisoryId">Optional advisory identifier for reporting.</param>
/// <param name="Signals">Signal name → value map consumed by the scoring engine.</param>
public sealed record SimulationFinding(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("advisory_id")] string? AdvisoryId,
    [property: JsonPropertyName("signals")] Dictionary<string, object?> Signals);
/// <summary>
/// Simulation mode.
/// </summary>
/// <remarks>
/// NOTE(review): JsonStringEnumConverter does not honor [JsonPropertyName] on
/// enum members prior to .NET 9's [JsonStringEnumMemberName]; verify that these
/// values actually serialize as "full"/"quick"/"whatif" on the target framework
/// rather than "Full"/"Quick"/"WhatIf".
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<SimulationMode>))]
public enum SimulationMode
{
    /// <summary>
    /// Run full simulation with all computations.
    /// </summary>
    [JsonPropertyName("full")]
    Full,
    /// <summary>
    /// Quick estimation without detailed breakdowns.
    /// </summary>
    [JsonPropertyName("quick")]
    Quick,
    /// <summary>
    /// What-if analysis with hypothetical changes.
    /// </summary>
    [JsonPropertyName("whatif")]
    WhatIf
}
/// <summary>
/// Result of a risk simulation.
/// </summary>
/// <param name="SimulationId">Deterministic identifier for this simulation run.</param>
/// <param name="ProfileId">Profile that was evaluated.</param>
/// <param name="ProfileVersion">Resolved profile version.</param>
/// <param name="ProfileHash">Content hash of the resolved profile.</param>
/// <param name="Timestamp">Time the simulation was run.</param>
/// <param name="FindingScores">Per-finding scores.</param>
/// <param name="Distribution">Score distribution; null when not requested.</param>
/// <param name="TopMovers">Highest-impact findings; null when contributions were not requested.</param>
/// <param name="AggregateMetrics">Aggregate metrics across all findings.</param>
/// <param name="ExecutionTimeMs">Wall-clock execution time in milliseconds.</param>
public sealed record RiskSimulationResult(
    [property: JsonPropertyName("simulation_id")] string SimulationId,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("profile_hash")] string ProfileHash,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("finding_scores")] IReadOnlyList<FindingScore> FindingScores,
    [property: JsonPropertyName("distribution")] RiskDistribution? Distribution,
    [property: JsonPropertyName("top_movers")] IReadOnlyList<TopMover>? TopMovers,
    [property: JsonPropertyName("aggregate_metrics")] AggregateRiskMetrics AggregateMetrics,
    [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs);
/// <summary>
/// Computed risk score for a finding.
/// </summary>
/// <param name="FindingId">Identifier of the scored finding.</param>
/// <param name="RawScore">Score before normalization.</param>
/// <param name="NormalizedScore">Score after normalization.</param>
/// <param name="Severity">Derived severity.</param>
/// <param name="RecommendedAction">Recommended action for this severity/score.</param>
/// <param name="Contributions">Per-signal contributions; null when not requested.</param>
/// <param name="OverridesApplied">Overrides applied during scoring; null when none were recorded.</param>
public sealed record FindingScore(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("raw_score")] double RawScore,
    [property: JsonPropertyName("normalized_score")] double NormalizedScore,
    [property: JsonPropertyName("severity")] RiskSeverity Severity,
    [property: JsonPropertyName("action")] RiskAction RecommendedAction,
    [property: JsonPropertyName("contributions")] IReadOnlyList<SignalContribution>? Contributions,
    [property: JsonPropertyName("overrides_applied")] IReadOnlyList<AppliedOverride>? OverridesApplied);
/// <summary>
/// Contribution of a signal to the risk score.
/// </summary>
/// <param name="SignalName">Name of the contributing signal.</param>
/// <param name="SignalValue">Raw value of the signal for this finding.</param>
/// <param name="Weight">Weight assigned to the signal by the profile.</param>
/// <param name="Contribution">Absolute contribution to the score.</param>
/// <param name="ContributionPercentage">Contribution as a percentage of the total score.</param>
public sealed record SignalContribution(
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("signal_value")] object? SignalValue,
    [property: JsonPropertyName("weight")] double Weight,
    [property: JsonPropertyName("contribution")] double Contribution,
    [property: JsonPropertyName("contribution_percentage")] double ContributionPercentage);
/// <summary>
/// An override that was applied during scoring.
/// </summary>
/// <param name="OverrideType">Kind of override that fired.</param>
/// <param name="Predicate">Predicate that selected this finding.</param>
/// <param name="OriginalValue">Value before the override.</param>
/// <param name="AppliedValue">Value after the override.</param>
/// <param name="Reason">Optional human-readable reason.</param>
public sealed record AppliedOverride(
    [property: JsonPropertyName("override_type")] string OverrideType,
    [property: JsonPropertyName("predicate")] Dictionary<string, object> Predicate,
    [property: JsonPropertyName("original_value")] object? OriginalValue,
    [property: JsonPropertyName("applied_value")] object? AppliedValue,
    [property: JsonPropertyName("reason")] string? Reason);
/// <summary>
/// Distribution of risk scores across findings.
/// </summary>
/// <param name="Buckets">Histogram buckets over the score range.</param>
/// <param name="Percentiles">Percentile label → score value.</param>
/// <param name="SeverityBreakdown">Severity label → finding count.</param>
public sealed record RiskDistribution(
    [property: JsonPropertyName("buckets")] IReadOnlyList<RiskBucket> Buckets,
    [property: JsonPropertyName("percentiles")] Dictionary<string, double> Percentiles,
    [property: JsonPropertyName("severity_breakdown")] Dictionary<string, int> SeverityBreakdown);
/// <summary>
/// A bucket in the risk distribution.
/// </summary>
/// <param name="RangeMin">Inclusive lower bound of the bucket — confirm inclusivity with the producer.</param>
/// <param name="RangeMax">Upper bound of the bucket.</param>
/// <param name="Count">Number of findings whose score falls in this bucket.</param>
/// <param name="Percentage">Bucket count as a percentage of all findings.</param>
public sealed record RiskBucket(
    [property: JsonPropertyName("range_min")] double RangeMin,
    [property: JsonPropertyName("range_max")] double RangeMax,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage);
/// <summary>
/// A top mover in risk scoring (highest impact findings).
/// </summary>
/// <param name="FindingId">Identifier of the finding.</param>
/// <param name="ComponentPurl">Component PURL, when supplied on the input finding.</param>
/// <param name="Score">Computed risk score.</param>
/// <param name="Severity">Derived severity.</param>
/// <param name="PrimaryDriver">Name of the signal contributing most to the score.</param>
/// <param name="DriverContribution">Contribution of the primary driver.</param>
public sealed record TopMover(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("score")] double Score,
    [property: JsonPropertyName("severity")] RiskSeverity Severity,
    [property: JsonPropertyName("primary_driver")] string PrimaryDriver,
    [property: JsonPropertyName("driver_contribution")] double DriverContribution);
/// <summary>
/// Aggregate risk metrics across all findings.
/// </summary>
/// <param name="TotalFindings">Number of findings scored.</param>
/// <param name="MeanScore">Mean score.</param>
/// <param name="MedianScore">Median score.</param>
/// <param name="StdDeviation">Standard deviation of scores.</param>
/// <param name="MaxScore">Maximum score.</param>
/// <param name="MinScore">Minimum score.</param>
/// <param name="CriticalCount">Findings rated critical.</param>
/// <param name="HighCount">Findings rated high.</param>
/// <param name="MediumCount">Findings rated medium.</param>
/// <param name="LowCount">Findings rated low.</param>
/// <param name="InformationalCount">Findings rated informational.</param>
public sealed record AggregateRiskMetrics(
    [property: JsonPropertyName("total_findings")] int TotalFindings,
    [property: JsonPropertyName("mean_score")] double MeanScore,
    [property: JsonPropertyName("median_score")] double MedianScore,
    [property: JsonPropertyName("std_deviation")] double StdDeviation,
    [property: JsonPropertyName("max_score")] double MaxScore,
    [property: JsonPropertyName("min_score")] double MinScore,
    [property: JsonPropertyName("critical_count")] int CriticalCount,
    [property: JsonPropertyName("high_count")] int HighCount,
    [property: JsonPropertyName("medium_count")] int MediumCount,
    [property: JsonPropertyName("low_count")] int LowCount,
    [property: JsonPropertyName("informational_count")] int InformationalCount);

View File

@@ -0,0 +1,461 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Service for running risk simulations with score distributions and contribution breakdowns.
/// </summary>
public sealed class RiskSimulationService
{
    private readonly ILogger<RiskSimulationService> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly RiskProfileConfigurationService _profileService;
    private readonly RiskProfileHasher _hasher;

    // Percentile levels reported by ComputeDistribution (p25, p50, p75, p90, p95, p99).
    private static readonly double[] PercentileLevels = { 0.25, 0.50, 0.75, 0.90, 0.95, 0.99 };
    private const int TopMoverCount = 10;
    private const int BucketCount = 10;

    public RiskSimulationService(
        ILogger<RiskSimulationService> logger,
        TimeProvider timeProvider,
        RiskProfileConfigurationService profileService)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
        _hasher = new RiskProfileHasher();
    }

    /// <summary>
    /// Runs a risk simulation: scores every finding against the requested profile,
    /// then optionally computes the score distribution and top movers.
    /// </summary>
    /// <param name="request">Simulation request (profile id, findings, output toggles).</param>
    /// <returns>Full simulation result including aggregate metrics and wall-clock time.</returns>
    /// <exception cref="InvalidOperationException">The requested profile does not exist.</exception>
    public RiskSimulationResult Simulate(RiskSimulationRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("risk_simulation.run");
        activity?.SetTag("profile.id", request.ProfileId);
        activity?.SetTag("finding.count", request.Findings.Count);

        var sw = Stopwatch.StartNew();

        var profile = _profileService.GetProfile(request.ProfileId);
        if (profile == null)
        {
            throw new InvalidOperationException($"Risk profile '{request.ProfileId}' not found.");
        }

        var profileHash = _hasher.ComputeHash(profile);
        var simulationId = GenerateSimulationId(request, profileHash);

        var findingScores = request.Findings
            .Select(f => ComputeFindingScore(f, profile, request.IncludeContributions))
            .ToList();

        var distribution = request.IncludeDistribution
            ? ComputeDistribution(findingScores)
            : null;

        var topMovers = request.IncludeContributions
            ? ComputeTopMovers(findingScores, request.Findings)
            : null;

        var aggregateMetrics = ComputeAggregateMetrics(findingScores);

        sw.Stop();

        _logger.LogInformation(
            "Risk simulation {SimulationId} completed for {FindingCount} findings in {ElapsedMs}ms",
            simulationId, request.Findings.Count, sw.Elapsed.TotalMilliseconds);
        PolicyEngineTelemetry.RiskSimulationsRun.Add(1);

        return new RiskSimulationResult(
            SimulationId: simulationId,
            ProfileId: profile.Id,
            ProfileVersion: profile.Version,
            ProfileHash: profileHash,
            Timestamp: _timeProvider.GetUtcNow(),
            FindingScores: findingScores.AsReadOnly(),
            Distribution: distribution,
            TopMovers: topMovers,
            AggregateMetrics: aggregateMetrics,
            ExecutionTimeMs: sw.Elapsed.TotalMilliseconds);
    }

    /// <summary>
    /// Scores a single finding: weighted sum of its signal values, normalized to
    /// 0-100, followed by the profile's severity and decision overrides.
    /// </summary>
    private FindingScore ComputeFindingScore(
        SimulationFinding finding,
        RiskProfileModel profile,
        bool includeContributions)
    {
        var contributions = new List<SignalContribution>();
        var overridesApplied = new List<AppliedOverride>();
        var rawScore = 0.0;

        // Weighted sum over the signals the finding actually carries; missing
        // signals contribute nothing.
        foreach (var signal in profile.Signals)
        {
            if (!finding.Signals.TryGetValue(signal.Name, out var signalValue))
            {
                continue;
            }

            var numericValue = ConvertToNumeric(signalValue, signal.Type);
            var weight = profile.Weights.GetValueOrDefault(signal.Name, 0.0);
            var contribution = numericValue * weight;
            rawScore += contribution;

            if (includeContributions)
            {
                contributions.Add(new SignalContribution(
                    SignalName: signal.Name,
                    SignalValue: signalValue,
                    Weight: weight,
                    Contribution: contribution,
                    ContributionPercentage: 0.0)); // Filled in once the total is known.
            }
        }

        // Normalize to 0-100. The x10 factor assumes weighted sums land roughly
        // in 0..10 -- TODO confirm against profile weight conventions.
        var normalizedScore = Math.Clamp(rawScore * 10, 0, 100);

        // Severity overrides: first matching predicate wins (profile order).
        var severity = DetermineSeverity(normalizedScore);
        foreach (var severityOverride in profile.Overrides.Severity)
        {
            if (MatchesPredicate(finding.Signals, severityOverride.When))
            {
                var originalSeverity = severity;
                severity = severityOverride.Set;
                if (includeContributions)
                {
                    overridesApplied.Add(new AppliedOverride(
                        OverrideType: "severity",
                        Predicate: severityOverride.When,
                        OriginalValue: originalSeverity.ToString(),
                        AppliedValue: severity.ToString(),
                        Reason: null));
                }
                break;
            }
        }

        // Decision overrides: first matching predicate wins (profile order).
        var recommendedAction = DetermineAction(severity);
        foreach (var decisionOverride in profile.Overrides.Decisions)
        {
            if (MatchesPredicate(finding.Signals, decisionOverride.When))
            {
                var originalAction = recommendedAction;
                recommendedAction = decisionOverride.Action;
                if (includeContributions)
                {
                    overridesApplied.Add(new AppliedOverride(
                        OverrideType: "decision",
                        Predicate: decisionOverride.When,
                        OriginalValue: originalAction.ToString(),
                        AppliedValue: recommendedAction.ToString(),
                        Reason: decisionOverride.Reason));
                }
                break;
            }
        }

        // Back-fill contribution percentages now that the total is known.
        // With non-positive rawScore (e.g. negative weights) percentages are
        // intentionally left at 0.
        if (includeContributions && rawScore > 0)
        {
            contributions = contributions
                .Select(c => c with { ContributionPercentage = (c.Contribution / rawScore) * 100 })
                .ToList();
        }

        return new FindingScore(
            FindingId: finding.FindingId,
            RawScore: rawScore,
            NormalizedScore: normalizedScore,
            Severity: severity,
            RecommendedAction: recommendedAction,
            Contributions: includeContributions ? contributions.AsReadOnly() : null,
            OverridesApplied: includeContributions && overridesApplied.Count > 0
                ? overridesApplied.AsReadOnly()
                : null);
    }

    /// <summary>
    /// Converts a raw signal value (CLR primitive, string, or JsonElement) into
    /// a numeric score component. Unrecognized shapes map to 0.
    /// </summary>
    private static double ConvertToNumeric(object? value, RiskSignalType signalType)
    {
        if (value == null)
        {
            return 0.0;
        }

        return signalType switch
        {
            RiskSignalType.Boolean => value switch
            {
                bool b => b ? 1.0 : 0.0,
                JsonElement je when je.ValueKind == JsonValueKind.True => 1.0,
                JsonElement je when je.ValueKind == JsonValueKind.False => 0.0,
                string s when bool.TryParse(s, out var b) => b ? 1.0 : 0.0,
                _ => 0.0
            },
            RiskSignalType.Numeric => value switch
            {
                double d => d,
                float f => f,
                int i => i,
                long l => l,
                decimal dec => (double)dec,
                JsonElement je when je.TryGetDouble(out var d) => d,
                // Invariant culture: signal payloads are machine-generated, so
                // "1.5" must parse identically regardless of server locale.
                string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var d) => d,
                _ => 0.0
            },
            RiskSignalType.Categorical => value switch
            {
                string s => MapCategoricalToNumeric(s),
                JsonElement je when je.ValueKind == JsonValueKind.String => MapCategoricalToNumeric(je.GetString() ?? ""),
                _ => 0.0
            },
            _ => 0.0
        };
    }

    /// <summary>
    /// Maps well-known categorical labels onto a 0..1 scale; unrecognized
    /// categories fall back to a neutral 0.5.
    /// </summary>
    private static double MapCategoricalToNumeric(string category)
    {
        return category.ToLowerInvariant() switch
        {
            "none" or "unknown" => 0.0,
            "indirect" or "low" => 0.3,
            "direct" or "medium" => 0.6,
            "high" or "critical" => 1.0,
            _ => 0.5
        };
    }

    /// <summary>Maps a normalized 0-100 score onto a severity band.</summary>
    private static RiskSeverity DetermineSeverity(double score)
    {
        return score switch
        {
            >= 90 => RiskSeverity.Critical,
            >= 70 => RiskSeverity.High,
            >= 40 => RiskSeverity.Medium,
            >= 10 => RiskSeverity.Low,
            _ => RiskSeverity.Informational
        };
    }

    /// <summary>Default recommended action per severity, before decision overrides.</summary>
    private static RiskAction DetermineAction(RiskSeverity severity)
    {
        return severity switch
        {
            RiskSeverity.Critical => RiskAction.Deny,
            RiskSeverity.High => RiskAction.Deny,
            RiskSeverity.Medium => RiskAction.Review,
            _ => RiskAction.Allow
        };
    }

    /// <summary>
    /// True when every key/value pair in the predicate is present and equal in
    /// the finding's signals (AND semantics).
    /// </summary>
    private static bool MatchesPredicate(Dictionary<string, object?> signals, Dictionary<string, object> predicate)
    {
        foreach (var (key, expected) in predicate)
        {
            if (!signals.TryGetValue(key, out var actual))
            {
                return false;
            }

            if (!ValuesEqual(actual, expected))
            {
                return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Loose equality across CLR primitives and JsonElement values: JSON values
    /// are unwrapped and numerics are compared as doubles so that e.g. boxed
    /// int 5 equals JSON number 5.
    /// </summary>
    private static bool ValuesEqual(object? a, object? b)
    {
        if (a == null && b == null) return true;
        if (a == null || b == null) return false;

        // Two JsonElements: compare raw JSON text.
        if (a is JsonElement jeA && b is JsonElement jeB)
        {
            return jeA.GetRawText() == jeB.GetRawText();
        }

        if (a is JsonElement je)
        {
            a = UnwrapJsonElement(je);
        }

        if (b is JsonElement jeb)
        {
            b = UnwrapJsonElement(jeb);
        }

        // Numeric cross-type comparison: boxed Equals(5, 5.0) is false, which
        // would make predicates silently never match for int-valued signals.
        if (TryToDouble(a, out var da) && TryToDouble(b, out var db))
        {
            return da.Equals(db);
        }

        return Equals(a, b);
    }

    /// <summary>Unwraps a JsonElement into the closest CLR primitive.</summary>
    private static object? UnwrapJsonElement(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.String => element.GetString(),
            JsonValueKind.Number => element.GetDouble(),
            JsonValueKind.True => true,
            JsonValueKind.False => false,
            _ => element.GetRawText()
        };
    }

    /// <summary>Attempts to coerce a boxed numeric value to a double.</summary>
    private static bool TryToDouble(object? value, out double result)
    {
        switch (value)
        {
            case double d: result = d; return true;
            case float f: result = f; return true;
            case int i: result = i; return true;
            case long l: result = l; return true;
            case decimal dec: result = (double)dec; return true;
            default: result = 0.0; return false;
        }
    }

    /// <summary>
    /// Builds a 10-bucket histogram over 0-100, nearest-rank percentiles, and a
    /// per-severity count breakdown of the normalized scores.
    /// </summary>
    private static RiskDistribution ComputeDistribution(List<FindingScore> scores)
    {
        if (scores.Count == 0)
        {
            return new RiskDistribution(
                Buckets: Array.Empty<RiskBucket>(),
                Percentiles: new Dictionary<string, double>(),
                SeverityBreakdown: new Dictionary<string, int>
                {
                    ["critical"] = 0,
                    ["high"] = 0,
                    ["medium"] = 0,
                    ["low"] = 0,
                    ["informational"] = 0
                });
        }

        var normalizedScores = scores.Select(s => s.NormalizedScore).OrderBy(x => x).ToList();

        // Histogram buckets are [min, max) except the last, which is [min, max]
        // so a score of exactly 100 is not dropped from every bucket.
        var buckets = new List<RiskBucket>();
        var bucketSize = 100.0 / BucketCount;
        for (var i = 0; i < BucketCount; i++)
        {
            var rangeMin = i * bucketSize;
            var rangeMax = (i + 1) * bucketSize;
            var isLastBucket = i == BucketCount - 1;
            var count = normalizedScores.Count(s => s >= rangeMin && (s < rangeMax || (isLastBucket && s <= rangeMax)));
            buckets.Add(new RiskBucket(
                RangeMin: rangeMin,
                RangeMax: rangeMax,
                Count: count,
                Percentage: (double)count / scores.Count * 100));
        }

        // Nearest-rank percentiles (index truncation, no interpolation).
        var percentiles = new Dictionary<string, double>();
        foreach (var level in PercentileLevels)
        {
            var index = (int)(level * (normalizedScores.Count - 1));
            percentiles[$"p{(int)(level * 100)}"] = normalizedScores[index];
        }

        var severityBreakdown = scores
            .GroupBy(s => s.Severity.ToString().ToLowerInvariant())
            .ToDictionary(g => g.Key, g => g.Count());

        // Emit zeros for severities with no findings so consumers always see
        // the full key set.
        foreach (var sev in new[] { "critical", "high", "medium", "low", "informational" })
        {
            severityBreakdown.TryAdd(sev, 0);
        }

        return new RiskDistribution(
            Buckets: buckets.AsReadOnly(),
            Percentiles: percentiles,
            SeverityBreakdown: severityBreakdown);
    }

    /// <summary>
    /// Returns the top N findings by normalized score, each annotated with its
    /// largest-contributing signal.
    /// </summary>
    private static IReadOnlyList<TopMover> ComputeTopMovers(
        List<FindingScore> scores,
        IReadOnlyList<SimulationFinding> findings)
    {
        var findingLookup = findings.ToDictionary(f => f.FindingId, StringComparer.OrdinalIgnoreCase);

        return scores
            .OrderByDescending(s => s.NormalizedScore)
            .Take(TopMoverCount)
            .Select(s =>
            {
                var finding = findingLookup.GetValueOrDefault(s.FindingId);
                var primaryContribution = s.Contributions?
                    .OrderByDescending(c => c.ContributionPercentage)
                    .FirstOrDefault();

                return new TopMover(
                    FindingId: s.FindingId,
                    ComponentPurl: finding?.ComponentPurl,
                    Score: s.NormalizedScore,
                    Severity: s.Severity,
                    PrimaryDriver: primaryContribution?.SignalName ?? "unknown",
                    DriverContribution: primaryContribution?.ContributionPercentage ?? 0);
            })
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Computes mean/median/std-dev (population) and per-severity counts over
    /// the normalized scores; all-zero metrics for an empty set.
    /// </summary>
    private static AggregateRiskMetrics ComputeAggregateMetrics(List<FindingScore> scores)
    {
        if (scores.Count == 0)
        {
            return new AggregateRiskMetrics(
                TotalFindings: 0,
                MeanScore: 0,
                MedianScore: 0,
                StdDeviation: 0,
                MaxScore: 0,
                MinScore: 0,
                CriticalCount: 0,
                HighCount: 0,
                MediumCount: 0,
                LowCount: 0,
                InformationalCount: 0);
        }

        var normalizedScores = scores.Select(s => s.NormalizedScore).ToList();
        var mean = normalizedScores.Average();

        var sortedScores = normalizedScores.OrderBy(x => x).ToList();
        var median = sortedScores.Count % 2 == 0
            ? (sortedScores[sortedScores.Count / 2 - 1] + sortedScores[sortedScores.Count / 2]) / 2
            : sortedScores[sortedScores.Count / 2];

        var variance = normalizedScores.Average(s => Math.Pow(s - mean, 2));
        var stdDev = Math.Sqrt(variance);

        return new AggregateRiskMetrics(
            TotalFindings: scores.Count,
            MeanScore: Math.Round(mean, 2),
            MedianScore: Math.Round(median, 2),
            StdDeviation: Math.Round(stdDev, 2),
            MaxScore: normalizedScores.Max(),
            MinScore: normalizedScores.Min(),
            CriticalCount: scores.Count(s => s.Severity == RiskSeverity.Critical),
            HighCount: scores.Count(s => s.Severity == RiskSeverity.High),
            MediumCount: scores.Count(s => s.Severity == RiskSeverity.Medium),
            LowCount: scores.Count(s => s.Severity == RiskSeverity.Low),
            InformationalCount: scores.Count(s => s.Severity == RiskSeverity.Informational));
    }

    /// <summary>
    /// Generates a unique id of the form "rsim-&lt;16 hex chars&gt;"; the Guid in
    /// the seed makes the id unique per run even for identical requests.
    /// </summary>
    private static string GenerateSimulationId(RiskSimulationRequest request, string profileHash)
    {
        var seed = $"{request.ProfileId}|{profileHash}|{request.Findings.Count}|{Guid.NewGuid()}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
        return $"rsim-{Convert.ToHexStringLower(hash)[..16]}";
    }
}

View File

@@ -139,10 +139,7 @@ public sealed class IncidentModeSampler : Sampler
// During incident mode, always sample
if (_incidentModeService.IsActive)
{
return new SamplingResult(
SamplingDecision.RecordAndSample,
samplingParameters.Tags,
samplingParameters.Links);
return new SamplingResult(SamplingDecision.RecordAndSample);
}
// Otherwise, use the base sampler

View File

@@ -35,9 +35,9 @@ public static class PolicyEngineTelemetry
// Gauge: policy_run_queue_depth{tenant}
private static readonly ObservableGauge<int> PolicyRunQueueDepthGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<int>(
"policy_run_queue_depth",
observeValue: () => QueueDepthObservations,
observeValues: () => QueueDepthObservations ?? Enumerable.Empty<Measurement<int>>(),
unit: "jobs",
description: "Current depth of pending policy run jobs per tenant.");
@@ -148,17 +148,17 @@ public static class PolicyEngineTelemetry
// Gauge: policy_concurrent_evaluations{tenant}
private static readonly ObservableGauge<int> ConcurrentEvaluationsGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<int>(
"policy_concurrent_evaluations",
observeValue: () => ConcurrentEvaluationsObservations,
observeValues: () => ConcurrentEvaluationsObservations ?? Enumerable.Empty<Measurement<int>>(),
unit: "evaluations",
description: "Current number of concurrent policy evaluations.");
// Gauge: policy_worker_utilization
private static readonly ObservableGauge<double> WorkerUtilizationGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_worker_utilization",
observeValue: () => WorkerUtilizationObservations,
observeValues: () => WorkerUtilizationObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Worker pool utilization ratio (0.0 to 1.0).");
@@ -168,17 +168,17 @@ public static class PolicyEngineTelemetry
// Gauge: policy_slo_burn_rate{slo_name}
private static readonly ObservableGauge<double> SloBurnRateGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_slo_burn_rate",
observeValue: () => SloBurnRateObservations,
observeValues: () => SloBurnRateObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "SLO burn rate over configured window.");
// Gauge: policy_error_budget_remaining{slo_name}
private static readonly ObservableGauge<double> ErrorBudgetRemainingGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_error_budget_remaining",
observeValue: () => ErrorBudgetObservations,
observeValues: () => ErrorBudgetObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Remaining error budget as ratio (0.0 to 1.0).");
@@ -265,6 +265,143 @@ public static class PolicyEngineTelemetry
#endregion
#region Risk Simulation and Events Metrics
// Counter: policy_risk_simulations_run_total
private static readonly Counter<long> RiskSimulationsRunCounter =
Meter.CreateCounter<long>(
"policy_risk_simulations_run_total",
unit: "simulations",
description: "Total risk simulations executed.");
// Counter: policy_profile_events_published_total
private static readonly Counter<long> ProfileEventsPublishedCounter =
Meter.CreateCounter<long>(
"policy_profile_events_published_total",
unit: "events",
description: "Total profile lifecycle events published.");
/// <summary>
/// Counter for risk simulations run.
/// </summary>
public static Counter<long> RiskSimulationsRun => RiskSimulationsRunCounter;
/// <summary>
/// Counter for profile events published.
/// </summary>
public static Counter<long> ProfileEventsPublished => ProfileEventsPublishedCounter;
#endregion
#region Reachability Metrics
// Counter: policy_reachability_applied_total{state}
private static readonly Counter<long> ReachabilityAppliedCounter =
Meter.CreateCounter<long>(
"policy_reachability_applied_total",
unit: "facts",
description: "Total reachability facts applied during policy evaluation.");
// Counter: policy_reachability_cache_hits_total
private static readonly Counter<long> ReachabilityCacheHitsCounter =
Meter.CreateCounter<long>(
"policy_reachability_cache_hits_total",
unit: "hits",
description: "Total reachability facts cache hits.");
// Counter: policy_reachability_cache_misses_total
private static readonly Counter<long> ReachabilityCacheMissesCounter =
Meter.CreateCounter<long>(
"policy_reachability_cache_misses_total",
unit: "misses",
description: "Total reachability facts cache misses.");
// Gauge: policy_reachability_cache_hit_ratio
private static readonly ObservableGauge<double> ReachabilityCacheHitRatioGauge =
Meter.CreateObservableGauge<double>(
"policy_reachability_cache_hit_ratio",
observeValues: () => ReachabilityCacheHitRatioObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Reachability facts cache hit ratio (0.0 to 1.0).");
// Counter: policy_reachability_lookups_total{outcome}
private static readonly Counter<long> ReachabilityLookupsCounter =
Meter.CreateCounter<long>(
"policy_reachability_lookups_total",
unit: "lookups",
description: "Total reachability facts lookup operations.");
// Histogram: policy_reachability_lookup_seconds
private static readonly Histogram<double> ReachabilityLookupSecondsHistogram =
Meter.CreateHistogram<double>(
"policy_reachability_lookup_seconds",
unit: "s",
description: "Duration of reachability facts lookup operations.");
private static IEnumerable<Measurement<double>> ReachabilityCacheHitRatioObservations = Enumerable.Empty<Measurement<double>>();
/// <summary>
/// Records reachability facts applied during policy evaluation, tagged by state.
/// </summary>
/// <param name="state">Reachability state (reachable, unreachable, unknown, under_investigation).</param>
/// <param name="count">Number of facts applied; defaults to 1.</param>
public static void RecordReachabilityApplied(string state, long count = 1)
{
    ReachabilityAppliedCounter.Add(
        count,
        new TagList
        {
            { "state", NormalizeTag(state) },
        });
}
/// <summary>
/// Records reachability cache hits by incrementing the monotonic hits counter.
/// </summary>
/// <param name="count">Number of hits to add (untagged).</param>
public static void RecordReachabilityCacheHits(long count)
{
    ReachabilityCacheHitsCounter.Add(count);
}
/// <summary>
/// Records reachability cache misses by incrementing the monotonic misses counter.
/// </summary>
/// <param name="count">Number of misses to add (untagged).</param>
public static void RecordReachabilityCacheMisses(long count)
{
    ReachabilityCacheMissesCounter.Add(count);
}
/// <summary>
/// Records one reachability lookup: the batch size goes into the lookups
/// counter and the duration into the latency histogram, both tagged by outcome.
/// </summary>
/// <param name="outcome">Outcome (found, not_found, error).</param>
/// <param name="seconds">Duration in seconds.</param>
/// <param name="batchSize">Number of items looked up.</param>
public static void RecordReachabilityLookup(string outcome, double seconds, int batchSize)
{
    var outcomeTags = new TagList
    {
        { "outcome", NormalizeTag(outcome) },
    };

    ReachabilityLookupsCounter.Add(batchSize, outcomeTags);
    ReachabilityLookupSecondsHistogram.Record(seconds, outcomeTags);
}
/// <summary>
/// Registers a callback to observe reachability cache hit ratio.
/// </summary>
/// <param name="observeFunc">Function that returns current cache hit ratio measurements.</param>
public static void RegisterReachabilityCacheHitRatioObservation(Func<IEnumerable<Measurement<double>>> observeFunc)
{
    ArgumentNullException.ThrowIfNull(observeFunc);

    // Store a deferred enumerable rather than observeFunc() directly: invoking
    // the callback here would freeze a one-time snapshot (for any callback that
    // returns a materialized list), so the gauge would never reflect updates.
    // Deferring re-invokes the callback on every gauge scrape.
    ReachabilityCacheHitRatioObservations = Defer(observeFunc);

    static IEnumerable<Measurement<double>> Defer(Func<IEnumerable<Measurement<double>>> observe)
    {
        foreach (var measurement in observe())
        {
            yield return measurement;
        }
    }
}
#endregion
// Storage for observable gauge observations
private static IEnumerable<Measurement<int>> QueueDepthObservations = Enumerable.Empty<Measurement<int>>();
private static IEnumerable<Measurement<int>> ConcurrentEvaluationsObservations = Enumerable.Empty<Measurement<int>>();

View File

@@ -28,7 +28,7 @@ public static class RiskProfileCanonicalizer
public static byte[] CanonicalizeToUtf8(ReadOnlySpan<byte> utf8Json)
{
using var doc = JsonDocument.Parse(utf8Json, DocOptions);
using var doc = JsonDocument.Parse(utf8Json.ToArray(), DocOptions);
var canonical = CanonicalizeElement(doc.RootElement);
return Encoding.UTF8.GetBytes(canonical);
}
@@ -103,11 +103,11 @@ public static class RiskProfileCanonicalizer
}
else if (IsSeverityOverrides(path))
{
items = items.OrderBy(GetWhenThenKey, StringComparer.Ordinal).ToList();
items = items.OrderBy(GetWhenThenKeyFromNode, StringComparer.Ordinal).ToList();
}
else if (IsDecisionOverrides(path))
{
items = items.OrderBy(GetWhenThenKey, StringComparer.Ordinal).ToList();
items = items.OrderBy(GetWhenThenKeyFromNode, StringComparer.Ordinal).ToList();
}
array.Clear();
@@ -303,6 +303,19 @@ public static class RiskProfileCanonicalizer
return when + "|" + then;
}
/// <summary>
/// Builds the deterministic sort key "&lt;when-json&gt;|&lt;set-or-action-json&gt;" for an
/// override entry. Tolerates null, non-object nodes, and missing properties so
/// canonicalization never throws on malformed input (the original AsObject()
/// call threw InvalidOperationException on non-object nodes).
/// </summary>
private static string GetWhenThenKeyFromNode(JsonNode? node)
{
    if (node is not JsonObject obj)
    {
        return string.Empty;
    }

    var when = obj.TryGetPropertyValue("when", out var whenNode) && whenNode is not null
        ? whenNode.ToJsonString()
        : string.Empty;

    // Severity overrides carry "set"; decision overrides carry "action".
    var then = obj.TryGetPropertyValue("set", out var setNode) && setNode is not null
        ? setNode.ToJsonString()
        : obj.TryGetPropertyValue("action", out var actionNode) && actionNode is not null
            ? actionNode.ToJsonString()
            : string.Empty;

    return when + "|" + then;
}
private static bool IsSignals(IReadOnlyList<string> path)
=> path.Count >= 1 && path[^1] == "signals";

View File

@@ -0,0 +1,115 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Lifecycle;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.RiskProfile.Export;
/// <summary>
/// Exported risk profile bundle with signature.
/// </summary>
/// <param name="BundleId">Unique identifier assigned at export time.</param>
/// <param name="FormatVersion">Bundle format version string.</param>
/// <param name="CreatedAt">UTC timestamp the bundle was created.</param>
/// <param name="CreatedBy">Identity of the exporter, when known.</param>
/// <param name="Profiles">Profiles included in the bundle, each with its content hash.</param>
/// <param name="Signature">Bundle signature; null when the bundle was exported unsigned.</param>
/// <param name="Metadata">Bundle-level metadata (source, counts, total hash).</param>
public sealed record RiskProfileBundle(
    [property: JsonPropertyName("bundle_id")] string BundleId,
    [property: JsonPropertyName("format_version")] string FormatVersion,
    [property: JsonPropertyName("created_at")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("created_by")] string? CreatedBy,
    [property: JsonPropertyName("profiles")] IReadOnlyList<ExportedProfile> Profiles,
    [property: JsonPropertyName("signature")] BundleSignature? Signature,
    [property: JsonPropertyName("metadata")] BundleMetadata Metadata);
/// <summary>
/// An exported profile with its lifecycle info.
/// </summary>
/// <param name="Profile">The profile definition.</param>
/// <param name="Lifecycle">Lifecycle/version info, when available from the source system.</param>
/// <param name="ContentHash">Content hash of the profile, verified on import.</param>
public sealed record ExportedProfile(
    [property: JsonPropertyName("profile")] RiskProfileModel Profile,
    [property: JsonPropertyName("lifecycle")] RiskProfileVersionInfo? Lifecycle,
    [property: JsonPropertyName("content_hash")] string ContentHash);
/// <summary>
/// Signature for a profile bundle.
/// </summary>
/// <param name="Algorithm">Signature algorithm identifier (e.g. "HMAC-SHA256").</param>
/// <param name="KeyId">Identifier of the signing key; null when a default key was used.</param>
/// <param name="Value">Signature value (hex-encoded).</param>
/// <param name="SignedAt">UTC timestamp of signing.</param>
/// <param name="SignedBy">Identity of the signer, when known.</param>
public sealed record BundleSignature(
    [property: JsonPropertyName("algorithm")] string Algorithm,
    [property: JsonPropertyName("key_id")] string? KeyId,
    [property: JsonPropertyName("value")] string Value,
    [property: JsonPropertyName("signed_at")] DateTimeOffset SignedAt,
    [property: JsonPropertyName("signed_by")] string? SignedBy);
/// <summary>
/// Metadata for an exported bundle.
/// </summary>
/// <param name="SourceSystem">System that produced the bundle.</param>
/// <param name="SourceVersion">Version of the producing system.</param>
/// <param name="ProfileCount">Number of profiles in the bundle.</param>
/// <param name="TotalHash">Hash over all profile content hashes.</param>
/// <param name="Description">Optional free-text description.</param>
/// <param name="Tags">Optional classification tags.</param>
public sealed record BundleMetadata(
    [property: JsonPropertyName("source_system")] string SourceSystem,
    [property: JsonPropertyName("source_version")] string SourceVersion,
    [property: JsonPropertyName("profile_count")] int ProfileCount,
    [property: JsonPropertyName("total_hash")] string TotalHash,
    [property: JsonPropertyName("description")] string? Description,
    [property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags);
/// <summary>
/// Request to export profiles.
/// </summary>
/// <param name="ProfileIds">Identifiers of profiles to export.</param>
/// <param name="IncludeAllVersions">When true, export all versions rather than only the current one.</param>
/// <param name="SignBundle">When true (default), the bundle is signed on export.</param>
/// <param name="KeyId">Optional signing key id; a default key is used when omitted.</param>
/// <param name="Description">Optional description copied into bundle metadata.</param>
/// <param name="Tags">Optional tags copied into bundle metadata.</param>
public sealed record ExportProfilesRequest(
    [property: JsonPropertyName("profile_ids")] IReadOnlyList<string> ProfileIds,
    [property: JsonPropertyName("include_all_versions")] bool IncludeAllVersions = false,
    [property: JsonPropertyName("sign_bundle")] bool SignBundle = true,
    [property: JsonPropertyName("key_id")] string? KeyId = null,
    [property: JsonPropertyName("description")] string? Description = null,
    [property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags = null);
/// <summary>
/// Request to import profiles.
/// </summary>
/// <param name="Bundle">The bundle to import.</param>
/// <param name="VerifySignature">When true (default), the bundle signature is verified before import.</param>
/// <param name="OverwriteExisting">When true, existing profiles are overwritten instead of skipped.</param>
/// <param name="ActivateOnImport">When true, imported profiles are activated immediately.</param>
public sealed record ImportProfilesRequest(
    [property: JsonPropertyName("bundle")] RiskProfileBundle Bundle,
    [property: JsonPropertyName("verify_signature")] bool VerifySignature = true,
    [property: JsonPropertyName("overwrite_existing")] bool OverwriteExisting = false,
    [property: JsonPropertyName("activate_on_import")] bool ActivateOnImport = false);
/// <summary>
/// Result of import operation.
/// </summary>
/// <param name="BundleId">Identifier of the imported bundle.</param>
/// <param name="ImportedCount">Profiles imported or updated.</param>
/// <param name="SkippedCount">Profiles skipped (already present, overwrite disabled).</param>
/// <param name="ErrorCount">Profiles that failed (hash mismatch, storage error).</param>
/// <param name="Details">Per-profile outcomes.</param>
/// <param name="SignatureVerified">Signature verification outcome; null when verification was not performed.</param>
public sealed record ImportResult(
    [property: JsonPropertyName("bundle_id")] string BundleId,
    [property: JsonPropertyName("imported_count")] int ImportedCount,
    [property: JsonPropertyName("skipped_count")] int SkippedCount,
    [property: JsonPropertyName("error_count")] int ErrorCount,
    [property: JsonPropertyName("details")] IReadOnlyList<ImportProfileResult> Details,
    [property: JsonPropertyName("signature_verified")] bool? SignatureVerified);
/// <summary>
/// Result of importing a single profile.
/// </summary>
/// <param name="ProfileId">Identifier of the profile.</param>
/// <param name="Version">Version of the profile.</param>
/// <param name="Status">Outcome of the import for this profile.</param>
/// <param name="Message">Human-readable detail; null on clean success.</param>
public sealed record ImportProfileResult(
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("status")] ImportStatus Status,
    [property: JsonPropertyName("message")] string? Message);
/// <summary>
/// Status of profile import. Serialized as lowercase strings via
/// <see cref="JsonStringEnumConverter{T}"/>; note that [JsonPropertyName] is
/// ignored on enum members, so [JsonStringEnumMemberName] is required to get
/// lowercase wire values.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<ImportStatus>))]
public enum ImportStatus
{
    /// <summary>Profile was newly imported.</summary>
    [JsonStringEnumMemberName("imported")]
    Imported,

    /// <summary>Profile already existed and overwrite was not enabled.</summary>
    [JsonStringEnumMemberName("skipped")]
    Skipped,

    /// <summary>Import failed (e.g. content hash mismatch or storage error).</summary>
    [JsonStringEnumMemberName("error")]
    Error,

    /// <summary>Existing profile was overwritten.</summary>
    [JsonStringEnumMemberName("updated")]
    Updated
}
/// <summary>
/// Result of signature verification.
/// </summary>
/// <param name="IsValid">Whether the signature matched the recomputed value.</param>
/// <param name="Algorithm">Algorithm recorded in the signature; null when the bundle was unsigned.</param>
/// <param name="KeyId">Key id recorded in the signature, when present.</param>
/// <param name="SignedAt">Signing timestamp recorded in the signature, when present.</param>
/// <param name="Error">Failure reason; null when verification succeeded.</param>
public sealed record SignatureVerificationResult(
    [property: JsonPropertyName("is_valid")] bool IsValid,
    [property: JsonPropertyName("algorithm")] string? Algorithm,
    [property: JsonPropertyName("key_id")] string? KeyId,
    [property: JsonPropertyName("signed_at")] DateTimeOffset? SignedAt,
    [property: JsonPropertyName("error")] string? Error);

View File

@@ -0,0 +1,356 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Lifecycle;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.RiskProfile.Export;
/// <summary>
/// Service for exporting and importing risk profiles with signatures.
/// </summary>
public sealed class ProfileExportService
{
private const string FormatVersion = "1.0";
private const string SourceSystem = "StellaOps.Policy";
private const string DefaultAlgorithm = "HMAC-SHA256";
private readonly TimeProvider _timeProvider;
private readonly RiskProfileHasher _hasher;
private readonly Func<string, RiskProfileModel?>? _profileLookup;
private readonly Func<string, RiskProfileVersionInfo?>? _lifecycleLookup;
private readonly Action<RiskProfileModel>? _profileSave;
private readonly Func<string, string?>? _keyLookup;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Creates the export service. All collaborators are optional delegates so the
/// service can run standalone (export-only) or be wired to a profile store.
/// </summary>
/// <param name="timeProvider">Clock source; defaults to <see cref="TimeProvider.System"/>.</param>
/// <param name="profileLookup">Resolves an existing profile by id; used on import to detect overwrites.</param>
/// <param name="lifecycleLookup">Resolves lifecycle/version info included with each exported profile.</param>
/// <param name="profileSave">Persists an imported profile; when null, import records results without saving.</param>
/// <param name="keyLookup">Resolves a signing key by key id for bundle signing/verification.</param>
public ProfileExportService(
    TimeProvider? timeProvider = null,
    Func<string, RiskProfileModel?>? profileLookup = null,
    Func<string, RiskProfileVersionInfo?>? lifecycleLookup = null,
    Action<RiskProfileModel>? profileSave = null,
    Func<string, string?>? keyLookup = null)
{
    _timeProvider = timeProvider ?? TimeProvider.System;
    _hasher = new RiskProfileHasher();
    _profileLookup = profileLookup;
    _lifecycleLookup = lifecycleLookup;
    _profileSave = profileSave;
    _keyLookup = keyLookup;
}
/// <summary>
/// Exports the given profiles into a bundle, optionally signed, with a content
/// hash per profile and a total hash over the set.
/// </summary>
/// <param name="profiles">Profiles to include in the bundle.</param>
/// <param name="request">Export options (signing, key id, description, tags).</param>
/// <param name="exportedBy">Optional exporter identity recorded on the bundle.</param>
public RiskProfileBundle Export(
    IReadOnlyList<RiskProfileModel> profiles,
    ExportProfilesRequest request,
    string? exportedBy = null)
{
    ArgumentNullException.ThrowIfNull(profiles);
    ArgumentNullException.ThrowIfNull(request);

    var createdAt = _timeProvider.GetUtcNow();
    var bundleId = GenerateBundleId(createdAt);

    // Wrap each profile with its lifecycle info (when resolvable) and a
    // content hash that import will verify.
    var entries = new List<ExportedProfile>(profiles.Count);
    foreach (var profile in profiles)
    {
        entries.Add(new ExportedProfile(
            Profile: profile,
            Lifecycle: _lifecycleLookup?.Invoke(profile.Id),
            ContentHash: _hasher.ComputeContentHash(profile)));
    }

    var metadata = new BundleMetadata(
        SourceSystem: SourceSystem,
        SourceVersion: GetSourceVersion(),
        ProfileCount: entries.Count,
        TotalHash: ComputeTotalHash(entries),
        Description: request.Description,
        Tags: request.Tags);

    var signature = request.SignBundle
        ? SignBundle(entries, metadata, request.KeyId, exportedBy, createdAt)
        : null;

    return new RiskProfileBundle(
        BundleId: bundleId,
        FormatVersion: FormatVersion,
        CreatedAt: createdAt,
        CreatedBy: exportedBy,
        Profiles: entries.AsReadOnly(),
        Signature: signature,
        Metadata: metadata);
}
/// <summary>
/// Imports profiles from a bundle: optionally verifies the bundle signature,
/// checks each profile's content hash, skips or overwrites existing profiles,
/// and reports a per-profile outcome.
/// </summary>
/// <param name="request">Import request wrapping the bundle and options.</param>
/// <param name="importedBy">Optional operator identity; not currently recorded in results.</param>
/// <returns>Aggregate counts plus per-profile details.</returns>
public ImportResult Import(
    ImportProfilesRequest request,
    string? importedBy = null)
{
    ArgumentNullException.ThrowIfNull(request);
    ArgumentNullException.ThrowIfNull(request.Bundle);

    var bundle = request.Bundle;
    var details = new List<ImportProfileResult>();
    var importedCount = 0;
    var skippedCount = 0;
    var errorCount = 0;
    bool? signatureVerified = null;

    // Verify signature if requested. NOTE(review): when VerifySignature is true
    // but the bundle carries no signature, import proceeds with
    // signatureVerified left null - confirm that is the intended policy.
    if (request.VerifySignature && bundle.Signature != null)
    {
        var verification = VerifySignature(bundle);
        signatureVerified = verification.IsValid;

        if (!verification.IsValid)
        {
            // A bad signature fails the whole bundle; no profiles are imported.
            return new ImportResult(
                BundleId: bundle.BundleId,
                ImportedCount: 0,
                SkippedCount: 0,
                ErrorCount: bundle.Profiles.Count,
                Details: bundle.Profiles.Select(p => new ImportProfileResult(
                    ProfileId: p.Profile.Id,
                    Version: p.Profile.Version,
                    Status: ImportStatus.Error,
                    Message: $"Signature verification failed: {verification.Error}"
                )).ToList().AsReadOnly(),
                SignatureVerified: false);
        }
    }

    foreach (var exported in bundle.Profiles)
    {
        try
        {
            // Verify content hash (per-profile tamper check).
            var computedHash = _hasher.ComputeContentHash(exported.Profile);
            if (computedHash != exported.ContentHash)
            {
                details.Add(new ImportProfileResult(
                    ProfileId: exported.Profile.Id,
                    Version: exported.Profile.Version,
                    Status: ImportStatus.Error,
                    Message: "Content hash mismatch - profile may have been tampered with."));
                errorCount++;
                continue;
            }

            // Check if profile already exists; skip unless overwrite was requested.
            var existing = _profileLookup?.Invoke(exported.Profile.Id);
            if (existing != null && !request.OverwriteExisting)
            {
                details.Add(new ImportProfileResult(
                    ProfileId: exported.Profile.Id,
                    Version: exported.Profile.Version,
                    Status: ImportStatus.Skipped,
                    Message: "Profile already exists and overwrite not enabled."));
                skippedCount++;
                continue;
            }

            // Save profile (no-op when no save delegate is wired).
            _profileSave?.Invoke(exported.Profile);

            // Both Updated and Imported count toward importedCount.
            var status = existing != null ? ImportStatus.Updated : ImportStatus.Imported;
            details.Add(new ImportProfileResult(
                ProfileId: exported.Profile.Id,
                Version: exported.Profile.Version,
                Status: status,
                Message: null));
            importedCount++;
        }
        catch (Exception ex)
        {
            // Per-profile failures are recorded and do not abort the rest of the bundle.
            details.Add(new ImportProfileResult(
                ProfileId: exported.Profile.Id,
                Version: exported.Profile.Version,
                Status: ImportStatus.Error,
                Message: ex.Message));
            errorCount++;
        }
    }

    return new ImportResult(
        BundleId: bundle.BundleId,
        ImportedCount: importedCount,
        SkippedCount: skippedCount,
        ErrorCount: errorCount,
        Details: details.AsReadOnly(),
        SignatureVerified: signatureVerified);
}
/// <summary>
/// Verifies the signature of a bundle by recomputing the HMAC over the
/// canonical signature payload and comparing in constant time.
/// </summary>
/// <param name="bundle">Bundle whose signature to verify.</param>
/// <returns>Verification result; this method never throws.</returns>
public SignatureVerificationResult VerifySignature(RiskProfileBundle bundle)
{
    ArgumentNullException.ThrowIfNull(bundle);

    if (bundle.Signature == null)
    {
        return new SignatureVerificationResult(
            IsValid: false,
            Algorithm: null,
            KeyId: null,
            SignedAt: null,
            Error: "Bundle has no signature.");
    }

    try
    {
        // Resolve the signing key: the bundle's key id when present, otherwise
        // the built-in default key.
        var key = bundle.Signature.KeyId != null
            ? _keyLookup?.Invoke(bundle.Signature.KeyId)
            : GetDefaultSigningKey();

        if (string.IsNullOrEmpty(key))
        {
            return new SignatureVerificationResult(
                IsValid: false,
                Algorithm: bundle.Signature.Algorithm,
                KeyId: bundle.Signature.KeyId,
                SignedAt: bundle.Signature.SignedAt,
                Error: "Signing key not found.");
        }

        // Recompute the expected signature and compare as raw bytes in constant
        // time, so the comparison cannot leak how many leading characters
        // matched (string.Equals short-circuits on the first difference).
        // A non-hex signature value makes FromHexString throw FormatException,
        // which the catch below reports as a verification error.
        var data = ComputeSignatureData(bundle.Profiles.ToList(), bundle.Metadata);
        var expectedBytes = Convert.FromHexString(ComputeHmacSignature(data, key));
        var actualBytes = Convert.FromHexString(bundle.Signature.Value);
        var isValid = CryptographicOperations.FixedTimeEquals(expectedBytes, actualBytes);

        return new SignatureVerificationResult(
            IsValid: isValid,
            Algorithm: bundle.Signature.Algorithm,
            KeyId: bundle.Signature.KeyId,
            SignedAt: bundle.Signature.SignedAt,
            Error: isValid ? null : "Signature does not match.");
    }
    catch (Exception ex)
    {
        return new SignatureVerificationResult(
            IsValid: false,
            Algorithm: bundle.Signature.Algorithm,
            KeyId: bundle.Signature.KeyId,
            SignedAt: bundle.Signature.SignedAt,
            Error: $"Verification error: {ex.Message}");
    }
}
/// <summary>
/// Serializes a bundle to JSON.
/// </summary>
/// <param name="bundle">Bundle to serialize.</param>
/// <returns>JSON text produced with the service's shared <c>JsonOptions</c>.</returns>
public string SerializeBundle(RiskProfileBundle bundle) =>
    JsonSerializer.Serialize(bundle, JsonOptions);
/// <summary>
/// Deserializes a bundle from JSON.
/// </summary>
/// <param name="json">JSON text previously produced by <see cref="SerializeBundle"/>.</param>
/// <returns>The bundle, or <c>null</c> when the JSON document is the literal <c>null</c>.</returns>
public RiskProfileBundle? DeserializeBundle(string json) =>
    JsonSerializer.Deserialize<RiskProfileBundle>(json, JsonOptions);
/// <summary>
/// Produces an HMAC signature over the bundle's profiles and metadata.
/// </summary>
/// <param name="profiles">Profiles included in the bundle.</param>
/// <param name="metadata">Bundle metadata (total hash, profile count).</param>
/// <param name="keyId">Optional key id resolved through the key lookup; <c>null</c> selects the default key.</param>
/// <param name="signedBy">Optional identity recorded on the signature.</param>
/// <param name="signedAt">Timestamp recorded on the signature.</param>
private BundleSignature SignBundle(
    IReadOnlyList<ExportedProfile> profiles,
    BundleMetadata metadata,
    string? keyId,
    string? signedBy,
    DateTimeOffset signedAt)
{
    // Explicit key ids go through the lookup; no id means the default key.
    var key = keyId is null ? GetDefaultSigningKey() : _keyLookup?.Invoke(keyId);

    // Development/testing fallback when the lookup yields nothing.
    if (string.IsNullOrEmpty(key))
    {
        key = GetDefaultSigningKey();
    }

    var payload = ComputeSignatureData(profiles.ToList(), metadata);

    return new BundleSignature(
        Algorithm: DefaultAlgorithm,
        KeyId: keyId,
        Value: ComputeHmacSignature(payload, key),
        SignedAt: signedAt,
        SignedBy: signedBy);
}
/// <summary>
/// Builds the canonical, pipe-delimited payload that gets signed:
/// each profile content hash (sorted by id, then version), the total hash,
/// and the profile count.
/// </summary>
private static string ComputeSignatureData(List<ExportedProfile> profiles, BundleMetadata metadata)
{
    // Deterministic ordering so the signature never depends on input order.
    var parts = profiles
        .OrderBy(p => p.Profile.Id)
        .ThenBy(p => p.Profile.Version)
        .Select(p => p.ContentHash)
        .Append(metadata.TotalHash)
        .Append(metadata.ProfileCount.ToString());

    return string.Join("|", parts);
}
/// <summary>
/// Computes HMAC-SHA256 over <paramref name="data"/> with the UTF-8 bytes of
/// <paramref name="key"/>, returned as lower-case hex.
/// </summary>
private static string ComputeHmacSignature(string data, string key)
{
    using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(key));
    return Convert.ToHexStringLower(hmac.ComputeHash(Encoding.UTF8.GetBytes(data)));
}
/// <summary>
/// Computes the bundle's total hash: SHA-256 (lower-case hex) over the
/// pipe-joined content hashes, sorted by profile id then version.
/// </summary>
private string ComputeTotalHash(IReadOnlyList<ExportedProfile> profiles)
{
    var hashes = profiles
        .OrderBy(p => p.Profile.Id)
        .ThenBy(p => p.Profile.Version)
        .Select(p => p.ContentHash);

    var payload = Encoding.UTF8.GetBytes(string.Join("|", hashes));
    return Convert.ToHexStringLower(SHA256.HashData(payload));
}
/// <summary>
/// Generates a unique bundle id of the form <c>rpb-</c> plus the first 16 hex
/// characters of a SHA-256 over the timestamp and a random GUID.
/// </summary>
private static string GenerateBundleId(DateTimeOffset timestamp)
{
    var material = Encoding.UTF8.GetBytes($"{timestamp:O}|{Guid.NewGuid()}");
    var digest = SHA256.HashData(material);
    return string.Concat("rpb-", Convert.ToHexStringLower(digest).AsSpan(0, 16));
}
/// <summary>
/// Reports the exporting assembly's version, falling back to "1.0.0" when
/// no version is stamped on the assembly.
/// </summary>
private static string GetSourceVersion()
{
    var assemblyVersion = typeof(ProfileExportService).Assembly.GetName().Version;
    return assemblyVersion is null ? "1.0.0" : assemblyVersion.ToString();
}
/// <summary>
/// Returns the built-in fallback signing key.
/// </summary>
/// <remarks>
/// Placeholder for development/testing only; production deployments must
/// supply a real key via the key-lookup delegate / secure key management.
/// </remarks>
private static string GetDefaultSigningKey() =>
    "stellaops-default-signing-key-change-in-production";
}

View File

@@ -480,8 +480,10 @@ public sealed class RiskProfileLifecycleService
foreach (var key in allKeys)
{
var fromHas = from.Metadata?.TryGetValue(key, out var fromValue) ?? false;
var toHas = to.Metadata?.TryGetValue(key, out var toValue) ?? false;
object? fromValue = null;
object? toValue = null;
var fromHas = from.Metadata?.TryGetValue(key, out fromValue) ?? false;
var toHas = to.Metadata?.TryGetValue(key, out toValue) ?? false;
if (fromHas != toHas || (fromHas && toHas && !Equals(fromValue, toValue)))
{

View File

@@ -0,0 +1,266 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.RiskProfile.Overrides;
/// <summary>
/// An override with full audit metadata.
/// </summary>
/// <param name="OverrideId">Unique id (generated as "ovr-" plus a hash prefix).</param>
/// <param name="ProfileId">Risk profile this override is attached to.</param>
/// <param name="OverrideType">What aspect of the evaluation is overridden.</param>
/// <param name="Predicate">Conditions under which the override applies.</param>
/// <param name="Action">Adjustment applied when the predicate matches.</param>
/// <param name="Priority">Higher values win when several overrides match.</param>
/// <param name="Audit">Who created/approved/modified the override, and why.</param>
/// <param name="Status">Lifecycle status (active, disabled, expired, superseded).</param>
/// <param name="Expiration">Optional instant after which the override no longer applies.</param>
/// <param name="Tags">Optional free-form labels.</param>
public sealed record AuditedOverride(
    [property: JsonPropertyName("override_id")] string OverrideId,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("override_type")] OverrideType OverrideType,
    [property: JsonPropertyName("predicate")] OverridePredicate Predicate,
    [property: JsonPropertyName("action")] OverrideAction Action,
    [property: JsonPropertyName("priority")] int Priority,
    [property: JsonPropertyName("audit")] OverrideAuditMetadata Audit,
    [property: JsonPropertyName("status")] OverrideStatus Status,
    [property: JsonPropertyName("expiration")] DateTimeOffset? Expiration = null,
    [property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags = null);
/// <summary>
/// Type of override.
/// </summary>
/// <remarks>
/// NOTE: [JsonPropertyName] is ignored on enum members by System.Text.Json;
/// [JsonStringEnumMemberName] is what actually maps the wire value when a
/// JsonStringEnumConverter is in use.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<OverrideType>))]
public enum OverrideType
{
    /// <summary>
    /// Override the computed severity.
    /// </summary>
    [JsonStringEnumMemberName("severity")]
    Severity,

    /// <summary>
    /// Override the recommended action/decision.
    /// </summary>
    [JsonStringEnumMemberName("decision")]
    Decision,

    /// <summary>
    /// Override a signal weight.
    /// </summary>
    [JsonStringEnumMemberName("weight")]
    Weight,

    /// <summary>
    /// Exception that exempts from policy.
    /// </summary>
    [JsonStringEnumMemberName("exception")]
    Exception
}
/// <summary>
/// Status of an override.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<OverrideStatus>))]
public enum OverrideStatus
{
    /// <summary>Override is in effect.</summary>
    [JsonStringEnumMemberName("active")]
    Active,

    /// <summary>Override is switched off (e.g. pending approval or manually disabled).</summary>
    [JsonStringEnumMemberName("disabled")]
    Disabled,

    /// <summary>Override passed its expiration timestamp.</summary>
    [JsonStringEnumMemberName("expired")]
    Expired,

    /// <summary>Override was replaced by a newer one.</summary>
    [JsonStringEnumMemberName("superseded")]
    Superseded
}
/// <summary>
/// Predicate for when an override applies.
/// </summary>
/// <param name="Conditions">Conditions evaluated against finding signals; an empty list matches everything.</param>
/// <param name="MatchMode">Whether all conditions or any single condition must match.</param>
public sealed record OverridePredicate(
    [property: JsonPropertyName("conditions")] IReadOnlyList<OverrideCondition> Conditions,
    [property: JsonPropertyName("match_mode")] PredicateMatchMode MatchMode = PredicateMatchMode.All);
/// <summary>
/// A single condition in an override predicate.
/// </summary>
/// <param name="Field">Name of the signal/field the condition reads.</param>
/// <param name="Operator">Comparison operator applied between the signal and <paramref name="Value"/>.</param>
/// <param name="Value">Comparison operand; may be a <c>JsonElement</c> after deserialization.</param>
public sealed record OverrideCondition(
    [property: JsonPropertyName("field")] string Field,
    [property: JsonPropertyName("operator")] ConditionOperator Operator,
    [property: JsonPropertyName("value")] object? Value);
/// <summary>
/// Condition operator.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ConditionOperator>))]
public enum ConditionOperator
{
    [JsonStringEnumMemberName("eq")]
    Equals,

    [JsonStringEnumMemberName("neq")]
    NotEquals,

    [JsonStringEnumMemberName("gt")]
    GreaterThan,

    [JsonStringEnumMemberName("gte")]
    GreaterThanOrEqual,

    [JsonStringEnumMemberName("lt")]
    LessThan,

    [JsonStringEnumMemberName("lte")]
    LessThanOrEqual,

    [JsonStringEnumMemberName("in")]
    In,

    [JsonStringEnumMemberName("nin")]
    NotIn,

    [JsonStringEnumMemberName("contains")]
    Contains,

    [JsonStringEnumMemberName("regex")]
    Regex
}
/// <summary>
/// Predicate match mode.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<PredicateMatchMode>))]
public enum PredicateMatchMode
{
    /// <summary>
    /// All conditions must match.
    /// </summary>
    [JsonStringEnumMemberName("all")]
    All,

    /// <summary>
    /// Any condition must match.
    /// </summary>
    [JsonStringEnumMemberName("any")]
    Any
}
/// <summary>
/// Action to take when override matches.
/// </summary>
/// <param name="ActionType">Kind of adjustment to apply.</param>
/// <param name="Severity">Target severity; meaningful for <see cref="OverrideActionType.SetSeverity"/>.</param>
/// <param name="Decision">Target decision; meaningful for <see cref="OverrideActionType.SetDecision"/>.</param>
/// <param name="WeightFactor">Multiplier; meaningful for <see cref="OverrideActionType.AdjustWeight"/>.</param>
/// <param name="Reason">Optional human-readable rationale attached to the action.</param>
public sealed record OverrideAction(
    [property: JsonPropertyName("action_type")] OverrideActionType ActionType,
    [property: JsonPropertyName("severity")] RiskSeverity? Severity = null,
    [property: JsonPropertyName("decision")] RiskAction? Decision = null,
    [property: JsonPropertyName("weight_factor")] double? WeightFactor = null,
    [property: JsonPropertyName("reason")] string? Reason = null);
/// <summary>
/// Type of override action.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<OverrideActionType>))]
public enum OverrideActionType
{
    [JsonStringEnumMemberName("set_severity")]
    SetSeverity,

    [JsonStringEnumMemberName("set_decision")]
    SetDecision,

    [JsonStringEnumMemberName("adjust_weight")]
    AdjustWeight,

    [JsonStringEnumMemberName("exempt")]
    Exempt,

    [JsonStringEnumMemberName("suppress")]
    Suppress
}
/// <summary>
/// Audit metadata for an override.
/// </summary>
/// <param name="CreatedAt">When the override was created.</param>
/// <param name="CreatedBy">Identity of the creator, when known.</param>
/// <param name="Reason">Mandatory short reason recorded at creation time.</param>
/// <param name="Justification">Optional longer justification.</param>
/// <param name="TicketRef">Optional ticket/issue reference.</param>
/// <param name="ApprovedBy">Approver identity, once approved.</param>
/// <param name="ApprovedAt">Approval timestamp, once approved.</param>
/// <param name="ReviewRequired">When true the override must be approved before becoming active.</param>
/// <param name="LastModifiedAt">Timestamp of the most recent lifecycle change.</param>
/// <param name="LastModifiedBy">Identity behind the most recent lifecycle change.</param>
public sealed record OverrideAuditMetadata(
    [property: JsonPropertyName("created_at")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("created_by")] string? CreatedBy,
    [property: JsonPropertyName("reason")] string Reason,
    [property: JsonPropertyName("justification")] string? Justification,
    [property: JsonPropertyName("ticket_ref")] string? TicketRef,
    [property: JsonPropertyName("approved_by")] string? ApprovedBy,
    [property: JsonPropertyName("approved_at")] DateTimeOffset? ApprovedAt,
    [property: JsonPropertyName("review_required")] bool ReviewRequired = false,
    [property: JsonPropertyName("last_modified_at")] DateTimeOffset? LastModifiedAt = null,
    [property: JsonPropertyName("last_modified_by")] string? LastModifiedBy = null);
/// <summary>
/// Request to create an override.
/// </summary>
/// <param name="ProfileId">Risk profile to attach the override to (required).</param>
/// <param name="OverrideType">What aspect of the evaluation is overridden.</param>
/// <param name="Predicate">Conditions under which the override applies.</param>
/// <param name="Action">Adjustment applied when the predicate matches.</param>
/// <param name="Priority">Optional priority; the service substitutes its default when null.</param>
/// <param name="Reason">Mandatory short reason for audit purposes.</param>
/// <param name="Justification">Optional longer justification.</param>
/// <param name="TicketRef">Optional ticket/issue reference.</param>
/// <param name="Expiration">Optional instant after which the override no longer applies.</param>
/// <param name="Tags">Optional free-form labels.</param>
/// <param name="ReviewRequired">When true the override starts disabled until approved.</param>
public sealed record CreateOverrideRequest(
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("override_type")] OverrideType OverrideType,
    [property: JsonPropertyName("predicate")] OverridePredicate Predicate,
    [property: JsonPropertyName("action")] OverrideAction Action,
    [property: JsonPropertyName("priority")] int? Priority,
    [property: JsonPropertyName("reason")] string Reason,
    [property: JsonPropertyName("justification")] string? Justification,
    [property: JsonPropertyName("ticket_ref")] string? TicketRef,
    [property: JsonPropertyName("expiration")] DateTimeOffset? Expiration,
    [property: JsonPropertyName("tags")] IReadOnlyList<string>? Tags,
    [property: JsonPropertyName("review_required")] bool ReviewRequired = false);
/// <summary>
/// Result of override conflict validation.
/// </summary>
/// <param name="HasConflicts">True when at least one conflict was detected.</param>
/// <param name="Conflicts">Detected conflicts with suggested resolutions.</param>
/// <param name="Warnings">Non-blocking advisories (e.g. overlapping predicates).</param>
public sealed record OverrideConflictValidation(
    [property: JsonPropertyName("has_conflicts")] bool HasConflicts,
    [property: JsonPropertyName("conflicts")] IReadOnlyList<OverrideConflict> Conflicts,
    [property: JsonPropertyName("warnings")] IReadOnlyList<string> Warnings);
/// <summary>
/// Details of a conflict between overrides.
/// </summary>
/// <param name="OverrideId">Id of the existing override that conflicts.</param>
/// <param name="ConflictType">Category of the conflict.</param>
/// <param name="Description">Human-readable explanation.</param>
/// <param name="Resolution">Suggested way to resolve the conflict.</param>
public sealed record OverrideConflict(
    [property: JsonPropertyName("override_id")] string OverrideId,
    [property: JsonPropertyName("conflict_type")] ConflictType ConflictType,
    [property: JsonPropertyName("description")] string Description,
    [property: JsonPropertyName("resolution")] ConflictResolution Resolution);
/// <summary>
/// Type of conflict.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ConflictType>))]
public enum ConflictType
{
    [JsonStringEnumMemberName("same_predicate")]
    SamePredicate,

    [JsonStringEnumMemberName("overlapping_predicate")]
    OverlappingPredicate,

    [JsonStringEnumMemberName("contradictory_action")]
    ContradictoryAction,

    [JsonStringEnumMemberName("priority_collision")]
    PriorityCollision
}
/// <summary>
/// Resolution for a conflict.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ConflictResolution>))]
public enum ConflictResolution
{
    [JsonStringEnumMemberName("higher_priority_wins")]
    HigherPriorityWins,

    [JsonStringEnumMemberName("newer_wins")]
    NewerWins,

    [JsonStringEnumMemberName("manual_review_required")]
    ManualReviewRequired
}
/// <summary>
/// Override application record for audit trail.
/// </summary>
/// <param name="OverrideId">Override that was applied.</param>
/// <param name="FindingId">Finding the override was applied to.</param>
/// <param name="AppliedAt">When the application happened.</param>
/// <param name="OriginalValue">Value before the override.</param>
/// <param name="AppliedValue">Value after the override.</param>
/// <param name="Context">Additional free-form context captured at application time.</param>
public sealed record OverrideApplicationRecord(
    [property: JsonPropertyName("override_id")] string OverrideId,
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("applied_at")] DateTimeOffset AppliedAt,
    [property: JsonPropertyName("original_value")] object? OriginalValue,
    [property: JsonPropertyName("applied_value")] object? AppliedValue,
    [property: JsonPropertyName("context")] Dictionary<string, object?> Context);

View File

@@ -0,0 +1,570 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace StellaOps.Policy.RiskProfile.Overrides;
/// <summary>
/// Service for managing overrides with audit metadata and conflict validation.
/// </summary>
/// <remarks>
/// Storage is purely in-memory (concurrent dictionaries); nothing is persisted.
/// The mutable <see cref="List{T}"/> values held inside the index and history
/// dictionaries are guarded by locking the list instance itself.
/// </remarks>
public sealed class OverrideService
{
    // Injected clock; defaults to the system clock (testable via TimeProvider).
    private readonly TimeProvider _timeProvider;
    // Override id -> override (ids compared case-insensitively).
    private readonly ConcurrentDictionary<string, AuditedOverride> _overrides;
    // Profile id -> ids of the overrides attached to that profile.
    private readonly ConcurrentDictionary<string, List<string>> _profileIndex;
    // Override id -> application audit trail (capped at 1000 entries per override).
    private readonly ConcurrentDictionary<string, List<OverrideApplicationRecord>> _applicationHistory;

    public OverrideService(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
        _overrides = new ConcurrentDictionary<string, AuditedOverride>(StringComparer.OrdinalIgnoreCase);
        _profileIndex = new ConcurrentDictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);
        _applicationHistory = new ConcurrentDictionary<string, List<OverrideApplicationRecord>>(StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Creates a new override with audit metadata.
    /// </summary>
    /// <param name="request">Creation parameters; ProfileId and Reason are mandatory.</param>
    /// <param name="createdBy">Optional identity recorded in the audit metadata.</param>
    /// <returns>The stored override; starts disabled when review is required.</returns>
    /// <exception cref="ArgumentException">When ProfileId or Reason is missing.</exception>
    public AuditedOverride Create(CreateOverrideRequest request, string? createdBy = null)
    {
        ArgumentNullException.ThrowIfNull(request);
        if (string.IsNullOrWhiteSpace(request.ProfileId))
        {
            throw new ArgumentException("ProfileId is required.");
        }
        if (string.IsNullOrWhiteSpace(request.Reason))
        {
            throw new ArgumentException("Reason is required for audit purposes.");
        }
        var now = _timeProvider.GetUtcNow();
        var overrideId = GenerateOverrideId(request, now);
        var audit = new OverrideAuditMetadata(
            CreatedAt: now,
            CreatedBy: createdBy,
            Reason: request.Reason,
            Justification: request.Justification,
            TicketRef: request.TicketRef,
            ApprovedBy: null,
            ApprovedAt: null,
            ReviewRequired: request.ReviewRequired);
        var auditedOverride = new AuditedOverride(
            OverrideId: overrideId,
            ProfileId: request.ProfileId,
            OverrideType: request.OverrideType,
            Predicate: request.Predicate,
            Action: request.Action,
            Priority: request.Priority ?? 100, // default priority when none supplied
            Audit: audit,
            // Overrides that require review start disabled until Approve() is called.
            Status: request.ReviewRequired ? OverrideStatus.Disabled : OverrideStatus.Active,
            Expiration: request.Expiration,
            Tags: request.Tags);
        _overrides[overrideId] = auditedOverride;
        IndexOverride(auditedOverride);
        return auditedOverride;
    }

    /// <summary>
    /// Gets an override by ID.
    /// </summary>
    public AuditedOverride? Get(string overrideId)
    {
        return _overrides.TryGetValue(overrideId, out var over) ? over : null;
    }

    /// <summary>
    /// Lists overrides for a profile.
    /// </summary>
    /// <param name="profileId">Profile whose overrides are returned.</param>
    /// <param name="includeInactive">When false, disabled/expired overrides are filtered out.</param>
    /// <returns>Overrides ordered by priority (desc), then creation time (desc).</returns>
    public IReadOnlyList<AuditedOverride> ListByProfile(string profileId, bool includeInactive = false)
    {
        if (!_profileIndex.TryGetValue(profileId, out var ids))
        {
            return Array.Empty<AuditedOverride>();
        }
        var now = _timeProvider.GetUtcNow();
        // Lock the id list while snapshotting; the LINQ chain is materialized
        // (ToList) before the lock is released.
        lock (ids)
        {
            var overrides = ids
                .Select(id => _overrides.TryGetValue(id, out var o) ? o : null)
                .Where(o => o != null)
                .Cast<AuditedOverride>();
            if (!includeInactive)
            {
                overrides = overrides.Where(o => IsActive(o, now));
            }
            return overrides
                .OrderByDescending(o => o.Priority)
                .ThenByDescending(o => o.Audit.CreatedAt)
                .ToList()
                .AsReadOnly();
        }
    }

    /// <summary>
    /// Validates an override for conflicts with existing overrides.
    /// </summary>
    /// <param name="request">Proposed override; compared only against active overrides of the same profile.</param>
    public OverrideConflictValidation ValidateConflicts(CreateOverrideRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);
        var conflicts = new List<OverrideConflict>();
        var warnings = new List<string>();
        var existingOverrides = ListByProfile(request.ProfileId, includeInactive: false);
        foreach (var existing in existingOverrides)
        {
            // Check for same predicate
            if (PredicatesEqual(request.Predicate, existing.Predicate))
            {
                conflicts.Add(new OverrideConflict(
                    OverrideId: existing.OverrideId,
                    ConflictType: ConflictType.SamePredicate,
                    Description: $"Override {existing.OverrideId} has identical predicate conditions.",
                    Resolution: ConflictResolution.HigherPriorityWins));
            }
            // Check for overlapping predicate
            else if (PredicatesOverlap(request.Predicate, existing.Predicate))
            {
                warnings.Add($"Override may overlap with {existing.OverrideId}. Consider reviewing priority settings.");
                // Check for contradictory actions
                if (ActionsContradict(request.Action, existing.Action))
                {
                    conflicts.Add(new OverrideConflict(
                        OverrideId: existing.OverrideId,
                        ConflictType: ConflictType.ContradictoryAction,
                        Description: $"Override {existing.OverrideId} has contradictory action for overlapping conditions.",
                        Resolution: ConflictResolution.ManualReviewRequired));
                }
            }
            // Check for priority collision
            if (request.Priority == existing.Priority && PredicatesOverlap(request.Predicate, existing.Predicate))
            {
                conflicts.Add(new OverrideConflict(
                    OverrideId: existing.OverrideId,
                    ConflictType: ConflictType.PriorityCollision,
                    Description: $"Override {existing.OverrideId} has same priority and overlapping conditions.",
                    Resolution: ConflictResolution.NewerWins));
            }
        }
        return new OverrideConflictValidation(
            HasConflicts: conflicts.Count > 0,
            Conflicts: conflicts.AsReadOnly(),
            Warnings: warnings.AsReadOnly());
    }

    /// <summary>
    /// Approves an override that requires review.
    /// </summary>
    /// <param name="overrideId">Override to approve.</param>
    /// <param name="approvedBy">Approver identity recorded in the audit metadata.</param>
    /// <returns>The activated override, or null when the id is unknown.</returns>
    /// <exception cref="InvalidOperationException">When the override does not require review.</exception>
    public AuditedOverride? Approve(string overrideId, string approvedBy)
    {
        if (!_overrides.TryGetValue(overrideId, out var existing))
        {
            return null;
        }
        if (!existing.Audit.ReviewRequired)
        {
            throw new InvalidOperationException("Override does not require approval.");
        }
        var now = _timeProvider.GetUtcNow();
        // Records are immutable: approval produces a copy with updated status/audit.
        var updated = existing with
        {
            Status = OverrideStatus.Active,
            Audit = existing.Audit with
            {
                ApprovedBy = approvedBy,
                ApprovedAt = now,
                LastModifiedAt = now,
                LastModifiedBy = approvedBy
            }
        };
        _overrides[overrideId] = updated;
        return updated;
    }

    /// <summary>
    /// Disables an override.
    /// </summary>
    /// <param name="overrideId">Override to disable.</param>
    /// <param name="disabledBy">Identity recorded as the last modifier.</param>
    /// <param name="reason">Optional reason for disabling.</param>
    /// <returns>The disabled override, or null when the id is unknown.</returns>
    public AuditedOverride? Disable(string overrideId, string disabledBy, string? reason = null)
    {
        if (!_overrides.TryGetValue(overrideId, out var existing))
        {
            return null;
        }
        var now = _timeProvider.GetUtcNow();
        // NOTE(review): 'reason' is currently unused — OverrideAuditMetadata has
        // no field to carry it; consider persisting it or dropping the parameter.
        var updated = existing with
        {
            Status = OverrideStatus.Disabled,
            Audit = existing.Audit with
            {
                LastModifiedAt = now,
                LastModifiedBy = disabledBy
            }
        };
        _overrides[overrideId] = updated;
        return updated;
    }

    /// <summary>
    /// Deletes an override.
    /// </summary>
    /// <returns>True when the override existed and was removed.</returns>
    public bool Delete(string overrideId)
    {
        if (_overrides.TryRemove(overrideId, out var removed))
        {
            RemoveFromIndex(removed);
            return true;
        }
        return false;
    }

    /// <summary>
    /// Records an override application for audit trail.
    /// </summary>
    /// <param name="overrideId">Override that was applied.</param>
    /// <param name="findingId">Finding it was applied to.</param>
    /// <param name="originalValue">Value before the override.</param>
    /// <param name="appliedValue">Value after the override.</param>
    /// <param name="context">Optional extra context stored with the record.</param>
    public void RecordApplication(
        string overrideId,
        string findingId,
        object? originalValue,
        object? appliedValue,
        Dictionary<string, object?>? context = null)
    {
        var record = new OverrideApplicationRecord(
            OverrideId: overrideId,
            FindingId: findingId,
            AppliedAt: _timeProvider.GetUtcNow(),
            OriginalValue: originalValue,
            AppliedValue: appliedValue,
            Context: context ?? new Dictionary<string, object?>());
        var history = _applicationHistory.GetOrAdd(overrideId, _ => new List<OverrideApplicationRecord>());
        lock (history)
        {
            history.Add(record);
            // Keep only last 1000 records per override
            if (history.Count > 1000)
            {
                history.RemoveRange(0, history.Count - 1000);
            }
        }
    }

    /// <summary>
    /// Gets application history for an override.
    /// </summary>
    /// <param name="overrideId">Override whose history is returned.</param>
    /// <param name="limit">Maximum number of records, newest first.</param>
    public IReadOnlyList<OverrideApplicationRecord> GetApplicationHistory(string overrideId, int limit = 100)
    {
        if (!_applicationHistory.TryGetValue(overrideId, out var history))
        {
            return Array.Empty<OverrideApplicationRecord>();
        }
        // Snapshot inside the lock; ToList materializes before releasing it.
        lock (history)
        {
            return history
                .OrderByDescending(r => r.AppliedAt)
                .Take(limit)
                .ToList()
                .AsReadOnly();
        }
    }

    /// <summary>
    /// Evaluates whether a finding matches an override's predicate.
    /// </summary>
    /// <param name="predicate">Predicate to evaluate.</param>
    /// <param name="signals">Finding signals keyed by field name.</param>
    /// <returns>True when the predicate matches; an empty condition list always matches.</returns>
    public bool EvaluatePredicate(OverridePredicate predicate, Dictionary<string, object?> signals)
    {
        if (predicate.Conditions.Count == 0)
        {
            return true;
        }
        var results = predicate.Conditions.Select(c => EvaluateCondition(c, signals));
        return predicate.MatchMode == PredicateMatchMode.All
            ? results.All(r => r)
            : results.Any(r => r);
    }

    // Evaluates one condition; a signal missing from the map fails the condition.
    private bool EvaluateCondition(OverrideCondition condition, Dictionary<string, object?> signals)
    {
        if (!signals.TryGetValue(condition.Field, out var actualValue))
        {
            return false;
        }
        return condition.Operator switch
        {
            ConditionOperator.Equals => ValuesEqual(actualValue, condition.Value),
            ConditionOperator.NotEquals => !ValuesEqual(actualValue, condition.Value),
            ConditionOperator.GreaterThan => CompareValues(actualValue, condition.Value) > 0,
            ConditionOperator.GreaterThanOrEqual => CompareValues(actualValue, condition.Value) >= 0,
            ConditionOperator.LessThan => CompareValues(actualValue, condition.Value) < 0,
            ConditionOperator.LessThanOrEqual => CompareValues(actualValue, condition.Value) <= 0,
            ConditionOperator.In => IsInCollection(actualValue, condition.Value),
            ConditionOperator.NotIn => !IsInCollection(actualValue, condition.Value),
            ConditionOperator.Contains => ContainsValue(actualValue, condition.Value),
            ConditionOperator.Regex => MatchesRegex(actualValue, condition.Value),
            _ => false
        };
    }

    // An override is active when its status is Active and its expiration (if any)
    // has not passed. Note: the stored Status is not auto-transitioned to Expired.
    private bool IsActive(AuditedOverride over, DateTimeOffset asOf)
    {
        if (over.Status != OverrideStatus.Active)
        {
            return false;
        }
        if (over.Expiration.HasValue && asOf > over.Expiration.Value)
        {
            return false;
        }
        return true;
    }

    // Order-insensitive structural comparison: conditions are sorted by
    // field/operator before pairwise comparison.
    private static bool PredicatesEqual(OverridePredicate a, OverridePredicate b)
    {
        if (a.MatchMode != b.MatchMode)
        {
            return false;
        }
        if (a.Conditions.Count != b.Conditions.Count)
        {
            return false;
        }
        var aConditions = a.Conditions.OrderBy(c => c.Field).ThenBy(c => c.Operator.ToString()).ToList();
        var bConditions = b.Conditions.OrderBy(c => c.Field).ThenBy(c => c.Operator.ToString()).ToList();
        for (var i = 0; i < aConditions.Count; i++)
        {
            if (aConditions[i].Field != bConditions[i].Field ||
                aConditions[i].Operator != bConditions[i].Operator ||
                !ValuesEqual(aConditions[i].Value, bConditions[i].Value))
            {
                return false;
            }
        }
        return true;
    }

    private static bool PredicatesOverlap(OverridePredicate a, OverridePredicate b)
    {
        // Simplified overlap check: if any fields match, consider them overlapping
        var aFields = a.Conditions.Select(c => c.Field).ToHashSet(StringComparer.OrdinalIgnoreCase);
        var bFields = b.Conditions.Select(c => c.Field).ToHashSet(StringComparer.OrdinalIgnoreCase);
        return aFields.Overlaps(bFields);
    }

    private static bool ActionsContradict(OverrideAction a, OverrideAction b)
    {
        // Severity actions contradict if they set different severities
        if (a.ActionType == OverrideActionType.SetSeverity &&
            b.ActionType == OverrideActionType.SetSeverity &&
            a.Severity != b.Severity)
        {
            return true;
        }
        // Decision actions contradict if they set different decisions
        if (a.ActionType == OverrideActionType.SetDecision &&
            b.ActionType == OverrideActionType.SetDecision &&
            a.Decision != b.Decision)
        {
            return true;
        }
        // Exempt and Suppress contradict with any severity/decision setting
        if ((a.ActionType == OverrideActionType.Exempt || a.ActionType == OverrideActionType.Suppress) &&
            (b.ActionType == OverrideActionType.SetSeverity || b.ActionType == OverrideActionType.SetDecision))
        {
            return true;
        }
        if ((b.ActionType == OverrideActionType.Exempt || b.ActionType == OverrideActionType.Suppress) &&
            (a.ActionType == OverrideActionType.SetSeverity || a.ActionType == OverrideActionType.SetDecision))
        {
            return true;
        }
        return false;
    }

    // Loose equality: JsonElements compare by raw JSON text; everything else is
    // compared case-insensitively via its string form.
    private static bool ValuesEqual(object? a, object? b)
    {
        if (a == null && b == null) return true;
        if (a == null || b == null) return false;
        if (a is JsonElement jeA && b is JsonElement jeB)
        {
            return jeA.GetRawText() == jeB.GetRawText();
        }
        var aStr = ConvertToString(a);
        var bStr = ConvertToString(b);
        return string.Equals(aStr, bStr, StringComparison.OrdinalIgnoreCase);
    }

    // Numeric comparison when both sides convert to double; otherwise falls back
    // to case-insensitive string ordering.
    private static int CompareValues(object? a, object? b)
    {
        var aNum = ConvertToDouble(a);
        var bNum = ConvertToDouble(b);
        if (aNum.HasValue && bNum.HasValue)
        {
            return aNum.Value.CompareTo(bNum.Value);
        }
        var aStr = ConvertToString(a);
        var bStr = ConvertToString(b);
        return string.Compare(aStr, bStr, StringComparison.OrdinalIgnoreCase);
    }

    // Membership test against a JSON array, an object enumerable, or a
    // comma-separated string; comparison is case-insensitive on string forms.
    private static bool IsInCollection(object? actual, object? collection)
    {
        if (collection == null) return false;
        IEnumerable<string>? items = null;
        if (collection is JsonElement je && je.ValueKind == JsonValueKind.Array)
        {
            items = je.EnumerateArray().Select(e => ConvertToString(e));
        }
        else if (collection is IEnumerable<object> enumerable)
        {
            items = enumerable.Select(ConvertToString);
        }
        else if (collection is string str)
        {
            items = str.Split(',').Select(s => s.Trim());
        }
        if (items == null) return false;
        var actualStr = ConvertToString(actual);
        return items.Any(i => string.Equals(i, actualStr, StringComparison.OrdinalIgnoreCase));
    }

    private static bool ContainsValue(object? actual, object? search)
    {
        var actualStr = ConvertToString(actual);
        var searchStr = ConvertToString(search);
        return actualStr.Contains(searchStr, StringComparison.OrdinalIgnoreCase);
    }

    // Regex match with a 100 ms timeout to guard against catastrophic
    // backtracking; any regex failure (bad pattern, timeout) counts as no match.
    private static bool MatchesRegex(object? actual, object? pattern)
    {
        var actualStr = ConvertToString(actual);
        var patternStr = ConvertToString(pattern);
        try
        {
            return Regex.IsMatch(actualStr, patternStr, RegexOptions.IgnoreCase, TimeSpan.FromMilliseconds(100));
        }
        catch
        {
            return false;
        }
    }

    // Canonical string form used by the loose comparisons above; JSON nulls and
    // CLR nulls both map to the empty string.
    private static string ConvertToString(object? value)
    {
        if (value == null) return string.Empty;
        if (value is JsonElement je)
        {
            return je.ValueKind switch
            {
                JsonValueKind.String => je.GetString() ?? string.Empty,
                JsonValueKind.Number => je.GetRawText(),
                JsonValueKind.True => "true",
                JsonValueKind.False => "false",
                JsonValueKind.Null => string.Empty,
                _ => je.GetRawText()
            };
        }
        return value.ToString() ?? string.Empty;
    }

    // Best-effort numeric coercion; returns null when the value is not numeric.
    private static double? ConvertToDouble(object? value)
    {
        if (value == null) return null;
        if (value is JsonElement je && je.TryGetDouble(out var d))
        {
            return d;
        }
        if (value is double dVal) return dVal;
        if (value is float fVal) return fVal;
        if (value is int iVal) return iVal;
        if (value is long lVal) return lVal;
        if (value is decimal decVal) return (double)decVal;
        if (value is string str && double.TryParse(str, out var parsed))
        {
            return parsed;
        }
        return null;
    }

    // Adds the override id to its profile's index list (idempotent).
    private void IndexOverride(AuditedOverride over)
    {
        var list = _profileIndex.GetOrAdd(over.ProfileId, _ => new List<string>());
        lock (list)
        {
            if (!list.Contains(over.OverrideId))
            {
                list.Add(over.OverrideId);
            }
        }
    }

    // Removes the override id from its profile's index list, if present.
    private void RemoveFromIndex(AuditedOverride over)
    {
        if (_profileIndex.TryGetValue(over.ProfileId, out var list))
        {
            lock (list)
            {
                list.Remove(over.OverrideId);
            }
        }
    }

    // Id = "ovr-" + first 16 hex chars of SHA-256 over profile/type/timestamp
    // plus a random GUID (guarantees uniqueness).
    private static string GenerateOverrideId(CreateOverrideRequest request, DateTimeOffset timestamp)
    {
        var seed = $"{request.ProfileId}|{request.OverrideType}|{timestamp:O}|{Guid.NewGuid()}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
        return $"ovr-{Convert.ToHexStringLower(hash)[..16]}";
    }
}

View File

@@ -0,0 +1,109 @@
using System.Text.Json.Serialization;
namespace StellaOps.Policy.RiskProfile.Scope;
/// <summary>
/// Represents an attachment of a risk profile to a scope (organization, project, environment).
/// </summary>
/// <param name="Id">Unique attachment id.</param>
/// <param name="ScopeType">Level of the scope hierarchy this attachment binds to.</param>
/// <param name="ScopeId">Identifier of the concrete scope (org id, project id, ...).</param>
/// <param name="ProfileId">Attached risk profile.</param>
/// <param name="ProfileVersion">Version of the attached profile.</param>
/// <param name="Precedence">Ordering value when multiple attachments apply.</param>
/// <param name="EffectiveFrom">Start of the attachment's validity window.</param>
/// <param name="EffectiveUntil">Optional end of the validity window.</param>
/// <param name="CreatedAt">Creation timestamp.</param>
/// <param name="CreatedBy">Creator identity, when known.</param>
/// <param name="Metadata">Optional free-form metadata.</param>
public sealed record ScopeAttachment(
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("scope_type")] ScopeType ScopeType,
    [property: JsonPropertyName("scope_id")] string ScopeId,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("precedence")] int Precedence,
    [property: JsonPropertyName("effective_from")] DateTimeOffset EffectiveFrom,
    [property: JsonPropertyName("effective_until")] DateTimeOffset? EffectiveUntil,
    [property: JsonPropertyName("created_at")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("created_by")] string? CreatedBy,
    [property: JsonPropertyName("metadata")] Dictionary<string, string>? Metadata = null);
/// <summary>
/// Type of scope for profile attachment.
/// </summary>
/// <remarks>
/// Uses [JsonStringEnumMemberName] because [JsonPropertyName] has no effect
/// on enum members under System.Text.Json.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ScopeType>))]
public enum ScopeType
{
    /// <summary>
    /// Global scope - applies to all unless overridden.
    /// </summary>
    [JsonStringEnumMemberName("global")]
    Global,

    /// <summary>
    /// Organization-level scope.
    /// </summary>
    [JsonStringEnumMemberName("organization")]
    Organization,

    /// <summary>
    /// Project-level scope within an organization.
    /// </summary>
    [JsonStringEnumMemberName("project")]
    Project,

    /// <summary>
    /// Environment-level scope (e.g., production, staging).
    /// </summary>
    [JsonStringEnumMemberName("environment")]
    Environment,

    /// <summary>
    /// Component-level scope for specific packages/images.
    /// </summary>
    [JsonStringEnumMemberName("component")]
    Component
}
/// <summary>
/// Request to create a scope attachment.
/// </summary>
/// <param name="ScopeType">Level of the scope hierarchy to bind to.</param>
/// <param name="ScopeId">Identifier of the concrete scope.</param>
/// <param name="ProfileId">Risk profile to attach.</param>
/// <param name="ProfileVersion">Optional profile version; null presumably selects a default — confirm with service.</param>
/// <param name="Precedence">Optional ordering value; null presumably selects a default — confirm with service.</param>
/// <param name="EffectiveFrom">Optional start of the validity window.</param>
/// <param name="EffectiveUntil">Optional end of the validity window.</param>
/// <param name="Metadata">Optional free-form metadata.</param>
public sealed record CreateScopeAttachmentRequest(
    [property: JsonPropertyName("scope_type")] ScopeType ScopeType,
    [property: JsonPropertyName("scope_id")] string ScopeId,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string? ProfileVersion,
    [property: JsonPropertyName("precedence")] int? Precedence,
    [property: JsonPropertyName("effective_from")] DateTimeOffset? EffectiveFrom,
    [property: JsonPropertyName("effective_until")] DateTimeOffset? EffectiveUntil,
    [property: JsonPropertyName("metadata")] Dictionary<string, string>? Metadata = null);
/// <summary>
/// Query for finding scope attachments.
/// </summary>
/// <param name="ScopeType">Optional filter by scope level.</param>
/// <param name="ScopeId">Optional filter by concrete scope id.</param>
/// <param name="ProfileId">Optional filter by attached profile.</param>
/// <param name="IncludeExpired">When true, attachments past their validity window are included.</param>
/// <param name="Limit">Maximum number of results.</param>
public sealed record ScopeAttachmentQuery(
    [property: JsonPropertyName("scope_type")] ScopeType? ScopeType = null,
    [property: JsonPropertyName("scope_id")] string? ScopeId = null,
    [property: JsonPropertyName("profile_id")] string? ProfileId = null,
    [property: JsonPropertyName("include_expired")] bool IncludeExpired = false,
    [property: JsonPropertyName("limit")] int Limit = 100);
/// <summary>
/// Response containing resolved profile for a scope hierarchy.
/// </summary>
/// <param name="ProfileId">Winning profile after hierarchy resolution.</param>
/// <param name="ProfileVersion">Version of the winning profile.</param>
/// <param name="ResolvedFrom">Scope level the winning attachment came from.</param>
/// <param name="ScopeId">Concrete scope id of the winning attachment.</param>
/// <param name="AttachmentId">Id of the winning attachment.</param>
/// <param name="InheritanceChain">Attachments considered along the hierarchy, for traceability.</param>
public sealed record ResolvedScopeProfile(
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("resolved_from")] ScopeType ResolvedFrom,
    [property: JsonPropertyName("scope_id")] string ScopeId,
    [property: JsonPropertyName("attachment_id")] string AttachmentId,
    [property: JsonPropertyName("inheritance_chain")] IReadOnlyList<ScopeAttachment> InheritanceChain);
/// <summary>
/// Scope selector for matching components to profiles.
/// </summary>
/// <param name="OrganizationId">Optional organization to match.</param>
/// <param name="ProjectId">Optional project to match.</param>
/// <param name="Environment">Optional environment name to match.</param>
/// <param name="ComponentPurl">Optional component package URL to match.</param>
/// <param name="Labels">Optional label key/value pairs to match.</param>
public sealed record ScopeSelector(
    [property: JsonPropertyName("organization_id")] string? OrganizationId = null,
    [property: JsonPropertyName("project_id")] string? ProjectId = null,
    [property: JsonPropertyName("environment")] string? Environment = null,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl = null,
    [property: JsonPropertyName("labels")] Dictionary<string, string>? Labels = null);
/// <summary>
/// Result of scope resolution.
/// </summary>
/// <param name="Selector">Selector the resolution was performed for.</param>
/// <param name="ResolvedProfile">Winning profile, or null when nothing matched.</param>
/// <param name="ApplicableAttachments">All attachments that matched the selector.</param>
/// <param name="ResolutionTimeMs">Wall-clock duration of the resolution, in milliseconds.</param>
public sealed record ScopeResolutionResult(
    [property: JsonPropertyName("selector")] ScopeSelector Selector,
    [property: JsonPropertyName("resolved_profile")] ResolvedScopeProfile? ResolvedProfile,
    [property: JsonPropertyName("applicable_attachments")] IReadOnlyList<ScopeAttachment> ApplicableAttachments,
    [property: JsonPropertyName("resolution_time_ms")] double ResolutionTimeMs);

Some files were not shown because too many files have changed in this diff Show More