Add call graph fixtures for various languages and scenarios
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Reachability Corpus Validation / validate-corpus (push) Has been cancelled
Reachability Corpus Validation / validate-ground-truths (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Reachability Corpus Validation / determinism-check (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled

- Introduced `all-edge-reasons.json` to test edge resolution reasons in .NET.
- Added `all-visibility-levels.json` to validate method visibility levels in .NET.
- Created `dotnet-aspnetcore-minimal.json` for a minimal ASP.NET Core application.
- Included `go-gin-api.json` for a Go Gin API application structure.
- Added `java-spring-boot.json` for the Spring PetClinic application in Java.
- Introduced `legacy-no-schema.json` for legacy application structure without schema.
- Created `node-express-api.json` for an Express.js API application structure.
This commit is contained in:
master
2025-12-16 10:44:24 +02:00
parent 4391f35d8a
commit 5a480a3c2a
223 changed files with 19367 additions and 727 deletions

View File

@@ -3,6 +3,7 @@ using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Importer.Telemetry;
namespace StellaOps.AirGap.Importer.Quarantine;
@@ -36,6 +37,8 @@ public sealed class FileSystemQuarantineService : IQuarantineService
ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
ArgumentException.ThrowIfNullOrWhiteSpace(request.ReasonCode);
using var tenantScope = _logger.BeginTenantScope(request.TenantId);
if (!File.Exists(request.BundlePath))
{
return new QuarantineResult(
@@ -117,11 +120,12 @@ public sealed class FileSystemQuarantineService : IQuarantineService
cancellationToken).ConfigureAwait(false);
_logger.LogWarning(
"Bundle quarantined: tenant={TenantId} quarantineId={QuarantineId} reason={ReasonCode} path={Path}",
"offlinekit.quarantine created tenant_id={tenant_id} quarantine_id={quarantine_id} reason_code={reason_code} quarantine_path={quarantine_path} original_bundle={original_bundle}",
request.TenantId,
quarantineId,
request.ReasonCode,
quarantinePath);
quarantinePath,
Path.GetFileName(request.BundlePath));
return new QuarantineResult(
Success: true,
@@ -131,7 +135,12 @@ public sealed class FileSystemQuarantineService : IQuarantineService
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to quarantine bundle to {Path}", quarantinePath);
_logger.LogError(
ex,
"offlinekit.quarantine failed tenant_id={tenant_id} quarantine_id={quarantine_id} quarantine_path={quarantine_path}",
request.TenantId,
quarantineId,
quarantinePath);
return new QuarantineResult(
Success: false,
QuarantineId: quarantineId,
@@ -221,6 +230,8 @@ public sealed class FileSystemQuarantineService : IQuarantineService
ArgumentException.ThrowIfNullOrWhiteSpace(quarantineId);
ArgumentException.ThrowIfNullOrWhiteSpace(removalReason);
using var tenantScope = _logger.BeginTenantScope(tenantId);
var tenantRoot = Path.Combine(_options.QuarantineRoot, SanitizeForPathSegment(tenantId));
var entryPath = Path.Combine(tenantRoot, quarantineId);
if (!Directory.Exists(entryPath))
@@ -245,7 +256,7 @@ public sealed class FileSystemQuarantineService : IQuarantineService
Directory.Move(entryPath, removedPath);
_logger.LogInformation(
"Quarantine removed: tenant={TenantId} quarantineId={QuarantineId} removedPath={RemovedPath}",
"offlinekit.quarantine removed tenant_id={tenant_id} quarantine_id={quarantine_id} removed_path={removed_path}",
tenantId,
quarantineId,
removedPath);

View File

@@ -0,0 +1,194 @@
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Digest-keyed artifact index used by the evidence reconciliation flow.
/// Designed for deterministic ordering and replay.
/// </summary>
public sealed class ArtifactIndex
{
    // Ordinal-sorted map keeps enumeration order deterministic across runs and platforms.
    private readonly SortedDictionary<string, ArtifactEntry> _entries = new(StringComparer.Ordinal);

    /// <summary>Adds <paramref name="entry"/> keyed by its own digest, merging with any existing entry.</summary>
    /// <exception cref="ArgumentNullException">When <paramref name="entry"/> is null.</exception>
    public void AddOrUpdate(ArtifactEntry entry)
    {
        ArgumentNullException.ThrowIfNull(entry);
        AddOrUpdate(entry.Digest, entry);
    }

    /// <summary>
    /// Adds or merges <paramref name="entry"/> under the normalized form of <paramref name="digest"/>.
    /// The stored entry always carries the normalized digest.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="entry"/> is null.</exception>
    /// <exception cref="ArgumentException">When <paramref name="digest"/> is blank.</exception>
    /// <exception cref="FormatException">When <paramref name="digest"/> is not a valid sha256 digest.</exception>
    public void AddOrUpdate(string digest, ArtifactEntry entry)
    {
        ArgumentNullException.ThrowIfNull(entry);

        var normalizedDigest = NormalizeDigest(digest);
        var normalizedEntry = entry with { Digest = normalizedDigest };

        if (_entries.TryGetValue(normalizedDigest, out var existing))
        {
            _entries[normalizedDigest] = existing.Merge(normalizedEntry);
            return;
        }

        _entries[normalizedDigest] = normalizedEntry;
    }

    /// <summary>Returns the entry for <paramref name="digest"/> (normalized first), or null when absent.</summary>
    public ArtifactEntry? Get(string digest)
    {
        var normalizedDigest = NormalizeDigest(digest);
        return _entries.TryGetValue(normalizedDigest, out var entry) ? entry : null;
    }

    /// <summary>All entries in ascending ordinal digest order (deterministic replay order).</summary>
    public IEnumerable<KeyValuePair<string, ArtifactEntry>> GetAll() => _entries;

    /// <summary>
    /// Normalizes a digest to the canonical "sha256:&lt;64 lowercase hex&gt;" form.
    /// Accepts bare hex or a "sha256:"-prefixed value (prefix match is case-insensitive).
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="digest"/> is blank.</exception>
    /// <exception cref="FormatException">When the algorithm is not sha256 or the hex part is invalid.</exception>
    public static string NormalizeDigest(string digest)
    {
        if (string.IsNullOrWhiteSpace(digest))
        {
            throw new ArgumentException("Digest is required.", nameof(digest));
        }

        digest = digest.Trim();
        const string prefix = "sha256:";
        string hex;
        if (digest.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
        {
            hex = digest[prefix.Length..];
        }
        else if (digest.Contains(':', StringComparison.Ordinal))
        {
            // Any other "algo:" prefix is rejected rather than silently treated as hex.
            throw new FormatException($"Unsupported digest algorithm in '{digest}'. Only sha256 is supported.");
        }
        else
        {
            hex = digest;
        }

        hex = hex.Trim().ToLowerInvariant();
        if (hex.Length != 64 || !IsLowerHex(hex.AsSpan()))
        {
            throw new FormatException($"Invalid sha256 digest '{digest}'. Expected 64 hex characters.");
        }

        return prefix + hex;
    }

    // True when every char is [0-9a-f]; input is already lower-cased by the caller.
    private static bool IsLowerHex(ReadOnlySpan<char> value)
    {
        foreach (var c in value)
        {
            if ((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f'))
            {
                continue;
            }

            return false;
        }

        return true;
    }
}

/// <summary>
/// Aggregated evidence for a single artifact digest: SBOMs, attestations and VEX documents.
/// </summary>
public sealed record ArtifactEntry(
    string Digest,
    string? Name,
    IReadOnlyList<SbomReference> Sboms,
    IReadOnlyList<AttestationReference> Attestations,
    IReadOnlyList<VexReference> VexDocuments)
{
    /// <summary>Creates an entry with no evidence attached yet.</summary>
    public static ArtifactEntry Empty(string digest, string? name = null) =>
        new(
            digest,
            name,
            Array.Empty<SbomReference>(),
            Array.Empty<AttestationReference>(),
            Array.Empty<VexReference>());

    /// <summary>
    /// Merges two entries for the same digest. Evidence lists are unioned and
    /// de-duplicated by content hash; names are resolved by ordinal order so the
    /// result is independent of merge order.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="other"/> is null.</exception>
    public ArtifactEntry Merge(ArtifactEntry other)
    {
        ArgumentNullException.ThrowIfNull(other);
        return this with
        {
            Name = ChooseName(Name, other.Name),
            Sboms = MergeByContentHash(Sboms, other.Sboms, s => s.ContentHash, s => s.FilePath),
            Attestations = MergeByContentHash(Attestations, other.Attestations, a => a.ContentHash, a => a.FilePath),
            VexDocuments = MergeByContentHash(VexDocuments, other.VexDocuments, v => v.ContentHash, v => v.FilePath),
        };
    }

    // Deterministic tie-break: prefer the non-null, ordinally-smaller name.
    private static string? ChooseName(string? left, string? right)
    {
        if (left is null)
        {
            return right;
        }

        if (right is null)
        {
            return left;
        }

        return string.CompareOrdinal(left, right) <= 0 ? left : right;
    }

    // Union of both lists ordered by (content hash, file path), keeping the first
    // occurrence per content hash. Single LINQ pipeline: the intermediate ToList()
    // the original materialized before DistinctBy added an allocation without
    // changing the result.
    private static IReadOnlyList<T> MergeByContentHash<T>(
        IReadOnlyList<T> left,
        IReadOnlyList<T> right,
        Func<T, string> contentHashSelector,
        Func<T, string> filePathSelector)
    {
        return left
            .Concat(right)
            .OrderBy(contentHashSelector, StringComparer.Ordinal)
            .ThenBy(filePathSelector, StringComparer.Ordinal)
            .DistinctBy(contentHashSelector)
            .ToList();
    }
}

/// <summary>Reference to an SBOM document identified by its content hash.</summary>
public sealed record SbomReference(
    string ContentHash,
    string FilePath,
    SbomFormat Format,
    DateTimeOffset? CreatedAt);

/// <summary>Reference to an attestation and its verification status flags.</summary>
public sealed record AttestationReference(
    string ContentHash,
    string FilePath,
    string PredicateType,
    IReadOnlyList<string> Subjects,
    bool SignatureVerified,
    bool TlogVerified,
    string? RekorUuid);

/// <summary>Reference to a VEX document with its source precedence.</summary>
public sealed record VexReference(
    string ContentHash,
    string FilePath,
    VexFormat Format,
    SourcePrecedence Precedence,
    DateTimeOffset? Timestamp);

/// <summary>Supported SBOM document formats.</summary>
public enum SbomFormat
{
    CycloneDx,
    Spdx,
    Unknown
}

/// <summary>Supported VEX document formats.</summary>
public enum VexFormat
{
    OpenVex,
    CsafVex,
    CycloneDxVex,
    Unknown
}

/// <summary>
/// VEX source precedence. Presumably lower values take precedence (vendor
/// statements first) — confirm against the reconciliation policy that consumes this.
/// </summary>
public enum SourcePrecedence
{
    Vendor = 1,
    Maintainer = 2,
    ThirdParty = 3,
    Unknown = 99
}

View File

@@ -0,0 +1,89 @@
using System.Security.Cryptography;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Walks the well-known evidence roots ("sboms", "attestations", "vex") under a
/// bundle's evidence directory and produces a deterministic, content-hashed listing.
/// </summary>
public static class EvidenceDirectoryDiscovery
{
    // Only these top-level directories are scanned; anything else is ignored.
    private static readonly string[] EvidenceRoots = new[] { "sboms", "attestations", "vex" };

    /// <summary>
    /// Discovers every file below the evidence roots, ordered by normalized
    /// relative path (ordinal) so the output is stable across platforms.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="evidenceDirectory"/> is blank.</exception>
    /// <exception cref="DirectoryNotFoundException">When the directory does not exist.</exception>
    public static IReadOnlyList<DiscoveredEvidenceFile> Discover(string evidenceDirectory)
    {
        if (string.IsNullOrWhiteSpace(evidenceDirectory))
        {
            throw new ArgumentException("Evidence directory is required.", nameof(evidenceDirectory));
        }

        if (!Directory.Exists(evidenceDirectory))
        {
            throw new DirectoryNotFoundException($"Evidence directory not found: {evidenceDirectory}");
        }

        var candidates = new List<(string FullPath, string RelativePath)>();
        foreach (var root in EvidenceRoots)
        {
            var rootPath = Path.Combine(evidenceDirectory, root);
            if (!Directory.Exists(rootPath))
            {
                continue;
            }

            foreach (var file in Directory.EnumerateFiles(rootPath, "*", SearchOption.AllDirectories))
            {
                var relative = NormalizeRelativePath(Path.GetRelativePath(evidenceDirectory, file));
                candidates.Add((file, relative));
            }
        }

        return candidates
            .OrderBy(c => c.RelativePath, StringComparer.Ordinal)
            .Select(c => new DiscoveredEvidenceFile(
                RelativePath: c.RelativePath,
                ContentSha256: ComputeSha256(c.FullPath),
                Kind: Classify(c.RelativePath)))
            .ToList();
    }

    // Forward slashes keep relative paths identical on Windows and Unix.
    private static string NormalizeRelativePath(string path) => path.Replace('\\', '/');

    // Kind is derived from the top-level root the file was found under.
    private static EvidenceFileKind Classify(string relativePath)
    {
        if (relativePath.StartsWith("sboms/", StringComparison.OrdinalIgnoreCase))
        {
            return EvidenceFileKind.Sbom;
        }

        if (relativePath.StartsWith("attestations/", StringComparison.OrdinalIgnoreCase))
        {
            return EvidenceFileKind.Attestation;
        }

        if (relativePath.StartsWith("vex/", StringComparison.OrdinalIgnoreCase))
        {
            return EvidenceFileKind.Vex;
        }

        return EvidenceFileKind.Unknown;
    }

    // Streams the file through the one-shot SHA256.HashData overload (.NET 7+),
    // avoiding the disposable hash-object dance of SHA256.Create()/ComputeHash.
    private static string ComputeSha256(string fullPath)
    {
        using var stream = File.OpenRead(fullPath);
        var hash = SHA256.HashData(stream);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}

/// <summary>Evidence category derived from the directory a file lives under.</summary>
public enum EvidenceFileKind
{
    Sbom,
    Attestation,
    Vex,
    Unknown
}

/// <summary>A discovered evidence file: normalized path, content digest and category.</summary>
public sealed record DiscoveredEvidenceFile(
    string RelativePath,
    string ContentSha256,
    EvidenceFileKind Kind);

View File

@@ -0,0 +1,24 @@
namespace StellaOps.AirGap.Importer.Telemetry;
/// <summary>
/// Stable structured logging field names for Offline Kit / air-gap import flows.
/// </summary>
public static class OfflineKitLogFields
{
    /// <summary>Tenant that owns the import/quarantine operation.</summary>
    public const string TenantId = "tenant_id";

    /// <summary>Kind of bundle being imported.</summary>
    public const string BundleType = "bundle_type";

    /// <summary>Content digest of the bundle.</summary>
    public const string BundleDigest = "bundle_digest";

    /// <summary>Filesystem path of the bundle on the importer host.</summary>
    public const string BundlePath = "bundle_path";

    /// <summary>Version declared by the bundle manifest.</summary>
    public const string ManifestVersion = "manifest_version";

    /// <summary>Creation timestamp declared by the bundle manifest.</summary>
    public const string ManifestCreatedAt = "manifest_created_at";

    /// <summary>Whether a non-monotonic activation was forced.</summary>
    public const string ForceActivate = "force_activate";

    /// <summary>Operator-supplied justification for a forced activation.</summary>
    public const string ForceActivateReason = "force_activate_reason";

    /// <summary>Overall outcome of the operation.</summary>
    public const string Result = "result";

    /// <summary>Machine-readable failure/outcome code.</summary>
    public const string ReasonCode = "reason_code";

    /// <summary>Human-readable explanation accompanying the reason code.</summary>
    public const string ReasonMessage = "reason_message";

    /// <summary>Identifier of a quarantine entry.</summary>
    public const string QuarantineId = "quarantine_id";

    /// <summary>Filesystem path of a quarantine entry.</summary>
    public const string QuarantinePath = "quarantine_path";
}

View File

@@ -0,0 +1,21 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.AirGap.Importer.Telemetry;
public static class OfflineKitLogScopes
{
    /// <summary>
    /// Opens a logging scope that attaches the tenant id under the stable
    /// <see cref="OfflineKitLogFields.TenantId"/> field name. Returns null
    /// (i.e. no scope) when the tenant id is blank.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="logger"/> is null.</exception>
    public static IDisposable? BeginTenantScope(this ILogger logger, string tenantId)
    {
        ArgumentNullException.ThrowIfNull(logger);

        return string.IsNullOrWhiteSpace(tenantId)
            ? null
            : logger.BeginScope(new Dictionary<string, object?>(StringComparer.Ordinal)
              {
                  [OfflineKitLogFields.TenantId] = tenantId
              });
    }
}

View File

@@ -0,0 +1,142 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.AirGap.Importer.Telemetry;
/// <summary>
/// Metrics for Offline Kit operations.
/// </summary>
public sealed class OfflineKitMetrics : IDisposable
{
    /// <summary>Name of the meter all instruments are registered under.</summary>
    public const string MeterName = "StellaOps.AirGap.Importer";

    /// <summary>Canonical tag names attached to emitted measurements.</summary>
    public static class TagNames
    {
        public const string TenantId = "tenant_id";
        public const string Status = "status";
        public const string AttestationType = "attestation_type";
        public const string Success = "success";
        public const string Mode = "mode";
        public const string Reason = "reason";
    }

    // Fallback tag value used whenever a caller passes a blank string.
    private const string UnknownTag = "unknown";

    private readonly Meter _meter;
    private readonly Counter<long> _importTotal;
    private readonly Histogram<double> _attestationVerifyLatencySeconds;
    private readonly Counter<long> _rekorSuccessTotal;
    private readonly Counter<long> _rekorRetryTotal;
    private readonly Histogram<double> _rekorInclusionLatencySeconds;
    private bool _disposed;

    /// <summary>
    /// Creates the meter and its instruments. When <paramref name="meterFactory"/>
    /// is null, a standalone <see cref="Meter"/> is created and owned by this instance.
    /// </summary>
    public OfflineKitMetrics(IMeterFactory? meterFactory = null)
    {
        _meter = meterFactory is null
            ? new Meter(MeterName, "1.0.0")
            : meterFactory.Create(MeterName, version: "1.0.0");

        _importTotal = _meter.CreateCounter<long>(
            name: "offlinekit_import_total",
            unit: "{imports}",
            description: "Total number of offline kit import attempts");

        _attestationVerifyLatencySeconds = _meter.CreateHistogram<double>(
            name: "offlinekit_attestation_verify_latency_seconds",
            unit: "s",
            description: "Time taken to verify attestations during import");

        _rekorSuccessTotal = _meter.CreateCounter<long>(
            name: "attestor_rekor_success_total",
            unit: "{verifications}",
            description: "Successful Rekor verification count");

        _rekorRetryTotal = _meter.CreateCounter<long>(
            name: "attestor_rekor_retry_total",
            unit: "{retries}",
            description: "Rekor verification retry count");

        _rekorInclusionLatencySeconds = _meter.CreateHistogram<double>(
            name: "rekor_inclusion_latency",
            unit: "s",
            description: "Time to verify Rekor inclusion proof");
    }

    /// <summary>Counts one import attempt, tagged by status and tenant.</summary>
    public void RecordImport(string status, string tenantId)
    {
        var tags = new TagList
        {
            { TagNames.Status, OrUnknown(status) },
            { TagNames.TenantId, OrUnknown(tenantId) }
        };
        _importTotal.Add(1, tags);
    }

    /// <summary>Records attestation verification latency; negative values clamp to zero.</summary>
    public void RecordAttestationVerifyLatency(string attestationType, double seconds, bool success)
    {
        var tags = new TagList
        {
            { TagNames.AttestationType, OrUnknown(attestationType) },
            { TagNames.Success, Flag(success) }
        };
        _attestationVerifyLatencySeconds.Record(NonNegative(seconds), tags);
    }

    /// <summary>Counts one successful Rekor verification for the given mode.</summary>
    public void RecordRekorSuccess(string mode) =>
        _rekorSuccessTotal.Add(1, new TagList { { TagNames.Mode, OrUnknown(mode) } });

    /// <summary>Counts one Rekor verification retry for the given reason.</summary>
    public void RecordRekorRetry(string reason) =>
        _rekorRetryTotal.Add(1, new TagList { { TagNames.Reason, OrUnknown(reason) } });

    /// <summary>Records Rekor inclusion-proof latency; negative values clamp to zero.</summary>
    public void RecordRekorInclusionLatency(double seconds, bool success) =>
        _rekorInclusionLatencySeconds.Record(
            NonNegative(seconds),
            new TagList { { TagNames.Success, Flag(success) } });

    /// <summary>Disposes the underlying meter exactly once.</summary>
    public void Dispose()
    {
        if (!_disposed)
        {
            _meter.Dispose();
            _disposed = true;
        }
    }

    // Blank tag values are normalized to "unknown" so cardinality stays bounded.
    private static string OrUnknown(string? value) =>
        string.IsNullOrWhiteSpace(value) ? UnknownTag : value;

    private static string Flag(bool value) => value ? "true" : "false";

    private static double NonNegative(double seconds) => seconds < 0 ? 0 : seconds;
}

View File

@@ -1,5 +1,6 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Importer.Contracts;
namespace StellaOps.AirGap.Importer.Validation;
@@ -13,13 +14,24 @@ public sealed class DsseVerifier
{
private const string PaePrefix = "DSSEv1";
public BundleValidationResult Verify(DsseEnvelope envelope, TrustRootConfig trustRoots)
public BundleValidationResult Verify(DsseEnvelope envelope, TrustRootConfig trustRoots, ILogger? logger = null)
{
if (trustRoots.TrustedKeyFingerprints.Count == 0 || trustRoots.PublicKeys.Count == 0)
{
logger?.LogWarning(
"offlinekit.dsse.verify failed reason_code={reason_code} trusted_fingerprints={trusted_fingerprints} public_keys={public_keys}",
"TRUST_ROOTS_REQUIRED",
trustRoots.TrustedKeyFingerprints.Count,
trustRoots.PublicKeys.Count);
return BundleValidationResult.Failure("trust-roots-required");
}
logger?.LogDebug(
"offlinekit.dsse.verify start payload_type={payload_type} signatures={signatures} public_keys={public_keys}",
envelope.PayloadType,
envelope.Signatures.Count,
trustRoots.PublicKeys.Count);
foreach (var signature in envelope.Signatures)
{
if (!trustRoots.PublicKeys.TryGetValue(signature.KeyId, out var keyBytes))
@@ -36,10 +48,20 @@ public sealed class DsseVerifier
var pae = BuildPreAuthEncoding(envelope.PayloadType, envelope.Payload);
if (TryVerifyRsaPss(keyBytes, pae, signature.Signature))
{
logger?.LogInformation(
"offlinekit.dsse.verify succeeded key_id={key_id} fingerprint={fingerprint} payload_type={payload_type}",
signature.KeyId,
fingerprint,
envelope.PayloadType);
return BundleValidationResult.Success("dsse-signature-verified");
}
}
logger?.LogWarning(
"offlinekit.dsse.verify failed reason_code={reason_code} signatures={signatures} public_keys={public_keys}",
"DSSE_SIGNATURE_INVALID",
envelope.Signatures.Count,
trustRoots.PublicKeys.Count);
return BundleValidationResult.Failure("dsse-signature-untrusted-or-invalid");
}

View File

@@ -1,6 +1,7 @@
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Quarantine;
using StellaOps.AirGap.Importer.Telemetry;
using StellaOps.AirGap.Importer.Versioning;
namespace StellaOps.AirGap.Importer.Validation;
@@ -46,6 +47,7 @@ public sealed class ImportValidator
ArgumentException.ThrowIfNullOrWhiteSpace(request.BundleDigest);
ArgumentException.ThrowIfNullOrWhiteSpace(request.ManifestVersion);
using var tenantScope = _logger.BeginTenantScope(request.TenantId);
var verificationLog = new List<string>(capacity: 16);
var tufResult = _tuf.Validate(request.RootJson, request.SnapshotJson, request.TimestampJson);
@@ -53,16 +55,30 @@ public sealed class ImportValidator
{
var failed = tufResult with { Reason = $"tuf:{tufResult.Reason}" };
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"TUF_INVALID",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
verificationLog.Add($"tuf:{tufResult.Reason}");
var dsseResult = _dsse.Verify(request.Envelope, request.TrustRoots);
var dsseResult = _dsse.Verify(request.Envelope, request.TrustRoots, _logger);
if (!dsseResult.IsValid)
{
var failed = dsseResult with { Reason = $"dsse:{dsseResult.Reason}" };
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"DSSE_INVALID",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
@@ -73,6 +89,13 @@ public sealed class ImportValidator
{
var failed = BundleValidationResult.Failure("merkle-empty");
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"HASH_MISMATCH",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
@@ -83,6 +106,13 @@ public sealed class ImportValidator
{
var failed = rotationResult with { Reason = $"rotation:{rotationResult.Reason}" };
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"ROTATION_INVALID",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
@@ -97,6 +127,14 @@ public sealed class ImportValidator
{
var failed = BundleValidationResult.Failure($"manifest-version-parse-failed:{ex.GetType().Name.ToLowerInvariant()}");
verificationLog.Add(failed.Reason);
_logger.LogWarning(
ex,
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"VERSION_PARSE_FAILED",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
@@ -112,6 +150,13 @@ public sealed class ImportValidator
var failed = BundleValidationResult.Failure(
$"version-non-monotonic:incoming={incomingVersion.SemVer}:current={monotonicity.CurrentVersion?.SemVer ?? "(none)"}");
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"VERSION_NON_MONOTONIC",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
@@ -122,14 +167,22 @@ public sealed class ImportValidator
{
var failed = BundleValidationResult.Failure("force-activate-reason-required");
verificationLog.Add(failed.Reason);
_logger.LogWarning(
"offlinekit.import.validation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} reason_code={reason_code} reason_message={reason_message}",
request.TenantId,
request.BundleType,
request.BundleDigest,
"FORCE_ACTIVATE_REASON_REQUIRED",
failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
_logger.LogWarning(
"Non-monotonic activation forced: tenant={TenantId} bundleType={BundleType} incoming={Incoming} current={Current} reason={Reason}",
"offlinekit.import.force_activation tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} incoming_version={incoming_version} current_version={current_version} force_activate_reason={force_activate_reason}",
request.TenantId,
request.BundleType,
request.BundleDigest,
incomingVersion.SemVer,
monotonicity.CurrentVersion?.SemVer,
request.ForceActivateReason);
@@ -148,13 +201,25 @@ public sealed class ImportValidator
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to record bundle activation for tenant={TenantId} bundleType={BundleType}", request.TenantId, request.BundleType);
_logger.LogError(
ex,
"offlinekit.import.activation failed tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest}",
request.TenantId,
request.BundleType,
request.BundleDigest);
var failed = BundleValidationResult.Failure($"version-store-write-failed:{ex.GetType().Name.ToLowerInvariant()}");
verificationLog.Add(failed.Reason);
await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false);
return failed;
}
_logger.LogInformation(
"offlinekit.import.validation succeeded tenant_id={tenant_id} bundle_type={bundle_type} bundle_digest={bundle_digest} manifest_version={manifest_version} force_activate={force_activate}",
request.TenantId,
request.BundleType,
request.BundleDigest,
request.ManifestVersion,
request.ForceActivate);
return BundleValidationResult.Success("import-validated");
}
@@ -199,7 +264,7 @@ public sealed class ImportValidator
if (!quarantine.Success)
{
_logger.LogError(
"Failed to quarantine bundle for tenant={TenantId} path={BundlePath} error={Error}",
"offlinekit.import.quarantine failed tenant_id={tenant_id} bundle_path={bundle_path} reason_code={reason_code}",
request.TenantId,
request.BundlePath,
quarantine.ErrorMessage);
@@ -207,7 +272,11 @@ public sealed class ImportValidator
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to quarantine bundle for tenant={TenantId} path={BundlePath}", request.TenantId, request.BundlePath);
_logger.LogError(
ex,
"offlinekit.import.quarantine failed tenant_id={tenant_id} bundle_path={bundle_path}",
request.TenantId,
request.BundlePath);
}
}
}

View File

@@ -19,3 +19,5 @@
| MR-T10.6.2 | DONE | DI simplified to register in-memory air-gap state store (no Mongo options or client). | 2025-12-11 |
| MR-T10.6.3 | DONE | Converted controller tests to in-memory store; dropped Mongo2Go dependency. | 2025-12-11 |
| AIRGAP-IMP-0338 | DONE | Implemented monotonicity enforcement + quarantine service (version primitives/checker, Postgres version store, importer validator integration, unit/integration tests). | 2025-12-15 |
| AIRGAP-OBS-0341-001 | DONE | Sprint 0341: OfflineKit metrics + structured logging fields/scopes in Importer; DSSE/quarantine logs aligned; metrics tests passing. | 2025-12-15 |
| AIRGAP-IMP-0342 | DOING | Sprint 0342: deterministic evidence reconciliation primitives per advisory §5 (ArtifactIndex/normalization first); tests pending. | 2025-12-15 |

View File

@@ -0,0 +1,29 @@
-- Authority Schema Migration 004: Offline Kit Audit
-- Sprint: SPRINT_0341_0001_0001 - Observability & Audit Enhancements
-- Purpose: Store structured Offline Kit import/activation audit events per advisory §13.2.
CREATE TABLE IF NOT EXISTS authority.offline_kit_audit (
    event_id UUID PRIMARY KEY,      -- client-generated event identifier
    tenant_id TEXT NOT NULL,        -- RLS isolation key (see policy below)
    event_type TEXT NOT NULL,       -- import/activation event category (free text)
    timestamp TIMESTAMPTZ NOT NULL, -- event time, timezone-aware
    actor TEXT NOT NULL,            -- who triggered the event
    details JSONB NOT NULL,         -- structured event payload
    result TEXT NOT NULL            -- outcome marker (free text)
);

-- Newest-first listing is the hot query (repository orders by timestamp DESC, event_id DESC).
CREATE INDEX IF NOT EXISTS idx_offline_kit_audit_ts ON authority.offline_kit_audit(timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_offline_kit_audit_type ON authority.offline_kit_audit(event_type);
CREATE INDEX IF NOT EXISTS idx_offline_kit_audit_tenant_ts ON authority.offline_kit_audit(tenant_id, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_offline_kit_audit_result ON authority.offline_kit_audit(tenant_id, result, timestamp DESC);

-- RLS (authority_app.require_current_tenant was introduced in migration 003_enable_rls.sql)
-- FORCE makes even the table owner subject to the policy.
ALTER TABLE authority.offline_kit_audit ENABLE ROW LEVEL SECURITY;
ALTER TABLE authority.offline_kit_audit FORCE ROW LEVEL SECURITY;

-- Recreate the policy idempotently so the migration can be re-run safely.
DROP POLICY IF EXISTS offline_kit_audit_tenant_isolation ON authority.offline_kit_audit;
CREATE POLICY offline_kit_audit_tenant_isolation ON authority.offline_kit_audit
    FOR ALL
    USING (tenant_id = authority_app.require_current_tenant())
    WITH CHECK (tenant_id = authority_app.require_current_tenant());

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Authority.Storage.Postgres.Models;
/// <summary>
/// Represents an Offline Kit audit record.
/// </summary>
public sealed class OfflineKitAuditEntity
{
    /// <summary>Unique event identifier (primary key).</summary>
    public required Guid EventId { get; init; }

    /// <summary>Owning tenant; rows are tenant-isolated at the database level.</summary>
    public required string TenantId { get; init; }

    /// <summary>Event category, stored as free text.</summary>
    public required string EventType { get; init; }

    // NOTE(review): unlike the other members this is not 'required' — a caller
    // that omits it persists default(DateTimeOffset). Confirm whether it should
    // be required as well.
    public DateTimeOffset Timestamp { get; init; }

    /// <summary>Actor that triggered the event.</summary>
    public required string Actor { get; init; }

    /// <summary>Event payload serialized as JSON (stored in a jsonb column).</summary>
    public required string Details { get; init; }

    /// <summary>Outcome marker, stored as free text.</summary>
    public required string Result { get; init; }
}

View File

@@ -0,0 +1,9 @@
using StellaOps.Authority.Storage.Postgres.Models;
namespace StellaOps.Authority.Storage.Postgres.Repositories;
public interface IOfflineKitAuditEmitter
{
    /// <summary>
    /// Records an Offline Kit audit event. Implementations are best-effort:
    /// audit failures should not break import flows.
    /// </summary>
    Task RecordAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,17 @@
using StellaOps.Authority.Storage.Postgres.Models;
namespace StellaOps.Authority.Storage.Postgres.Repositories;
public interface IOfflineKitAuditRepository
{
    /// <summary>Inserts a single audit record.</summary>
    Task InsertAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists a tenant's audit records newest-first, optionally filtered by
    /// event type and/or result; <paramref name="limit"/> and
    /// <paramref name="offset"/> page the results.
    /// </summary>
    Task<IReadOnlyList<OfflineKitAuditEntity>> ListAsync(
        string tenantId,
        string? eventType = null,
        string? result = null,
        int limit = 100,
        int offset = 0,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,40 @@
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Storage.Postgres.Models;
namespace StellaOps.Authority.Storage.Postgres.Repositories;
/// <summary>
/// Emits Offline Kit audit records to PostgreSQL.
/// Audit failures should not break import flows.
/// </summary>
public sealed class OfflineKitAuditEmitter : IOfflineKitAuditEmitter
{
    private readonly IOfflineKitAuditRepository _repository;
    private readonly ILogger<OfflineKitAuditEmitter> _logger;

    public OfflineKitAuditEmitter(IOfflineKitAuditRepository repository, ILogger<OfflineKitAuditEmitter> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Persists the audit record. Storage failures are logged and swallowed so
    /// audit emission never breaks the import flow; caller-requested cancellation,
    /// however, is propagated rather than logged as an audit failure.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="entity"/> is null.</exception>
    /// <exception cref="OperationCanceledException">When <paramref name="cancellationToken"/> is cancelled.</exception>
    public async Task RecordAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entity);

        try
        {
            await _repository.InsertAsync(entity, cancellationToken).ConfigureAwait(false);
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Cancellation is not an audit failure — let it flow to the caller.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "offlinekit.audit.record failed tenant_id={tenant_id} event_type={event_type} event_id={event_id}",
                entity.TenantId,
                entity.EventType,
                entity.EventId);
        }
    }
}

View File

@@ -0,0 +1,103 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Authority.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for Offline Kit audit records.
/// </summary>
public sealed class OfflineKitAuditRepository : RepositoryBase<AuthorityDataSource>, IOfflineKitAuditRepository
{
    public OfflineKitAuditRepository(AuthorityDataSource dataSource, ILogger<OfflineKitAuditRepository> logger)
        : base(dataSource, logger)
    {
    }

    /// <summary>
    /// Inserts one audit row. All string members are validated as non-blank.
    /// Details is cast to jsonb server-side, so it must contain valid JSON.
    /// </summary>
    public async Task InsertAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entity);
        ArgumentException.ThrowIfNullOrWhiteSpace(entity.TenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(entity.EventType);
        ArgumentException.ThrowIfNullOrWhiteSpace(entity.Actor);
        ArgumentException.ThrowIfNullOrWhiteSpace(entity.Details);
        ArgumentException.ThrowIfNullOrWhiteSpace(entity.Result);

        const string sql = """
            INSERT INTO authority.offline_kit_audit
                (event_id, tenant_id, event_type, timestamp, actor, details, result)
            VALUES (@event_id, @tenant_id, @event_type, @timestamp, @actor, @details::jsonb, @result)
            """;

        // ExecuteAsync/AddParameter come from RepositoryBase; the tenantId argument
        // presumably establishes the RLS tenant context on the connection — confirm
        // against the base class.
        await ExecuteAsync(
            tenantId: entity.TenantId,
            sql: sql,
            configureCommand: cmd =>
            {
                AddParameter(cmd, "event_id", entity.EventId);
                AddParameter(cmd, "tenant_id", entity.TenantId);
                AddParameter(cmd, "event_type", entity.EventType);
                AddParameter(cmd, "timestamp", entity.Timestamp);
                AddParameter(cmd, "actor", entity.Actor);
                AddJsonbParameter(cmd, "details", entity.Details);
                AddParameter(cmd, "result", entity.Result);
            },
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Lists a tenant's audit rows newest-first (timestamp DESC with event_id DESC
    /// as a deterministic tie-break), optionally filtered by event type and result.
    /// Limit is clamped to [1, 1000]; offset to non-negative.
    /// </summary>
    public async Task<IReadOnlyList<OfflineKitAuditEntity>> ListAsync(
        string tenantId,
        string? eventType = null,
        string? result = null,
        int limit = 100,
        int offset = 0,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        limit = Math.Clamp(limit, 1, 1000);
        offset = Math.Max(0, offset);

        // BuildWhereClause (RepositoryBase) assembles the WHERE text and parameter
        // list from the included predicates; blank filters are skipped entirely.
        var (whereClause, whereParameters) = BuildWhereClause(
            ("tenant_id = @tenant_id", "tenant_id", tenantId, include: true),
            ("event_type = @event_type", "event_type", eventType, include: !string.IsNullOrWhiteSpace(eventType)),
            ("result = @result", "result", result, include: !string.IsNullOrWhiteSpace(result)));

        var sql = $"""
            SELECT event_id, tenant_id, event_type, timestamp, actor, details, result
            FROM authority.offline_kit_audit
            {whereClause}
            ORDER BY timestamp DESC, event_id DESC
            LIMIT @limit OFFSET @offset
            """;

        return await QueryAsync(
            tenantId: tenantId,
            sql: sql,
            configureCommand: cmd =>
            {
                foreach (var (name, value) in whereParameters)
                {
                    AddParameter(cmd, name, value);
                }

                AddParameter(cmd, "limit", limit);
                AddParameter(cmd, "offset", offset);
            },
            mapRow: MapAudit,
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    // Column order must match the SELECT list above (ordinal reads).
    private static OfflineKitAuditEntity MapAudit(NpgsqlDataReader reader) => new()
    {
        EventId = reader.GetGuid(0),
        TenantId = reader.GetString(1),
        EventType = reader.GetString(2),
        Timestamp = reader.GetFieldValue<DateTimeOffset>(3),
        Actor = reader.GetString(4),
        Details = reader.GetString(5),
        Result = reader.GetString(6)
    };
}

View File

@@ -75,6 +75,9 @@ public static class ServiceCollectionExtensions
services.AddScoped<LoginAttemptRepository>();
services.AddScoped<OidcTokenRepository>();
services.AddScoped<AirgapAuditRepository>();
services.AddScoped<OfflineKitAuditRepository>();
services.AddScoped<IOfflineKitAuditRepository>(sp => sp.GetRequiredService<OfflineKitAuditRepository>());
services.AddScoped<IOfflineKitAuditEmitter, OfflineKitAuditEmitter>();
services.AddScoped<RevocationExportStateRepository>();
}
}

View File

@@ -0,0 +1,127 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;
namespace StellaOps.Authority.Storage.Postgres.Tests;
[Collection(AuthorityPostgresCollection.Name)]
public sealed class OfflineKitAuditRepositoryTests : IAsyncLifetime
{
    private readonly AuthorityPostgresFixture _fixture;
    private readonly OfflineKitAuditRepository _repository;

    public OfflineKitAuditRepositoryTests(AuthorityPostgresFixture fixture)
    {
        _fixture = fixture;
        var options = fixture.Fixture.CreateOptions();
        options.SchemaName = fixture.SchemaName;
        var dataSource = new AuthorityDataSource(Options.Create(options), NullLogger<AuthorityDataSource>.Instance);
        _repository = new OfflineKitAuditRepository(dataSource, NullLogger<OfflineKitAuditRepository>.Instance);
    }

    public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();

    public Task DisposeAsync() => Task.CompletedTask;

    // Builds an audit entity with a fresh event id and the fixed "system" actor.
    private static OfflineKitAuditEntity MakeAudit(
        string tenantId,
        string eventType,
        DateTimeOffset timestamp,
        string details,
        string result) => new()
    {
        EventId = Guid.NewGuid(),
        TenantId = tenantId,
        EventType = eventType,
        Timestamp = timestamp,
        Actor = "system",
        Details = details,
        Result = result
    };

    [Fact]
    public async Task Insert_ThenList_ReturnsRecord()
    {
        var tenant = Guid.NewGuid().ToString("N");
        var audit = MakeAudit(
            tenant,
            "IMPORT_VALIDATED",
            DateTimeOffset.UtcNow,
            """{"kitFilename":"bundle-2025-12-14.tar.zst"}""",
            "success");

        await _repository.InsertAsync(audit);
        var rows = await _repository.ListAsync(tenant, limit: 10);

        rows.Should().ContainSingle();
        rows[0].EventId.Should().Be(audit.EventId);
        rows[0].EventType.Should().Be(audit.EventType);
        rows[0].Actor.Should().Be(audit.Actor);
        rows[0].Result.Should().Be(audit.Result);
        rows[0].Details.Should().Contain("kitFilename");
    }

    [Fact]
    public async Task List_WithFilters_ReturnsMatchingRows()
    {
        var tenant = Guid.NewGuid().ToString("N");

        await _repository.InsertAsync(MakeAudit(
            tenant,
            "IMPORT_FAILED_DSSE",
            DateTimeOffset.UtcNow.AddMinutes(-2),
            """{"reasonCode":"DSSE_VERIFY_FAIL"}""",
            "failed"));
        await _repository.InsertAsync(MakeAudit(
            tenant,
            "IMPORT_VALIDATED",
            DateTimeOffset.UtcNow.AddMinutes(-1),
            """{"status":"ok"}""",
            "success"));

        var failed = await _repository.ListAsync(tenant, result: "failed", limit: 10);
        failed.Should().ContainSingle();
        failed[0].Result.Should().Be("failed");

        var validated = await _repository.ListAsync(tenant, eventType: "IMPORT_VALIDATED", limit: 10);
        validated.Should().ContainSingle();
        validated[0].EventType.Should().Be("IMPORT_VALIDATED");
    }

    [Fact]
    public async Task List_IsTenantIsolated()
    {
        var tenantA = Guid.NewGuid().ToString("N");
        var tenantB = Guid.NewGuid().ToString("N");

        await _repository.InsertAsync(MakeAudit(
            tenantA,
            "IMPORT_VALIDATED",
            DateTimeOffset.UtcNow.AddMinutes(-1),
            """{"status":"ok"}""",
            "success"));
        await _repository.InsertAsync(MakeAudit(
            tenantB,
            "IMPORT_VALIDATED",
            DateTimeOffset.UtcNow,
            """{"status":"ok"}""",
            "success"));

        var tenantARows = await _repository.ListAsync(tenantA, limit: 10);
        tenantARows.Should().ContainSingle();
        tenantARows[0].TenantId.Should().Be(tenantA);

        var tenantBRows = await _repository.ListAsync(tenantB, limit: 10);
        tenantBRows.Should().ContainSingle();
        tenantBRows[0].TenantId.Should().Be(tenantB);
    }
}

View File

@@ -4,7 +4,7 @@ using System.Collections.Concurrent;
namespace StellaOps.Authority.Storage.Postgres.Tests.TestDoubles;
internal sealed class InMemoryTokenRepository : ITokenRepository, ISecondaryTokenRepository
internal sealed class InMemoryTokenRepository : ITokenRepository
{
private readonly ConcurrentDictionary<Guid, TokenEntity> _tokens = new();
public bool FailWrites { get; set; }
@@ -67,7 +67,7 @@ internal sealed class InMemoryTokenRepository : ITokenRepository, ISecondaryToke
public IReadOnlyCollection<TokenEntity> Snapshot() => _tokens.Values.ToList();
}
internal sealed class InMemoryRefreshTokenRepository : IRefreshTokenRepository, ISecondaryRefreshTokenRepository
internal sealed class InMemoryRefreshTokenRepository : IRefreshTokenRepository
{
private readonly ConcurrentDictionary<Guid, RefreshTokenEntity> _tokens = new();
public bool FailWrites { get; set; }
@@ -130,7 +130,7 @@ internal sealed class InMemoryRefreshTokenRepository : IRefreshTokenRepository,
public IReadOnlyCollection<RefreshTokenEntity> Snapshot() => _tokens.Values.ToList();
}
internal sealed class InMemoryUserRepository : IUserRepository, ISecondaryUserRepository
internal sealed class InMemoryUserRepository : IUserRepository
{
private readonly ConcurrentDictionary<Guid, UserEntity> _users = new();

View File

@@ -80,6 +80,7 @@ internal static class CommandFactory
root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
root.Add(OfflineCommandGroup.BuildOfflineCommand(services, verboseOption, cancellationToken));
root.Add(BuildDevPortalCommand(services, verboseOption, cancellationToken));
root.Add(BuildSymbolsCommand(services, verboseOption, cancellationToken));
root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));
@@ -9338,6 +9339,53 @@ internal static class CommandFactory
start.Add(startAttestation);
export.Add(start);
var cache = new Command("cache", "Local evidence cache operations.");
var scanOutputPathOption = new Option<string>("--scan-output", new[] { "-p" })
{
Description = "Path to scan output directory containing a local evidence cache (.evidence).",
Required = true
};
var cacheStats = new Command("stats", "Show local evidence cache statistics.");
cacheStats.Add(scanOutputPathOption);
cacheStats.Add(jsonOption);
cacheStats.Add(verboseOption);
cacheStats.SetAction((parseResult, _) =>
{
var scanOutputPath = parseResult.GetValue(scanOutputPathOption) ?? string.Empty;
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportCacheStatsAsync(
services,
scanOutputPath,
json,
verbose,
cancellationToken);
});
var cacheProcessQueue = new Command("process-queue", "Process deferred enrichment queue for local evidence cache.");
cacheProcessQueue.Add(scanOutputPathOption);
cacheProcessQueue.Add(jsonOption);
cacheProcessQueue.Add(verboseOption);
cacheProcessQueue.SetAction((parseResult, _) =>
{
var scanOutputPath = parseResult.GetValue(scanOutputPathOption) ?? string.Empty;
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportCacheProcessQueueAsync(
services,
scanOutputPath,
json,
verbose,
cancellationToken);
});
cache.Add(cacheStats);
cache.Add(cacheProcessQueue);
export.Add(cache);
return export;
}

View File

@@ -0,0 +1,113 @@
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Spectre.Console;
using StellaOps.ExportCenter.Core.EvidenceCache;
namespace StellaOps.Cli.Commands;
internal static partial class CommandHandlers
{
    /// <summary>
    /// Validates and resolves the --scan-output argument shared by the export
    /// cache commands. Prints the appropriate error markup and returns false
    /// when the value is blank or the directory does not exist.
    /// </summary>
    private static bool TryResolveScanOutputDirectory(string scanOutputPath, out string resolvedPath)
    {
        resolvedPath = string.Empty;
        if (string.IsNullOrWhiteSpace(scanOutputPath))
        {
            AnsiConsole.MarkupLine("[red]Scan output path is required.[/]");
            return false;
        }

        resolvedPath = Path.GetFullPath(scanOutputPath);
        if (!Directory.Exists(resolvedPath))
        {
            AnsiConsole.MarkupLine($"[red]Scan output directory not found:[/] {Markup.Escape(resolvedPath)}");
            return false;
        }

        return true;
    }

    /// <summary>
    /// Handles `export cache stats`: reads local evidence cache statistics for
    /// the given scan output directory and renders them as JSON or a table.
    /// Returns 0 on success, 1 on invalid/missing scan output path.
    /// </summary>
    internal static async Task<int> HandleExportCacheStatsAsync(
        IServiceProvider services,
        string scanOutputPath,
        bool json,
        bool verbose,
        CancellationToken cancellationToken)
    {
        SetVerbosity(services, verbose);
        if (!TryResolveScanOutputDirectory(scanOutputPath, out var resolvedPath))
        {
            return 1;
        }

        var cache = services.GetRequiredService<IEvidenceCacheService>();
        var statistics = await cache.GetStatisticsAsync(resolvedPath, cancellationToken).ConfigureAwait(false);

        if (json)
        {
            var payload = new
            {
                scanOutput = resolvedPath,
                statistics
            };
            AnsiConsole.WriteLine(JsonSerializer.Serialize(payload, JsonOptions));
            return 0;
        }

        if (statistics.TotalBundles == 0)
        {
            // Informational only; the (empty) table is still rendered below.
            AnsiConsole.MarkupLine("[yellow]No evidence cache entries found.[/]");
        }

        var table = new Table().AddColumns("Field", "Value");
        table.AddRow("Scan output", Markup.Escape(resolvedPath));
        table.AddRow("Total bundles", statistics.TotalBundles.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Fully available", statistics.FullyAvailable.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Partially available", statistics.PartiallyAvailable.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Pending enrichment", statistics.PendingEnrichment.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Offline resolvable", FormattableString.Invariant($"{statistics.OfflineResolvablePercentage:0.##}%"));
        table.AddRow("Total size", FormatBytes(statistics.TotalSizeBytes));
        AnsiConsole.Write(table);
        return 0;
    }

    /// <summary>
    /// Handles `export cache process-queue`: processes the deferred enrichment
    /// queue for the local evidence cache and reports processed/failed/remaining
    /// counts. Returns 0 on success, 1 on invalid/missing scan output path.
    /// </summary>
    internal static async Task<int> HandleExportCacheProcessQueueAsync(
        IServiceProvider services,
        string scanOutputPath,
        bool json,
        bool verbose,
        CancellationToken cancellationToken)
    {
        SetVerbosity(services, verbose);
        if (!TryResolveScanOutputDirectory(scanOutputPath, out var resolvedPath))
        {
            return 1;
        }

        var cache = services.GetRequiredService<IEvidenceCacheService>();
        var result = await cache.ProcessEnrichmentQueueAsync(resolvedPath, cancellationToken).ConfigureAwait(false);

        if (json)
        {
            var payload = new
            {
                scanOutput = resolvedPath,
                result
            };
            AnsiConsole.WriteLine(JsonSerializer.Serialize(payload, JsonOptions));
            return 0;
        }

        var table = new Table().AddColumns("Field", "Value");
        table.AddRow("Scan output", Markup.Escape(resolvedPath));
        table.AddRow("Processed", result.ProcessedCount.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Failed", result.FailedCount.ToString(CultureInfo.InvariantCulture));
        table.AddRow("Remaining", result.RemainingCount.ToString(CultureInfo.InvariantCulture));
        AnsiConsole.Write(table);
        return 0;
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -49,10 +49,14 @@ using StellaOps.Scanner.Analyzers.Lang.Php;
using StellaOps.Scanner.Analyzers.Lang.Bun;
using StellaOps.Policy;
using StellaOps.PolicyDsl;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Quarantine;
using StellaOps.AirGap.Importer.Validation;
using StellaOps.AirGap.Importer.Versioning;
namespace StellaOps.Cli.Commands;
internal static class CommandHandlers
internal static partial class CommandHandlers
{
private const string KmsPassphraseEnvironmentVariable = "STELLAOPS_KMS_PASSPHRASE";
private static readonly JsonSerializerOptions KmsJsonOptions = new(JsonSerializerDefaults.Web)

View File

@@ -0,0 +1,164 @@
using System.CommandLine;
using StellaOps.Cli.Extensions;
namespace StellaOps.Cli.Commands;
/// <summary>
/// Builds the `offline` CLI command group (air-gap / offline kit operations):
/// `offline import` and `offline status`. Execution is delegated to
/// <see cref="CommandHandlers"/>.
/// </summary>
internal static class OfflineCommandGroup
{
/// <summary>
/// Builds the root `offline` command with its `import` and `status` subcommands.
/// </summary>
/// <param name="services">Service provider passed through to the command handlers.</param>
/// <param name="verboseOption">Shared global --verbose option, attached to each subcommand.</param>
/// <param name="cancellationToken">Token forwarded to the async handlers.</param>
internal static Command BuildOfflineCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var offline = new Command("offline", "Air-gap and offline kit operations.");
offline.Add(BuildOfflineImportCommand(services, verboseOption, cancellationToken));
offline.Add(BuildOfflineStatusCommand(services, verboseOption, cancellationToken));
return offline;
}
/// <summary>
/// Builds `offline import`: imports an offline kit bundle with optional DSSE
/// and Rekor verification, monotonicity override, and dry-run validation.
/// </summary>
private static Command BuildOfflineImportCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var tenantOption = new Option<string?>("--tenant")
{
Description = "Tenant context for the import (defaults to profile/ENV)."
};
var bundleOption = new Option<string>("--bundle", new[] { "-b" })
{
Description = "Path to the offline kit payload bundle (.tar.zst).",
Required = true
};
var manifestOption = new Option<string?>("--manifest", new[] { "-m" })
{
Description = "Path to offline manifest JSON (defaults to manifest.json next to the bundle)."
};
// Verification is opt-out: both DSSE and Rekor checks default to true.
// NOTE(review): SetDefaultValue/FromAmong are project extensions
// (StellaOps.Cli.Extensions) — presumably thin wrappers over
// System.CommandLine defaults/completions; confirm their chaining semantics.
var verifyDsseOption = new Option<bool>("--verify-dsse")
{
Description = "Verify DSSE signature on the kit statement."
}.SetDefaultValue(true);
var verifyRekorOption = new Option<bool>("--verify-rekor")
{
Description = "Verify Rekor receipt (offline mode)."
}.SetDefaultValue(true);
var trustRootOption = new Option<string?>("--trust-root")
{
Description = "Path to trust root public key file for DSSE verification."
};
var forceActivateOption = new Option<bool>("--force-activate")
{
Description = "Override monotonicity check (requires justification)."
};
var forceReasonOption = new Option<string?>("--force-reason")
{
Description = "Justification for force activation (required with --force-activate)."
};
var dryRunOption = new Option<bool>("--dry-run")
{
Description = "Validate the kit without activating."
};
var outputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Output format: table (default), json."
}.SetDefaultValue("table").FromAmong("table", "json");
var command = new Command("import", "Import an offline kit with verification.")
{
tenantOption,
bundleOption,
manifestOption,
verifyDsseOption,
verifyRekorOption,
trustRootOption,
forceActivateOption,
forceReasonOption,
dryRunOption,
outputOption,
verboseOption
};
command.SetAction(parseResult =>
{
// Unpack all parsed option values and delegate to the handler.
var tenant = parseResult.GetValue(tenantOption);
var bundle = parseResult.GetValue(bundleOption) ?? string.Empty;
var manifest = parseResult.GetValue(manifestOption);
var verifyDsse = parseResult.GetValue(verifyDsseOption);
var verifyRekor = parseResult.GetValue(verifyRekorOption);
var trustRoot = parseResult.GetValue(trustRootOption);
var forceActivate = parseResult.GetValue(forceActivateOption);
var forceReason = parseResult.GetValue(forceReasonOption);
var dryRun = parseResult.GetValue(dryRunOption);
var output = parseResult.GetValue(outputOption) ?? "table";
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleOfflineImportAsync(
services,
tenant,
bundle,
manifest,
verifyDsse,
verifyRekor,
trustRoot,
forceActivate,
forceReason,
dryRun,
output,
verbose,
cancellationToken);
});
return command;
}
/// <summary>
/// Builds `offline status`: displays the current offline kit status for a
/// tenant as a table (default) or JSON.
/// </summary>
private static Command BuildOfflineStatusCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var tenantOption = new Option<string?>("--tenant")
{
Description = "Tenant context for the status (defaults to profile/ENV)."
};
var outputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Output format: table (default), json."
}.SetDefaultValue("table").FromAmong("table", "json");
var command = new Command("status", "Display current offline kit status.")
{
tenantOption,
outputOption,
verboseOption
};
command.SetAction(parseResult =>
{
var tenant = parseResult.GetValue(tenantOption);
var output = parseResult.GetValue(outputOption) ?? "table";
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleOfflineStatusAsync(
services,
tenant,
output,
verbose,
cancellationToken);
});
return command;
}
}

View File

@@ -0,0 +1,25 @@
namespace StellaOps.Cli.Commands;
/// <summary>
/// Exit codes for offline commands.
/// Per advisory A11.1-11.2.
/// Comments reference the reason-code constants in
/// <c>OfflineKitReasonCodes</c> that map to each exit code.
/// </summary>
internal static class OfflineExitCodes
{
// Command completed without error.
public const int Success = 0;
// Bundle/manifest file missing on disk.
public const int FileNotFound = 1;
public const int ChecksumMismatch = 2; // HASH_MISMATCH
public const int SignatureFailure = 3; // SIG_FAIL_COSIGN, SIG_FAIL_MANIFEST
// Bundle or manifest could not be parsed.
public const int FormatError = 4;
public const int DsseVerificationFailed = 5; // DSSE_VERIFY_FAIL
public const int RekorVerificationFailed = 6; // REKOR_VERIFY_FAIL
// Generic import failure; also the fallback for unknown reason codes.
public const int ImportFailed = 7;
public const int VersionNonMonotonic = 8; // VERSION_NON_MONOTONIC
public const int PolicyDenied = 9; // POLICY_DENY
public const int SelftestFailed = 10; // SELFTEST_FAIL
// Validation / verification stages failed (distinct from import itself).
public const int ValidationFailed = 11;
public const int VerificationFailed = 12;
// Policy inputs could not be loaded.
public const int PolicyLoadFailed = 13;
public const int Cancelled = 130; // Standard SIGINT
}

View File

@@ -249,6 +249,20 @@ public static class CliErrorCodes
public const string ValidationFailed = "ERR_VALIDATION_FAILED";
public const string RateLimited = "ERR_RATE_LIMIT";
public const string AirGapBlocked = "ERR_AIRGAP_EGRESS_BLOCKED";
// CLI-AIRGAP-341-001: Offline Kit / AirGap error codes (exit code 7)
public const string OfflineKitImportFailed = "ERR_AIRGAP_OFFLINE_KIT_IMPORT_FAILED";
public const string OfflineKitStatusFailed = "ERR_AIRGAP_OFFLINE_KIT_STATUS_FAILED";
public const string OfflineKitVerifyFailed = "ERR_AIRGAP_OFFLINE_KIT_VERIFY_FAILED";
public const string OfflineKitHashMismatch = "ERR_AIRGAP_OFFLINE_KIT_HASH_MISMATCH";
public const string OfflineKitCosignSignatureInvalid = "ERR_AIRGAP_OFFLINE_KIT_SIG_FAIL_COSIGN";
public const string OfflineKitManifestSignatureInvalid = "ERR_AIRGAP_OFFLINE_KIT_SIG_FAIL_MANIFEST";
public const string OfflineKitDsseVerifyFailed = "ERR_AIRGAP_OFFLINE_KIT_DSSE_VERIFY_FAIL";
public const string OfflineKitRekorVerifyFailed = "ERR_AIRGAP_OFFLINE_KIT_REKOR_VERIFY_FAIL";
public const string OfflineKitSelfTestFailed = "ERR_AIRGAP_OFFLINE_KIT_SELFTEST_FAIL";
public const string OfflineKitVersionNonMonotonic = "ERR_AIRGAP_OFFLINE_KIT_VERSION_NON_MONOTONIC";
public const string OfflineKitPolicyDenied = "ERR_AIRGAP_OFFLINE_KIT_POLICY_DENY";
public const string AocViolation = "ERR_AOC_001";
public const string NetworkError = "ERR_NETWORK_FAILED";
public const string Timeout = "ERR_TIMEOUT";

View File

@@ -67,6 +67,11 @@ internal static class CliErrorRenderer
// Error code
AnsiConsole.MarkupLine($"[grey]Code:[/] {Markup.Escape(error.Code)}");
if (TryGetReasonCode(error, out var reasonCode))
{
AnsiConsole.MarkupLine($"[grey]Reason:[/] {Markup.Escape(reasonCode)}");
}
// Detail (if present)
if (!string.IsNullOrWhiteSpace(error.Detail))
{
@@ -207,5 +212,41 @@ internal static class CliErrorRenderer
RenderScopeGuidance(error);
RenderRateLimitGuidance(error);
RenderAuthGuidance(error);
RenderOfflineKitGuidance(error);
}
/// <summary>
/// Extracts and normalizes a reason code from the error metadata, checking
/// the snake_case key first and falling back to the camelCase key.
/// </summary>
private static bool TryGetReasonCode(CliError error, out string reasonCode)
{
    reasonCode = "";
    if (error.Metadata is null || error.Metadata.Count == 0)
    {
        return false;
    }

    // Prefer "reason_code"; only consult "reasonCode" when the first key is
    // absent or blank. Either hit is then normalized (trim + upper-case).
    var found = error.Metadata.TryGetValue("reason_code", out reasonCode) && !string.IsNullOrWhiteSpace(reasonCode);
    if (!found)
    {
        found = error.Metadata.TryGetValue("reasonCode", out reasonCode) && !string.IsNullOrWhiteSpace(reasonCode);
    }

    if (!found)
    {
        return false;
    }

    reasonCode = OfflineKitReasonCodes.Normalize(reasonCode) ?? "";
    return reasonCode.Length > 0;
}
/// <summary>
/// Prints a remediation hint for offline-kit failures when the error carries
/// a recognized reason code; otherwise renders nothing.
/// </summary>
private static void RenderOfflineKitGuidance(CliError error)
{
    if (TryGetReasonCode(error, out var reasonCode))
    {
        var remediation = OfflineKitReasonCodes.GetRemediation(reasonCode);
        if (!string.IsNullOrWhiteSpace(remediation))
        {
            AnsiConsole.WriteLine();
            AnsiConsole.MarkupLine($"[yellow]Remediation:[/] {Markup.Escape(remediation)}");
        }
    }
}
}

View File

@@ -0,0 +1,63 @@
using StellaOps.Cli.Commands;
namespace StellaOps.Cli.Output;
/// <summary>
/// Canonical Offline Kit failure reason codes, with mappings to process exit
/// codes (<see cref="OfflineExitCodes"/>) and operator remediation guidance.
/// </summary>
public static class OfflineKitReasonCodes
{
    public const string HashMismatch = "HASH_MISMATCH";
    public const string SigFailCosign = "SIG_FAIL_COSIGN";
    public const string SigFailManifest = "SIG_FAIL_MANIFEST";
    public const string DsseVerifyFail = "DSSE_VERIFY_FAIL";
    public const string RekorVerifyFail = "REKOR_VERIFY_FAIL";
    public const string SelfTestFail = "SELFTEST_FAIL";
    public const string VersionNonMonotonic = "VERSION_NON_MONOTONIC";
    public const string PolicyDeny = "POLICY_DENY";

    /// <summary>
    /// Canonicalizes a reason code: trims and upper-cases; null or whitespace
    /// input yields null.
    /// </summary>
    public static string? Normalize(string? reasonCode)
        => string.IsNullOrWhiteSpace(reasonCode) ? null : reasonCode.Trim().ToUpperInvariant();

    /// <summary>
    /// Maps a (possibly raw) reason code to its process exit code. Unknown or
    /// absent codes fall back to <see cref="OfflineExitCodes.ImportFailed"/>.
    /// </summary>
    public static int GetExitCode(string? reasonCode) => Normalize(reasonCode) switch
    {
        HashMismatch => OfflineExitCodes.ChecksumMismatch,
        // Both signature-failure variants share one exit code.
        SigFailCosign or SigFailManifest => OfflineExitCodes.SignatureFailure,
        DsseVerifyFail => OfflineExitCodes.DsseVerificationFailed,
        RekorVerifyFail => OfflineExitCodes.RekorVerificationFailed,
        VersionNonMonotonic => OfflineExitCodes.VersionNonMonotonic,
        PolicyDeny => OfflineExitCodes.PolicyDenied,
        SelfTestFail => OfflineExitCodes.SelftestFailed,
        // null (no/blank code) and any unrecognized code → generic import failure.
        _ => OfflineExitCodes.ImportFailed
    };

    /// <summary>
    /// Returns operator-facing remediation text for a known reason code, or
    /// null when the code is absent or unrecognized.
    /// </summary>
    public static string? GetRemediation(string? reasonCode) => Normalize(reasonCode) switch
    {
        HashMismatch =>
            "Re-download the bundle and re-run import. If using removable media, verify the device is healthy and that the bundle digest matches the manifest.",
        SigFailCosign =>
            "Verify the Cosign signature and trust roots. Ensure you imported the correct signing public keys and that the signature matches the bundle.",
        SigFailManifest =>
            "Verify the manifest signature and trust roots. Ensure the manifest and its detached signature belong to the same kit version.",
        DsseVerifyFail =>
            "Verify DSSE trust roots and that the envelope key ID matches an allowed signer. Re-export the kit if the envelope is missing or malformed.",
        RekorVerifyFail =>
            "Verify Rekor inclusion proof settings (offline snapshot, UUID/index) and re-run verification. Check for time skew and stale transparency data.",
        VersionNonMonotonic =>
            "The incoming kit version is older than the active version. Import a newer kit, or use --force-activate (with a reason) for emergency rollback testing only.",
        PolicyDeny =>
            "The current policy denies activation. Review policy gates, waivers, and VEX precedence; then re-run import after updating policy inputs.",
        SelfTestFail =>
            "Run the Offline Kit self-test and review its output. Confirm required binaries, permissions, and disk space are available in the air-gapped environment.",
        _ => null
    };
}

View File

@@ -16,6 +16,7 @@ using StellaOps.AirGap.Policy;
using StellaOps.Configuration;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.ExportCenter.Client;
using StellaOps.ExportCenter.Core.EvidenceCache;
namespace StellaOps.Cli;
@@ -155,6 +156,8 @@ internal static class Program
services.AddSingleton<IScannerExecutor, ScannerExecutor>();
services.AddSingleton<IScannerInstaller, ScannerInstaller>();
services.AddSingleton<MigrationCommandService>();
services.AddSingleton(TimeProvider.System);
services.AddSingleton<IEvidenceCacheService, LocalEvidenceCacheService>();
// CLI-FORENSICS-53-001: Forensic snapshot client
services.AddHttpClient<IForensicSnapshotClient, ForensicSnapshotClient>(client =>

View File

@@ -2320,6 +2320,37 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
return null;
}
/// <summary>
/// Returns the first non-blank string value found in the problem document's
/// extensions under any of the given keys; tolerates both raw strings and
/// JSON string elements. Returns null when nothing usable is present.
/// </summary>
private static string? ExtractProblemExtensionString(ProblemDocument? problem, params string[] keys)
{
    var extensions = problem?.Extensions;
    if (extensions is null || extensions.Count == 0 || keys.Length == 0)
    {
        return null;
    }

    foreach (var key in keys)
    {
        if (!extensions.TryGetValue(key, out var value) || value is null)
        {
            continue;
        }

        if (value is string text && !string.IsNullOrWhiteSpace(text))
        {
            return text;
        }

        if (value is JsonElement { ValueKind: JsonValueKind.String } element &&
            element.GetString() is { } parsed &&
            !string.IsNullOrWhiteSpace(parsed))
        {
            return parsed;
        }
    }

    return null;
}
private static string BuildPolicyFindingsQueryString(PolicyFindingsQuery query)
{
var parameters = new List<string>();
@@ -2853,6 +2884,7 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
{
// Extract error code from problem type URI
errorCode = ExtractErrorCodeFromProblemType(problem.Type);
errorCode ??= ExtractProblemErrorCode(problem);
if (!string.IsNullOrWhiteSpace(problem.Title))
{
@@ -2868,21 +2900,23 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
// Check for trace_id in extensions
if (problem.Extensions is not null)
{
if (problem.Extensions.TryGetValue("trace_id", out var tid) && tid is string tidStr)
var extensionTraceId = ExtractProblemExtensionString(problem, "trace_id", "traceId");
if (!string.IsNullOrWhiteSpace(extensionTraceId))
{
traceId ??= tidStr;
traceId ??= extensionTraceId;
}
if (problem.Extensions.TryGetValue("traceId", out var tid2) && tid2 is string tid2Str)
var extensionErrorCode = ExtractProblemExtensionString(problem, "error_code", "errorCode");
if (!string.IsNullOrWhiteSpace(extensionErrorCode))
{
traceId ??= tid2Str;
errorCode ??= extensionErrorCode;
}
if (problem.Extensions.TryGetValue("error_code", out var ec) && ec is string ecStr)
var reasonCode = ExtractProblemExtensionString(problem, "reason_code", "reasonCode");
if (!string.IsNullOrWhiteSpace(reasonCode))
{
errorCode ??= ecStr;
}
if (problem.Extensions.TryGetValue("errorCode", out var ec2) && ec2 is string ec2Str)
{
errorCode ??= ec2Str;
metadata ??= new Dictionary<string, object?>(StringComparer.Ordinal);
metadata["reason_code"] = reasonCode;
}
}
}

View File

@@ -0,0 +1,120 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.AirGap.Importer.Versioning;
namespace StellaOps.Cli.Services;
/// <summary>
/// File-backed <see cref="IBundleVersionStore"/> that keeps one JSON history
/// file per (tenant, bundle type) under a state directory. Writes go through
/// a temp file that is atomically moved into place.
/// </summary>
internal sealed class FileBundleVersionStore : IBundleVersionStore
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string _stateDirectory;
    private readonly ILogger<FileBundleVersionStore> _logger;

    public FileBundleVersionStore(string stateDirectory, ILogger<FileBundleVersionStore> logger)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(stateDirectory);
        _stateDirectory = stateDirectory;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Returns the latest record (by activation time, then version string) for
    /// the tenant/bundle type, or null when no history exists.
    /// </summary>
    public async Task<BundleVersionRecord?> GetCurrentAsync(
        string tenantId,
        string bundleType,
        CancellationToken ct = default)
    {
        var history = await GetHistoryInternalAsync(tenantId, bundleType, ct).ConfigureAwait(false);
        return history
            .OrderByDescending(record => record.ActivatedAt)
            .ThenByDescending(record => record.VersionString, StringComparer.Ordinal)
            .FirstOrDefault();
    }

    /// <summary>
    /// Appends a record to the history file and rewrites it atomically.
    /// NOTE(review): despite the "Upsert" name this always appends — calling it
    /// twice with the same version produces duplicate history entries; confirm
    /// append-only history is the intended semantics.
    /// </summary>
    public async Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(record);
        Directory.CreateDirectory(_stateDirectory);
        var path = GetStatePath(record.TenantId, record.BundleType);
        var history = await GetHistoryInternalAsync(record.TenantId, record.BundleType, ct).ConfigureAwait(false);
        history.Add(record);
        var ordered = history
            .OrderBy(r => r.ActivatedAt)
            .ThenBy(r => r.VersionString, StringComparer.Ordinal)
            .ToList();

        var tempPath = path + ".tmp";
        await using (var stream = File.Create(tempPath))
        {
            await JsonSerializer.SerializeAsync(stream, ordered, JsonOptions, ct).ConfigureAwait(false);
        }

        // Atomic (on the same volume) replace. The previous Copy+Delete pair
        // was not atomic and left a window where both files coexisted.
        File.Move(tempPath, path, overwrite: true);
    }

    /// <summary>
    /// Returns up to <paramref name="limit"/> history records, newest first.
    /// </summary>
    public async Task<IReadOnlyList<BundleVersionRecord>> GetHistoryAsync(
        string tenantId,
        string bundleType,
        int limit = 10,
        CancellationToken ct = default)
    {
        var history = await GetHistoryInternalAsync(tenantId, bundleType, ct).ConfigureAwait(false);
        return history
            .OrderByDescending(r => r.ActivatedAt)
            .ThenByDescending(r => r.VersionString, StringComparer.Ordinal)
            .Take(Math.Max(0, limit))
            .ToArray();
    }

    // Loads the raw history list; a missing or unreadable file is treated as
    // empty (logged at warning) so a corrupt state file never blocks imports.
    private async Task<List<BundleVersionRecord>> GetHistoryInternalAsync(
        string tenantId,
        string bundleType,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(bundleType);
        var path = GetStatePath(tenantId, bundleType);
        if (!File.Exists(path))
        {
            return new List<BundleVersionRecord>();
        }

        try
        {
            await using var stream = File.OpenRead(path);
            var records = await JsonSerializer.DeserializeAsync<List<BundleVersionRecord>>(stream, JsonOptions, ct).ConfigureAwait(false);
            return records ?? new List<BundleVersionRecord>();
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to read bundle version history from {Path}", path);
            return new List<BundleVersionRecord>();
        }
    }

    // One file per (tenant, bundleType), both sanitized for use in a filename.
    private string GetStatePath(string tenantId, string bundleType)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        var safeBundleType = SanitizePathSegment(bundleType);
        return Path.Combine(_stateDirectory, $"bundle-versions__{safeTenant}__{safeBundleType}.json");
    }

    // Lower-cases and replaces filename-invalid chars, path separators, and
    // whitespace with '_' so arbitrary tenant/bundle ids are filesystem-safe.
    private static string SanitizePathSegment(string value)
    {
        var trimmed = value.Trim().ToLowerInvariant();
        var invalid = Path.GetInvalidFileNameChars();
        var chars = trimmed
            .Select(c => invalid.Contains(c) || c == '/' || c == '\\' || char.IsWhiteSpace(c) ? '_' : c)
            .ToArray();
        return new string(chars);
    }
}

View File

@@ -23,7 +23,6 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
{
private readonly IBundleCatalogRepository _catalogRepository;
private readonly IBundleItemRepository _itemRepository;
private readonly ImportValidator _validator;
private readonly ILogger<MirrorBundleImportService> _logger;
public MirrorBundleImportService(
@@ -34,7 +33,6 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
_catalogRepository = catalogRepository ?? throw new ArgumentNullException(nameof(catalogRepository));
_itemRepository = itemRepository ?? throw new ArgumentNullException(nameof(itemRepository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_validator = new ImportValidator();
}
public async Task<MirrorImportResult> ImportAsync(MirrorImportRequest request, CancellationToken cancellationToken)

View File

@@ -0,0 +1,92 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Cli.Services;
/// <summary>
/// File-backed store for the per-tenant "active offline kit" state, one JSON
/// file per tenant under a state directory. Writes go through a temp file
/// that is atomically moved into place.
/// </summary>
internal sealed class OfflineKitStateStore
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string _stateDirectory;
    private readonly ILogger<OfflineKitStateStore> _logger;

    public OfflineKitStateStore(string stateDirectory, ILogger<OfflineKitStateStore> logger)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(stateDirectory);
        _stateDirectory = stateDirectory;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Serializes the active state to the tenant's state file, replacing any
    /// previous contents.
    /// </summary>
    public async Task SaveActiveAsync(OfflineKitActiveState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);
        Directory.CreateDirectory(_stateDirectory);
        var path = GetActiveStatePath(state.TenantId);
        var temp = path + ".tmp";
        await using (var stream = File.Create(temp))
        {
            await JsonSerializer.SerializeAsync(stream, state, JsonOptions, cancellationToken).ConfigureAwait(false);
        }

        // Atomic (on the same volume) replace. The previous Copy+Delete pair
        // was not atomic and left a window where both files coexisted.
        File.Move(temp, path, overwrite: true);
    }

    /// <summary>
    /// Loads the tenant's active state; returns null when no state file exists
    /// or it cannot be read/parsed (logged at warning).
    /// </summary>
    public async Task<OfflineKitActiveState?> LoadActiveAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        var path = GetActiveStatePath(tenantId);
        if (!File.Exists(path))
        {
            return null;
        }

        try
        {
            await using var stream = File.OpenRead(path);
            return await JsonSerializer.DeserializeAsync<OfflineKitActiveState>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to read offline kit state from {Path}", path);
            return null;
        }
    }

    // One state file per tenant, sanitized for use in a filename.
    private string GetActiveStatePath(string tenantId)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        return Path.Combine(_stateDirectory, $"offline-kit-active__{safeTenant}.json");
    }

    // Lower-cases and replaces filename-invalid chars, path separators, and
    // whitespace with '_' so arbitrary tenant ids are filesystem-safe.
    private static string SanitizePathSegment(string value)
    {
        var trimmed = value.Trim().ToLowerInvariant();
        var invalid = Path.GetInvalidFileNameChars();
        var chars = trimmed
            .Select(c => invalid.Contains(c) || c == '/' || c == '\\' || char.IsWhiteSpace(c) ? '_' : c)
            .ToArray();
        return new string(chars);
    }
}
/// <summary>
/// Immutable snapshot of the offline kit currently active for a tenant,
/// serialized to disk as JSON by <see cref="OfflineKitStateStore"/>.
/// </summary>
/// <param name="TenantId">Tenant the activation belongs to.</param>
/// <param name="BundlePath">Filesystem path of the imported bundle archive.</param>
/// <param name="ManifestPath">Filesystem path of the bundle manifest.</param>
/// <param name="Version">Kit version string.</param>
/// <param name="ManifestCreatedAt">Creation timestamp recorded for the manifest.</param>
/// <param name="PayloadSha256">SHA-256 hex digest recorded for the kit payload.</param>
/// <param name="BundleDigest">Digest recorded for the bundle file.</param>
/// <param name="ActivatedAt">When this kit became the active one.</param>
/// <param name="DsseVerified">Whether DSSE signature verification passed at import.</param>
/// <param name="RekorVerified">Whether Rekor receipt verification passed at import.</param>
/// <param name="WasForceActivated">True when activation was forced past verification.</param>
/// <param name="ForceActivateReason">Operator-supplied justification for a forced activation; null otherwise.</param>
internal sealed record OfflineKitActiveState(
    string TenantId,
    string BundlePath,
    string ManifestPath,
    string Version,
    DateTimeOffset ManifestCreatedAt,
    string PayloadSha256,
    string BundleDigest,
    DateTimeOffset ActivatedAt,
    bool DsseVerified,
    bool RekorVerified,
    bool WasForceActivated,
    string? ForceActivateReason);

View File

@@ -237,10 +237,29 @@ public abstract class StellaOpsClientBase : IDisposable
var problem = JsonSerializer.Deserialize<ProblemDocument>(content, JsonOptions);
if (problem is not null)
{
var code = ExtractErrorCodeFromProblemType(problem.Type)
?? ExtractProblemExtensionString(problem, "error_code", "errorCode")
?? ExtractProblemExtensionString(problem, "code")
?? $"ERR_HTTP_{statusCode}";
var traceId = ExtractProblemExtensionString(problem, "trace_id", "traceId");
Dictionary<string, string>? metadata = null;
var reasonCode = ExtractProblemExtensionString(problem, "reason_code", "reasonCode");
if (!string.IsNullOrWhiteSpace(reasonCode))
{
metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["reason_code"] = reasonCode
};
}
return new CliError(
Code: problem.Type ?? $"ERR_HTTP_{statusCode}",
Code: code,
Message: problem.Title ?? $"HTTP error {statusCode}",
Detail: problem.Detail);
TraceId: traceId,
Detail: problem.Detail,
Metadata: metadata);
}
}
catch (JsonException)
@@ -253,6 +272,63 @@ public abstract class StellaOpsClientBase : IDisposable
return CliError.FromHttpStatus(statusCode, content);
}
/// <summary>
/// Extracts a machine-readable error code from a ProblemDetails
/// <c>type</c> URI. Recognizes <c>urn:stellaops:error:CODE</c>,
/// <c>.../errors/CODE</c> paths, and bare <c>ERR_*</c> codes; returns
/// <c>null</c> when no code can be derived.
/// </summary>
private static string? ExtractErrorCodeFromProblemType(string? type)
{
    if (string.IsNullOrWhiteSpace(type))
    {
        return null;
    }

    // Named constant instead of a hard-coded slice offset (the previous
    // `type[20..]` silently depended on the prefix length staying 20).
    const string UrnPrefix = "urn:stellaops:error:";
    if (type.StartsWith(UrnPrefix, StringComparison.OrdinalIgnoreCase))
    {
        return type[UrnPrefix.Length..];
    }

    // Single scan; the previous Contains + LastIndexOf pair scanned twice
    // and carried an unreachable `idx < 0` branch.
    const string ErrorsSegment = "/errors/";
    var idx = type.LastIndexOf(ErrorsSegment, StringComparison.OrdinalIgnoreCase);
    if (idx >= 0)
    {
        return type[(idx + ErrorsSegment.Length)..];
    }

    if (type.StartsWith("ERR_", StringComparison.OrdinalIgnoreCase))
    {
        return type;
    }

    return null;
}
// Returns the first non-blank string value found in the problem document's
// extensions under any of the given keys (checked in order). Handles both
// raw strings and JSON string elements; anything else is ignored.
private static string? ExtractProblemExtensionString(ProblemDocument? problem, params string[] keys)
{
    var extensions = problem?.Extensions;
    if (extensions is null || extensions.Count == 0 || keys.Length == 0)
    {
        return null;
    }

    foreach (var key in keys)
    {
        if (!extensions.TryGetValue(key, out var raw) || raw is null)
        {
            continue;
        }

        // Normalize the extension value to a string candidate, if possible.
        var candidate = raw switch
        {
            string text => text,
            JsonElement element when element.ValueKind == JsonValueKind.String => element.GetString(),
            _ => null,
        };

        if (!string.IsNullOrWhiteSpace(candidate))
        {
            return candidate;
        }
    }

    return null;
}
public void Dispose()
{
if (_disposed)

View File

@@ -71,6 +71,7 @@
<ProjectReference Include="../../Excititor/__Libraries/StellaOps.Excititor.Storage.Postgres/StellaOps.Excititor.Storage.Postgres.csproj" />
<ProjectReference Include="../../Policy/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
<ProjectReference Include="../../ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/StellaOps.ExportCenter.Client.csproj" />
<ProjectReference Include="../../ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">

View File

@@ -7,3 +7,5 @@
| `CLI-AIAI-31-002` | DONE (2025-11-24) | `stella advise explain` (conflict narrative) command implemented and tested. |
| `CLI-AIAI-31-003` | DONE (2025-11-24) | `stella advise remediate` command implemented and tested. |
| `CLI-AIAI-31-004` | DONE (2025-11-24) | `stella advise batch` supports multi-key runs, per-key outputs, summary table, and tests (`HandleAdviseBatchAsync_RunsAllAdvisories`). |
| `CLI-AIRGAP-339-001` | DONE (2025-12-15) | Implemented `stella offline import/status` (DSSE verify, monotonicity + quarantine hooks, state storage), plus tests and docs; Rekor inclusion proof verification and `verify offline` policy remain blocked pending contracts. |
| `CLI-AIRGAP-341-001` | DONE (2025-12-15) | Sprint 0341: Offline Kit reason/error codes and ProblemDetails integration shipped; tests passing. |

View File

@@ -11,6 +11,31 @@ namespace StellaOps.Cli.Tests.Commands;
public sealed class CommandFactoryTests
{
[Fact]
public void Create_ExposesOfflineCommands()
{
    // Minimal fixture: a silent logger factory and an empty service provider.
    using var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.None));
    var services = new ServiceCollection().BuildServiceProvider();

    var root = CommandFactory.Create(services, new StellaOpsCliOptions(), CancellationToken.None, loggerFactory);

    // Exactly one top-level `offline` command, exposing `import` and `status`.
    var offline = Assert.Single(root.Subcommands, command => string.Equals(command.Name, "offline", StringComparison.Ordinal));
    Assert.Contains(offline.Subcommands, command => string.Equals(command.Name, "import", StringComparison.Ordinal));
    Assert.Contains(offline.Subcommands, command => string.Equals(command.Name, "status", StringComparison.Ordinal));
}
[Fact]
public void Create_ExposesExportCacheCommands()
{
    // Minimal fixture: a silent logger factory and an empty service provider.
    using var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.None));
    var services = new ServiceCollection().BuildServiceProvider();

    var root = CommandFactory.Create(services, new StellaOpsCliOptions(), CancellationToken.None, loggerFactory);

    // Exactly one `export` command with a single `cache` group that exposes
    // the `stats` and `process-queue` subcommands.
    var export = Assert.Single(root.Subcommands, command => string.Equals(command.Name, "export", StringComparison.Ordinal));
    var cache = Assert.Single(export.Subcommands, command => string.Equals(command.Name, "cache", StringComparison.Ordinal));
    Assert.Contains(cache.Subcommands, command => string.Equals(command.Name, "stats", StringComparison.Ordinal));
    Assert.Contains(cache.Subcommands, command => string.Equals(command.Name, "process-queue", StringComparison.Ordinal));
}
[Fact]
public void Create_ExposesRubyInspectAndResolveCommands()
{

View File

@@ -134,21 +134,23 @@ public sealed class CommandHandlersTests
var console = new TestConsole();
var originalConsole = AnsiConsole.Console;
var bestPlan = new EntryTracePlan(
ImmutableArray.Create("/usr/bin/python", "app.py"),
ImmutableDictionary<string, string>.Empty,
"/workspace",
"appuser",
"/usr/bin/python",
EntryTraceTerminalType.Managed,
"python",
0.95,
ImmutableDictionary<string, string>.Empty);
var graph = new EntryTraceGraph(
EntryTraceOutcome.Resolved,
ImmutableArray<EntryTraceNode>.Empty,
ImmutableArray<EntryTraceEdge>.Empty,
ImmutableArray<EntryTraceDiagnostic>.Empty,
ImmutableArray.Create(new EntryTracePlan(
ImmutableArray.Create("/usr/bin/python", "app.py"),
ImmutableDictionary<string, string>.Empty,
"/workspace",
"appuser",
"/usr/bin/python",
EntryTraceTerminalType.Managed,
"python",
0.95,
ImmutableDictionary<string, string>.Empty)),
ImmutableArray.Create(bestPlan),
ImmutableArray.Create(new EntryTraceTerminal(
"/usr/bin/python",
EntryTraceTerminalType.Managed,
@@ -166,7 +168,8 @@ public sealed class CommandHandlersTests
"sha256:deadbeef",
DateTimeOffset.Parse("2025-11-02T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal),
graph,
new[] { "{\"type\":\"terminal\"}" })
new[] { "{\"type\":\"terminal\"}" },
bestPlan)
};
var provider = BuildServiceProvider(backend);
@@ -178,6 +181,7 @@ public sealed class CommandHandlersTests
provider,
"scan-123",
includeNdjson: true,
includeSemantic: false,
verbose: false,
cancellationToken: CancellationToken.None);
@@ -211,6 +215,7 @@ public sealed class CommandHandlersTests
provider,
"scan-missing",
includeNdjson: false,
includeSemantic: false,
verbose: false,
cancellationToken: CancellationToken.None));
@@ -1342,104 +1347,6 @@ public sealed class CommandHandlersTests
}
}
[Fact]
public async Task HandleAdviseRunAsync_WritesMarkdownWithCitations_ForExplain()
{
var originalExit = Environment.ExitCode;
var originalConsole = AnsiConsole.Console;
var testConsole = new TestConsole();
try
{
Environment.ExitCode = 0;
AnsiConsole.Console = testConsole;
var planResponse = new AdvisoryPipelinePlanResponseModel
{
TaskType = "Conflict",
CacheKey = "plan-conflict",
PromptTemplate = "prompts/advisory/conflict.liquid",
Budget = new AdvisoryTaskBudgetModel
{
PromptTokens = 128,
CompletionTokens = 64
},
Chunks = Array.Empty<PipelineChunkSummaryModel>(),
Vectors = Array.Empty<PipelineVectorSummaryModel>(),
Metadata = new Dictionary<string, string>()
};
var outputResponse = new AdvisoryPipelineOutputModel
{
CacheKey = planResponse.CacheKey,
TaskType = planResponse.TaskType,
Profile = "default",
Prompt = "Sanitized prompt",
Response = "Rendered conflict body.",
Citations = new[]
{
new AdvisoryOutputCitationModel { Index = 1, DocumentId = "doc-42", ChunkId = "chunk-42" }
},
Metadata = new Dictionary<string, string>(),
Guardrail = new AdvisoryOutputGuardrailModel
{
Blocked = false,
SanitizedPrompt = "Sanitized prompt",
Violations = Array.Empty<AdvisoryOutputGuardrailViolationModel>(),
Metadata = new Dictionary<string, string>()
},
Provenance = new AdvisoryOutputProvenanceModel
{
InputDigest = "sha256:conflict-in",
OutputHash = "sha256:conflict-out",
Signatures = Array.Empty<string>()
},
GeneratedAtUtc = DateTimeOffset.Parse("2025-11-06T12:00:00Z", CultureInfo.InvariantCulture),
PlanFromCache = false
};
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
{
AdvisoryPlanResponse = planResponse,
AdvisoryOutputResponse = outputResponse
};
var provider = BuildServiceProvider(backend);
var outputPath = Path.GetTempFileName();
await CommandHandlers.HandleAdviseRunAsync(
provider,
AdvisoryAiTaskType.Conflict,
"ADV-42",
null,
null,
null,
"default",
Array.Empty<string>(),
forceRefresh: false,
timeoutSeconds: 0,
outputFormat: AdvisoryOutputFormat.Markdown,
outputPath: outputPath,
verbose: false,
cancellationToken: CancellationToken.None);
var markdown = await File.ReadAllTextAsync(outputPath);
Assert.Contains("Conflict", markdown, StringComparison.OrdinalIgnoreCase);
Assert.Contains("Rendered conflict body", markdown, StringComparison.OrdinalIgnoreCase);
Assert.Contains("doc-42", markdown, StringComparison.OrdinalIgnoreCase);
Assert.Contains("chunk-42", markdown, StringComparison.OrdinalIgnoreCase);
Assert.Contains("Citations", markdown, StringComparison.OrdinalIgnoreCase);
Assert.Equal(0, Environment.ExitCode);
Assert.Contains("Conflict", testConsole.Output, StringComparison.OrdinalIgnoreCase);
Assert.Equal(AdvisoryAiTaskType.Conflict, backend.AdvisoryPlanRequests.Last().TaskType);
}
finally
{
AnsiConsole.Console = originalConsole;
Environment.ExitCode = originalExit;
}
}
[Fact]
public async Task HandleAdviseRunAsync_WritesMarkdownWithCitations_ForRemediationTask()
{
@@ -2503,6 +2410,7 @@ public sealed class CommandHandlersTests
"sbom:S-42",
new[] { "CVE-2021-23337", "GHSA-xxxx-yyyy" },
new PolicyFindingVexMetadata("VendorX-123", "vendor-x", "not_affected"),
null,
4,
DateTimeOffset.Parse("2025-10-26T14:06:01Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal),
"run:P-7:2025-10-26:auto")
@@ -2570,6 +2478,7 @@ public sealed class CommandHandlersTests
"sbom:S-99",
Array.Empty<string>(),
null,
null,
3,
DateTimeOffset.MinValue,
null)
@@ -2638,6 +2547,7 @@ public sealed class CommandHandlersTests
"sbom:S-1",
new[] { "CVE-1111" },
new PolicyFindingVexMetadata("VendorY-9", null, "affected"),
null,
7,
DateTimeOffset.Parse("2025-10-26T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal),
"run:P-9:1234")
@@ -2787,6 +2697,14 @@ public sealed class CommandHandlersTests
outputPath: null,
explain: true,
failOnDiff: false,
withExceptions: Array.Empty<string>(),
withoutExceptions: Array.Empty<string>(),
mode: null,
sbomSelectors: Array.Empty<string>(),
includeHeatmap: false,
manifestDownload: false,
reachabilityStates: Array.Empty<string>(),
reachabilityScores: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
@@ -2849,6 +2767,14 @@ public sealed class CommandHandlersTests
outputPath: null,
explain: false,
failOnDiff: false,
withExceptions: Array.Empty<string>(),
withoutExceptions: Array.Empty<string>(),
mode: null,
sbomSelectors: Array.Empty<string>(),
includeHeatmap: false,
manifestDownload: false,
reachabilityStates: Array.Empty<string>(),
reachabilityScores: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
@@ -2898,6 +2824,14 @@ public sealed class CommandHandlersTests
outputPath: null,
explain: false,
failOnDiff: true,
withExceptions: Array.Empty<string>(),
withoutExceptions: Array.Empty<string>(),
mode: null,
sbomSelectors: Array.Empty<string>(),
includeHeatmap: false,
manifestDownload: false,
reachabilityStates: Array.Empty<string>(),
reachabilityScores: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
@@ -2937,6 +2871,14 @@ public sealed class CommandHandlersTests
outputPath: null,
explain: false,
failOnDiff: false,
withExceptions: Array.Empty<string>(),
withoutExceptions: Array.Empty<string>(),
mode: null,
sbomSelectors: Array.Empty<string>(),
includeHeatmap: false,
manifestDownload: false,
reachabilityStates: Array.Empty<string>(),
reachabilityScores: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
@@ -4454,6 +4396,7 @@ spec:
"sbom:default",
Array.Empty<string>(),
null,
null,
1,
DateTimeOffset.UtcNow,
null);
@@ -4472,7 +4415,7 @@ spec:
public List<(AdvisoryAiTaskType TaskType, AdvisoryPipelinePlanRequestModel Request)> AdvisoryPlanRequests { get; } = new();
public AdvisoryPipelinePlanResponseModel? AdvisoryPlanResponse { get; set; }
public Exception? AdvisoryPlanException { get; set; }
public Queue<AdvisoryPipelineOutputModel?> AdvisoryOutputQueue { get; } = new();
public Queue<AdvisoryPipelineOutputModel?> AdvisoryOutputQueue { get; set; } = new();
public AdvisoryPipelineOutputModel? AdvisoryOutputResponse { get; set; }
public Exception? AdvisoryOutputException { get; set; }
public List<(string CacheKey, AdvisoryAiTaskType TaskType, string Profile)> AdvisoryOutputRequests { get; } = new();
@@ -4704,6 +4647,119 @@ spec:
return Task.FromResult(AdvisoryOutputResponse);
}
public Task<RiskProfileListResponse> ListRiskProfilesAsync(RiskProfileListRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new RiskProfileListResponse());
public Task<RiskSimulateResult> SimulateRiskAsync(RiskSimulateRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new RiskSimulateResult());
public Task<RiskResultsResponse> GetRiskResultsAsync(RiskResultsRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new RiskResultsResponse());
public Task<RiskBundleVerifyResult> VerifyRiskBundleAsync(RiskBundleVerifyRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new RiskBundleVerifyResult());
public Task<ReachabilityUploadCallGraphResult> UploadCallGraphAsync(ReachabilityUploadCallGraphRequest request, Stream callGraphStream, CancellationToken cancellationToken)
=> Task.FromResult(new ReachabilityUploadCallGraphResult());
public Task<ReachabilityListResponse> ListReachabilityAnalysesAsync(ReachabilityListRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new ReachabilityListResponse());
public Task<ReachabilityExplainResult> ExplainReachabilityAsync(ReachabilityExplainRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new ReachabilityExplainResult());
public Task<GraphExplainResult> ExplainGraphAsync(GraphExplainRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new GraphExplainResult());
public Task<ApiSpecListResponse> ListApiSpecsAsync(string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new ApiSpecListResponse());
public Task<ApiSpecDownloadResult> DownloadApiSpecAsync(ApiSpecDownloadRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new ApiSpecDownloadResult());
public Task<SdkUpdateResponse> CheckSdkUpdatesAsync(SdkUpdateRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new SdkUpdateResponse());
public Task<SdkListResponse> ListInstalledSdksAsync(string? language, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new SdkListResponse());
public Task<PolicyHistoryResponse> GetPolicyHistoryAsync(PolicyHistoryRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyHistoryResponse());
public Task<PolicyExplainResult> GetPolicyExplainAsync(PolicyExplainRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyExplainResult());
public Task<PolicyVersionBumpResult> BumpPolicyVersionAsync(PolicyVersionBumpRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyVersionBumpResult());
public Task<PolicySubmitResult> SubmitPolicyForReviewAsync(PolicySubmitRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicySubmitResult());
public Task<PolicyReviewCommentResult> AddPolicyReviewCommentAsync(PolicyReviewCommentRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyReviewCommentResult());
public Task<PolicyApproveResult> ApprovePolicyReviewAsync(PolicyApproveRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyApproveResult());
public Task<PolicyRejectResult> RejectPolicyReviewAsync(PolicyRejectRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyRejectResult());
public Task<PolicyReviewSummary?> GetPolicyReviewStatusAsync(PolicyReviewStatusRequest request, CancellationToken cancellationToken)
=> Task.FromResult<PolicyReviewSummary?>(null);
public Task<PolicyPublishResult> PublishPolicyAsync(PolicyPublishRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyPublishResult());
public Task<PolicyPromoteResult> PromotePolicyAsync(PolicyPromoteRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyPromoteResult());
public Task<PolicyRollbackResult> RollbackPolicyAsync(PolicyRollbackRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyRollbackResult());
public Task<PolicySignResult> SignPolicyAsync(PolicySignRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicySignResult());
public Task<PolicyVerifySignatureResult> VerifyPolicySignatureAsync(PolicyVerifySignatureRequest request, CancellationToken cancellationToken)
=> Task.FromResult(new PolicyVerifySignatureResult());
public Task<VexConsensusListResponse> ListVexConsensusAsync(VexConsensusListRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VexConsensusListResponse(Array.Empty<VexConsensusItem>(), 0, 0, 0, false));
public Task<VexConsensusDetailResponse?> GetVexConsensusAsync(string vulnerabilityId, string productKey, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult<VexConsensusDetailResponse?>(null);
public Task<VexSimulationResponse> SimulateVexConsensusAsync(VexSimulationRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VexSimulationResponse(
Array.Empty<VexSimulationResultItem>(),
new VexSimulationParameters(0.0, 0),
new VexSimulationSummary(0, 0, 0, 0, 0)));
public Task<VexExportResponse> ExportVexConsensusAsync(VexExportRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VexExportResponse("export-0"));
public Task<Stream> DownloadVexExportAsync(string exportId, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult<Stream>(new MemoryStream(Encoding.UTF8.GetBytes("{}")));
public Task<VulnListResponse> ListVulnerabilitiesAsync(VulnListRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VulnListResponse(Array.Empty<VulnItem>(), 0, 0, 0, false));
public Task<VulnDetailResponse?> GetVulnerabilityAsync(string vulnerabilityId, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult<VulnDetailResponse?>(null);
public Task<VulnWorkflowResponse> ExecuteVulnWorkflowAsync(VulnWorkflowRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VulnWorkflowResponse(true, request.Action, 0, Array.Empty<string>()));
public Task<VulnSimulationResponse> SimulateVulnerabilitiesAsync(VulnSimulationRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VulnSimulationResponse(
Array.Empty<VulnSimulationDelta>(),
new VulnSimulationSummary(0, 0, 0, 0, 0)));
public Task<VulnExportResponse> ExportVulnerabilitiesAsync(VulnExportRequest request, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult(new VulnExportResponse("export-0"));
public Task<Stream> DownloadVulnExportAsync(string exportId, string? tenant, CancellationToken cancellationToken)
=> Task.FromResult<Stream>(new MemoryStream(Encoding.UTF8.GetBytes("{}")));
}
private sealed class StubExecutor : IScannerExecutor
@@ -4832,6 +4888,12 @@ spec:
LastQuery = query;
return Task.FromResult(_response);
}
public Task<AdvisoryLinksetResponse> GetLinksetAsync(AdvisoryLinksetQuery query, CancellationToken cancellationToken)
=> Task.FromResult(new AdvisoryLinksetResponse());
public Task<AdvisoryLinksetObservation?> GetObservationByIdAsync(string tenant, string observationId, CancellationToken cancellationToken)
=> Task.FromResult<AdvisoryLinksetObservation?>(null);
}
[Fact]

View File

@@ -0,0 +1,126 @@
using System;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Spectre.Console;
using Spectre.Console.Testing;
using StellaOps.Cli.Commands;
using StellaOps.Cli.Tests.Testing;
using StellaOps.ExportCenter.Core.EvidenceCache;
namespace StellaOps.Cli.Tests.Commands;
// Exercises the `export cache` command handlers against a real
// LocalEvidenceCacheService backed by a temp directory, asserting on the
// JSON they emit.
public sealed class ExportCacheCommandHandlersTests
{
    [Fact]
    public async Task HandleExportCacheStatsAsync_Json_EmitsStatistics()
    {
        // Arrange: seed one evidence bundle whose sections are all available.
        using var temp = new TempDirectory();
        var scanOutputPath = temp.Path;
        var cacheService = new LocalEvidenceCacheService(TimeProvider.System, NullLogger<LocalEvidenceCacheService>.Instance);
        await cacheService.CacheEvidenceAsync(
            scanOutputPath,
            new CachedEvidenceBundle
            {
                AlertId = "alert-1",
                ArtifactId = "scan-1",
                ComputedAt = DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
                Reachability = new CachedEvidenceSection { Status = EvidenceStatus.Available },
                CallStack = new CachedEvidenceSection { Status = EvidenceStatus.Available },
                Provenance = new CachedEvidenceSection { Status = EvidenceStatus.Available },
                VexStatus = new CachedEvidenceSection { Status = EvidenceStatus.Available }
            },
            CancellationToken.None);
        using var services = BuildServices(cacheService);

        // Act: run `stats` in JSON mode and capture console output.
        var output = await CaptureTestConsoleAsync(console => CommandHandlers.HandleExportCacheStatsAsync(
            services,
            scanOutputPath,
            json: true,
            verbose: false,
            CancellationToken.None));

        // Assert: success exit code and the JSON reports the scan output path
        // plus the single cached bundle.
        Assert.Equal(0, output.ExitCode);
        using var document = JsonDocument.Parse(output.Console.Trim());
        Assert.Equal(Path.GetFullPath(scanOutputPath), document.RootElement.GetProperty("scanOutput").GetString());
        Assert.Equal(1, document.RootElement.GetProperty("statistics").GetProperty("totalBundles").GetInt32());
    }

    [Fact]
    public async Task HandleExportCacheProcessQueueAsync_Json_EmitsCounts()
    {
        // Arrange: seed a bundle whose Provenance section is still pending
        // enrichment (unavailable offline).
        using var temp = new TempDirectory();
        var scanOutputPath = temp.Path;
        var cacheService = new LocalEvidenceCacheService(TimeProvider.System, NullLogger<LocalEvidenceCacheService>.Instance);
        await cacheService.CacheEvidenceAsync(
            scanOutputPath,
            new CachedEvidenceBundle
            {
                AlertId = "alert-1",
                ArtifactId = "scan-1",
                ComputedAt = DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
                Reachability = new CachedEvidenceSection { Status = EvidenceStatus.Available },
                CallStack = new CachedEvidenceSection { Status = EvidenceStatus.Available },
                Provenance = new CachedEvidenceSection { Status = EvidenceStatus.PendingEnrichment, UnavailableReason = "offline" },
                VexStatus = new CachedEvidenceSection { Status = EvidenceStatus.Available }
            },
            CancellationToken.None);
        using var services = BuildServices(cacheService);

        // Act: process the enrichment queue in JSON mode.
        var output = await CaptureTestConsoleAsync(console => CommandHandlers.HandleExportCacheProcessQueueAsync(
            services,
            scanOutputPath,
            json: true,
            verbose: false,
            CancellationToken.None));

        // Assert: the pending entry cannot be enriched here, so it counts as
        // failed and stays queued (0 processed / 1 failed / 1 remaining) —
        // NOTE(review): expected counts assume LocalEvidenceCacheService's
        // offline semantics; confirm against its implementation.
        Assert.Equal(0, output.ExitCode);
        using var document = JsonDocument.Parse(output.Console.Trim());
        var result = document.RootElement.GetProperty("result");
        Assert.Equal(0, result.GetProperty("processedCount").GetInt32());
        Assert.Equal(1, result.GetProperty("failedCount").GetInt32());
        Assert.Equal(1, result.GetProperty("remainingCount").GetInt32());
    }

    // Builds the minimal DI container the handlers resolve from: a time
    // provider, the cache service under test, and a silent logger factory.
    private static ServiceProvider BuildServices(IEvidenceCacheService cacheService)
    {
        var services = new ServiceCollection();
        services.AddSingleton(TimeProvider.System);
        services.AddSingleton(cacheService);
        services.AddSingleton<ILoggerFactory>(_ => LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.None)));
        return services.BuildServiceProvider();
    }

    // Runs an action against a temporary Spectre TestConsole (swapped into
    // AnsiConsole.Console) while also redirecting Console.Out, then restores
    // both. Returns the exit code plus both captured output streams.
    private static async Task<CapturedConsoleOutput> CaptureTestConsoleAsync(Func<TestConsole, Task<int>> action)
    {
        var testConsole = new TestConsole();
        testConsole.Width(4000);  // avoid table wrapping in captured output
        var originalConsole = AnsiConsole.Console;
        var originalOut = Console.Out;
        using var writer = new StringWriter();
        try
        {
            AnsiConsole.Console = testConsole;
            Console.SetOut(writer);
            var exitCode = await action(testConsole).ConfigureAwait(false);
            return new CapturedConsoleOutput(exitCode, testConsole.Output.ToString(), writer.ToString());
        }
        finally
        {
            // Always restore process-global console state for other tests.
            Console.SetOut(originalOut);
            AnsiConsole.Console = originalConsole;
        }
    }

    // Exit code plus the Spectre console capture and the plain Console.Out capture.
    private sealed record CapturedConsoleOutput(int ExitCode, string Console, string Plain);
}

View File

@@ -0,0 +1,277 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Spectre.Console;
using Spectre.Console.Testing;
using StellaOps.Cli.Commands;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Telemetry;
using StellaOps.Cli.Tests.Testing;
namespace StellaOps.Cli.Tests.Commands;
public sealed class OfflineCommandHandlersTests
{
[Fact]
public async Task HandleOfflineImportAsync_ForceActivateRequiresReason()
{
    // Arrange: a throwaway bundle file and a kits directory under a temp root.
    using var temp = new TempDirectory();
    var bundlePath = Path.Combine(temp.Path, "bundle.tar.zst");
    await File.WriteAllTextAsync(bundlePath, "payload", CancellationToken.None);
    using var services = BuildServices(new StellaOpsCliOptions
    {
        Offline = new StellaOpsCliOfflineOptions
        {
            KitsDirectory = Path.Combine(temp.Path, "offline-kits")
        }
    });
    // Environment.ExitCode is process-global; save it so the finally block
    // can restore it for other tests.
    var originalExitCode = Environment.ExitCode;
    try
    {
        // Act: request force activation WITHOUT supplying a reason.
        var output = await CaptureTestConsoleAsync(console => CommandHandlers.HandleOfflineImportAsync(
            services,
            tenant: null,
            bundlePath: bundlePath,
            manifestPath: null,
            verifyDsse: false,
            verifyRekor: false,
            trustRootPath: null,
            forceActivate: true,
            forceReason: null,
            dryRun: true,
            outputFormat: "json",
            verbose: false,
            cancellationToken: CancellationToken.None));

        // Assert: validation failure is surfaced via the exit code and the
        // JSON payload, and the message points at the missing force-reason.
        Assert.Equal(OfflineExitCodes.ValidationFailed, Environment.ExitCode);
        using var document = JsonDocument.Parse(output.Console.Trim());
        Assert.Equal("error", document.RootElement.GetProperty("status").GetString());
        Assert.Equal(OfflineExitCodes.ValidationFailed, document.RootElement.GetProperty("exitCode").GetInt32());
        Assert.Contains("force-reason", document.RootElement.GetProperty("message").GetString() ?? string.Empty, StringComparison.OrdinalIgnoreCase);
    }
    finally
    {
        Environment.ExitCode = originalExitCode;
    }
}
[Fact]
public async Task HandleOfflineImportAndStatusAsync_SavesActiveState()
{
    // Arrange: author a complete offline kit on disk — bundle bytes,
    // manifest, RSA trust root, DSSE envelope, and a Rekor receipt.
    using var temp = new TempDirectory();
    var bundleDir = Path.Combine(temp.Path, "bundle");
    Directory.CreateDirectory(bundleDir);
    var bundlePath = Path.Combine(bundleDir, "bundle-1.0.0.tar.zst");
    var bundleBytes = Encoding.UTF8.GetBytes("deterministic-offline-kit");
    await File.WriteAllBytesAsync(bundlePath, bundleBytes, CancellationToken.None);
    var bundleDigest = ComputeSha256Hex(bundleBytes);

    // Manifest whose payload_sha256 matches the actual bundle digest.
    var manifestPath = Path.Combine(bundleDir, "manifest.json");
    var manifestJson = JsonSerializer.Serialize(new
    {
        version = "1.0.0",
        created_at = "2025-12-14T00:00:00Z",
        payload_sha256 = bundleDigest
    }, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
    await File.WriteAllTextAsync(manifestPath, manifestJson, CancellationToken.None);

    // Fresh RSA keypair; the public key becomes the trust root and its
    // SHA-256 fingerprint is used as the DSSE keyid.
    using var rsa = RSA.Create(2048);
    var publicKeyDer = rsa.ExportSubjectPublicKeyInfo();
    var fingerprint = ComputeSha256Hex(publicKeyDer);
    var trustRootPath = Path.Combine(bundleDir, "trust-root.pub");
    await File.WriteAllTextAsync(trustRootPath, WrapPem("PUBLIC KEY", publicKeyDer), CancellationToken.None);

    // in-toto style payload binding the statement to the bundle digest.
    var payloadJson = JsonSerializer.Serialize(new
    {
        subject = new[]
        {
            new
            {
                digest = new
                {
                    sha256 = bundleDigest
                }
            }
        }
    }, new JsonSerializerOptions(JsonSerializerDefaults.Web));
    var payloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(payloadJson));
    // Sign the DSSE pre-authentication encoding (BuildDssePae is a class
    // helper defined outside this view — presumably the standard DSSE PAE).
    var pae = BuildDssePae("application/vnd.in-toto+json", payloadBase64);
    var signature = Convert.ToBase64String(rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pss));
    var dssePath = Path.Combine(bundleDir, "statement.dsse.json");
    var dsseJson = JsonSerializer.Serialize(new
    {
        payloadType = "application/vnd.in-toto+json",
        payload = payloadBase64,
        signatures = new[]
        {
            new { keyid = fingerprint, sig = signature }
        }
    }, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
    await File.WriteAllTextAsync(dssePath, dsseJson, CancellationToken.None);

    // Minimal self-consistent Rekor receipt (checkpoint embeds the root hash).
    var rootHash = "deadbeef";
    var rekorPath = Path.Combine(bundleDir, "rekor-receipt.json");
    var rekorJson = JsonSerializer.Serialize(new
    {
        uuid = "rekor-test",
        logIndex = 42,
        rootHash,
        hashes = new[] { "hash-1" },
        checkpoint = $"checkpoint {rootHash}"
    }, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
    await File.WriteAllTextAsync(rekorPath, rekorJson, CancellationToken.None);

    var kitsDirectory = Path.Combine(temp.Path, "offline-kits");
    using var services = BuildServices(new StellaOpsCliOptions
    {
        Offline = new StellaOpsCliOfflineOptions
        {
            KitsDirectory = kitsDirectory
        }
    });
    // Environment.ExitCode is process-global; restore it afterwards.
    var originalExitCode = Environment.ExitCode;
    try
    {
        // Act 1: import the kit with both DSSE and Rekor verification on.
        var importOutput = await CaptureTestConsoleAsync(console => CommandHandlers.HandleOfflineImportAsync(
            services,
            tenant: null,
            bundlePath: bundlePath,
            manifestPath: manifestPath,
            verifyDsse: true,
            verifyRekor: true,
            trustRootPath: trustRootPath,
            forceActivate: false,
            forceReason: null,
            dryRun: false,
            outputFormat: "json",
            verbose: false,
            cancellationToken: CancellationToken.None));

        // Assert 1: import succeeded and both verifications passed.
        Assert.Equal(OfflineExitCodes.Success, Environment.ExitCode);
        using (var document = JsonDocument.Parse(importOutput.Console.Trim()))
        {
            Assert.Equal("imported", document.RootElement.GetProperty("status").GetString());
            Assert.Equal(OfflineExitCodes.Success, document.RootElement.GetProperty("exitCode").GetInt32());
            Assert.True(document.RootElement.GetProperty("dsseVerified").GetBoolean());
            Assert.True(document.RootElement.GetProperty("rekorVerified").GetBoolean());
            Assert.Equal("1.0.0", document.RootElement.GetProperty("version").GetString());
        }

        // Assert 2: the per-tenant state file was written under .state
        // (tenant defaults to "default" when none is given).
        var statePath = Path.Combine(kitsDirectory, ".state", "offline-kit-active__default.json");
        Assert.True(File.Exists(statePath));

        // Act 2 / Assert 3: `offline status` reads that state back and
        // reports the activated kit's id, version, and digest.
        var statusOutput = await CaptureTestConsoleAsync(console => CommandHandlers.HandleOfflineStatusAsync(
            services,
            tenant: null,
            outputFormat: "json",
            verbose: false,
            cancellationToken: CancellationToken.None));
        Assert.Equal(OfflineExitCodes.Success, Environment.ExitCode);
        using (var document = JsonDocument.Parse(statusOutput.Console.Trim()))
        {
            Assert.Equal("default", document.RootElement.GetProperty("tenantId").GetString());
            var active = document.RootElement.GetProperty("active");
            Assert.Equal("bundle-1.0.0.tar.zst", active.GetProperty("kitId").GetString());
            Assert.Equal("1.0.0", active.GetProperty("version").GetString());
            Assert.Equal($"sha256:{bundleDigest}", active.GetProperty("digest").GetString());
        }
    }
    finally
    {
        Environment.ExitCode = originalExitCode;
    }
}
/// <summary>
/// Builds a minimal DI container for command-handler tests: the CLI options,
/// a verbosity state, and a logger factory that discards all output.
/// </summary>
private static ServiceProvider BuildServices(StellaOpsCliOptions options)
{
    return new ServiceCollection()
        .AddSingleton(options)
        .AddSingleton(new VerbosityState())
        .AddSingleton<ILoggerFactory>(_ => LoggerFactory.Create(logging => logging.SetMinimumLevel(LogLevel.None)))
        .BuildServiceProvider();
}
/// <summary>
/// Runs <paramref name="action"/> with a wide Spectre <c>TestConsole</c> swapped in
/// for <c>AnsiConsole.Console</c> and a <see cref="StringWriter"/> swapped in for
/// <c>Console.Out</c>, restoring both afterwards. Returns the rendered console
/// output together with the plain stdout text.
/// </summary>
private static async Task<CapturedConsoleOutput> CaptureTestConsoleAsync(Func<TestConsole, Task> action)
{
    var capturingConsole = new TestConsole();
    capturingConsole.Width(4000);

    var previousConsole = AnsiConsole.Console;
    var previousOut = Console.Out;
    using var plainWriter = new StringWriter();
    try
    {
        AnsiConsole.Console = capturingConsole;
        Console.SetOut(plainWriter);
        await action(capturingConsole).ConfigureAwait(false);
        return new CapturedConsoleOutput(capturingConsole.Output.ToString(), plainWriter.ToString());
    }
    finally
    {
        // Always restore the ambient console state, even if the action throws.
        Console.SetOut(previousOut);
        AnsiConsole.Console = previousConsole;
    }
}
/// <summary>Computes the SHA-256 digest of <paramref name="bytes"/> as a lowercase hex string.</summary>
private static string ComputeSha256Hex(byte[] bytes)
    => Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
/// <summary>
/// Builds the pre-authentication encoding over the DSSE payload used by these
/// tests: <c>PAE:&lt;count&gt;</c> followed by a space-separated
/// <c>&lt;length&gt; &lt;segment&gt;</c> pair for "DSSEv1", the payload type, and
/// the decoded payload text.
/// </summary>
/// <param name="payloadType">The DSSE payload type string.</param>
/// <param name="payloadBase64">Base64-encoded payload; decoded before encoding.</param>
/// <returns>UTF-8 bytes of the encoded string, ready to sign.</returns>
private static byte[] BuildDssePae(string payloadType, string payloadBase64)
{
    var payloadText = Encoding.UTF8.GetString(Convert.FromBase64String(payloadBase64));
    string[] segments = { "DSSEv1", payloadType, payloadText };

    // NOTE(review): segment lengths are .NET char counts, not UTF-8 byte counts;
    // identical for the ASCII payloads used here, but confirm the verifier side
    // agrees before feeding non-ASCII payloads through this helper.
    var encoded = new StringBuilder("PAE:").Append(segments.Length);
    foreach (var segment in segments)
    {
        encoded.Append(' ').Append(segment.Length).Append(' ').Append(segment);
    }

    return Encoding.UTF8.GetBytes(encoded.ToString());
}
/// <summary>
/// Wraps DER bytes in a PEM envelope: BEGIN/END lines around base64 content
/// broken into 64-character lines (platform newline separators).
/// </summary>
/// <param name="label">The PEM label, e.g. "PUBLIC KEY".</param>
/// <param name="derBytes">Raw DER-encoded bytes to wrap.</param>
private static string WrapPem(string label, byte[] derBytes)
{
    var pem = new StringBuilder();
    pem.Append("-----BEGIN ").Append(label).AppendLine("-----");

    var remaining = Convert.ToBase64String(derBytes).AsSpan();
    while (!remaining.IsEmpty)
    {
        var take = Math.Min(64, remaining.Length);
        pem.Append(remaining[..take]).AppendLine();
        remaining = remaining[take..];
    }

    pem.Append("-----END ").Append(label).AppendLine("-----");
    return pem.ToString();
}
/// <summary>
/// Output captured by <c>CaptureTestConsoleAsync</c>.
/// </summary>
/// <param name="Console">Text rendered through the Spectre test console.</param>
/// <param name="Plain">Text written to the redirected <c>Console.Out</c>.</param>
private sealed record CapturedConsoleOutput(string Console, string Plain);
}

View File

@@ -2,6 +2,7 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cli.Commands;
using Xunit;
@@ -54,7 +55,7 @@ internal static class CommandHandlersTestShim
{
public static Task VerifyBundlePublicAsync(string path, ILogger logger, CancellationToken token)
=> typeof(CommandHandlers)
.GetMethod(\"VerifyBundleAsync\", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Static)!
.GetMethod("VerifyBundleAsync", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Static)!
.Invoke(null, new object[] { path, logger, token }) as Task
?? Task.CompletedTask;
}

View File

@@ -7,7 +7,24 @@ namespace StellaOps.Cli.Tests.Contracts;
public sealed class CliSpecTests
{
private static readonly string SpecPath = Path.Combine("docs", "modules", "cli", "contracts", "cli-spec-v1.yaml");
private static readonly string SpecPath = ResolveSpecPath();
/// <summary>
/// Walks from the test output directory toward the filesystem root and returns
/// the first ancestor directory that contains the CLI spec file; falls back to
/// the relative path when no ancestor has it.
/// </summary>
private static string ResolveSpecPath()
{
    var specRelativePath = Path.Combine("docs", "modules", "cli", "contracts", "cli-spec-v1.yaml");
    var current = new DirectoryInfo(AppContext.BaseDirectory);
    while (current is not null)
    {
        var candidate = Path.Combine(current.FullName, specRelativePath);
        if (File.Exists(candidate))
        {
            return candidate;
        }

        current = current.Parent;
    }

    return specRelativePath;
}
[Fact]
public async Task Spec_Exists_And_Has_PrivacyDefaults()

View File

@@ -292,7 +292,8 @@ public sealed class BackendOperationsClientTests
"sha256:test",
generatedAt,
graph,
EntryTraceNdjsonWriter.Serialize(graph, new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt)));
EntryTraceNdjsonWriter.Serialize(graph, new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt)),
plan);
var json = JsonSerializer.Serialize(responseModel, new JsonSerializerOptions(JsonSerializerDefaults.Web));
var handler = new StubHttpMessageHandler((request, _) =>

View File

@@ -0,0 +1,143 @@
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.EvidenceCache;
namespace StellaOps.ExportCenter.Tests.EvidenceCache;
/// <summary>
/// Tests for <see cref="LocalEvidenceCacheService"/>: writing the local
/// ".evidence" cache layout, de-duplicating queued enrichment requests, and
/// tracking attempt counts when the queue is processed.
/// </summary>
public sealed class LocalEvidenceCacheServiceTests
{
    [Fact]
    public async Task CacheEvidenceAsync_WritesManifestAndUpdatesStatistics()
    {
        // Arrange: a bundle where provenance is still pending enrichment
        // ("offline") while the remaining sections are available.
        using var temp = new TempDirectory();
        var service = new LocalEvidenceCacheService(TimeProvider.System, NullLogger<LocalEvidenceCacheService>.Instance);
        var bundle = new CachedEvidenceBundle
        {
            AlertId = "alert-1",
            ArtifactId = "scan-1",
            ComputedAt = DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            Reachability = new CachedEvidenceSection
            {
                Status = EvidenceStatus.Available,
                Hash = "sha256:reach",
                Proof = new { ok = true }
            },
            CallStack = new CachedEvidenceSection
            {
                Status = EvidenceStatus.Available
            },
            Provenance = new CachedEvidenceSection
            {
                Status = EvidenceStatus.PendingEnrichment,
                UnavailableReason = "offline"
            },
            VexStatus = new CachedEvidenceSection
            {
                Status = EvidenceStatus.Available
            }
        };

        // Act
        var cacheResult = await service.CacheEvidenceAsync(temp.Path, bundle, CancellationToken.None);

        // Assert: on-disk layout under ".evidence".
        Assert.True(cacheResult.Success);
        var cacheDir = Path.Combine(temp.Path, ".evidence");
        Assert.True(Directory.Exists(cacheDir));
        Assert.True(File.Exists(Path.Combine(cacheDir, "manifest.json")));
        Assert.True(File.Exists(Path.Combine(cacheDir, "bundles", "alert-1.evidence.json")));
        Assert.True(File.Exists(Path.Combine(cacheDir, "enrichment_queue.json")));

        // Assert: statistics reflect the single, pending-enrichment bundle.
        var statistics = await service.GetStatisticsAsync(temp.Path, CancellationToken.None);
        Assert.Equal(1, statistics.TotalBundles);
        Assert.Equal(0, statistics.FullyAvailable);
        Assert.Equal(0, statistics.PartiallyAvailable);
        Assert.Equal(1, statistics.PendingEnrichment);
        Assert.True(statistics.OfflineResolvablePercentage >= 99.99);
        Assert.True(statistics.TotalSizeBytes > 0);
    }

    [Fact]
    public async Task QueueEnrichmentAsync_DeduplicatesRequests()
    {
        using var temp = new TempDirectory();
        var service = new LocalEvidenceCacheService(TimeProvider.System, NullLogger<LocalEvidenceCacheService>.Instance);
        var request = new EnrichmentRequest
        {
            AlertId = "alert-1",
            ArtifactId = "scan-1",
            EvidenceType = "reachability",
            Reason = "missing",
            QueuedAt = DateTimeOffset.MinValue,
            AttemptCount = 0
        };

        // Queue the same request twice (only the reason differs); the service
        // is expected to keep a single entry.
        await service.QueueEnrichmentAsync(temp.Path, request, CancellationToken.None);
        await service.QueueEnrichmentAsync(temp.Path, request with { Reason = "still missing" }, CancellationToken.None);

        var queuePath = Path.Combine(temp.Path, ".evidence", "enrichment_queue.json");
        Assert.True(File.Exists(queuePath));
        using var document = JsonDocument.Parse(await File.ReadAllTextAsync(queuePath, CancellationToken.None));
        var requests = document.RootElement.GetProperty("requests");
        Assert.Equal(1, requests.GetArrayLength());
        Assert.Equal("alert-1", requests[0].GetProperty("alert_id").GetString());
        Assert.Equal("reachability", requests[0].GetProperty("evidence_type").GetString());
    }

    [Fact]
    public async Task ProcessEnrichmentQueueAsync_IncrementsAttemptCounts()
    {
        using var temp = new TempDirectory();
        var service = new LocalEvidenceCacheService(TimeProvider.System, NullLogger<LocalEvidenceCacheService>.Instance);
        await service.QueueEnrichmentAsync(
            temp.Path,
            new EnrichmentRequest
            {
                AlertId = "alert-1",
                ArtifactId = "scan-1",
                EvidenceType = "provenance",
                QueuedAt = DateTimeOffset.MinValue,
                AttemptCount = 0
            },
            CancellationToken.None);

        var result = await service.ProcessEnrichmentQueueAsync(temp.Path, CancellationToken.None);

        // NOTE(review): processing is expected to fail here (presumably no
        // enrichment backend is reachable in tests — confirm) so the request
        // stays queued with an incremented attempt count.
        Assert.Equal(0, result.ProcessedCount);
        Assert.Equal(1, result.FailedCount);
        Assert.Equal(1, result.RemainingCount);
        var queuePath = Path.Combine(temp.Path, ".evidence", "enrichment_queue.json");
        using var document = JsonDocument.Parse(await File.ReadAllTextAsync(queuePath, CancellationToken.None));
        var requests = document.RootElement.GetProperty("requests");
        Assert.Equal(1, requests.GetArrayLength());
        Assert.Equal(1, requests[0].GetProperty("attempt_count").GetInt32());
    }

    /// <summary>Creates a unique temp directory and deletes it (best effort) on dispose.</summary>
    private sealed class TempDirectory : IDisposable
    {
        public TempDirectory()
        {
            Path = Directory.CreateTempSubdirectory("stellaops-exportcache-").FullName;
        }

        public string Path { get; }

        public void Dispose()
        {
            // Best-effort cleanup: failure to delete a temp directory must not
            // fail the test run.
            try
            {
                if (Directory.Exists(Path))
                {
                    Directory.Delete(Path, recursive: true);
                }
            }
            catch
            {
            }
        }
    }
}

View File

@@ -0,0 +1,74 @@
namespace StellaOps.Orchestrator.Core.Domain;
/// <summary>
/// Represents the first meaningful signal for a job/run.
/// </summary>
public sealed record FirstSignal
{
    /// <summary>
    /// Schema version of the signal payload; defaults to "1.0".
    /// Not marked <c>required</c>: combining <c>required</c> with an initializer
    /// made the "1.0" default dead code, because every caller was forced to set
    /// the property explicitly.
    /// </summary>
    public string Version { get; init; } = "1.0";

    /// <summary>Unique identifier of this signal.</summary>
    public required string SignalId { get; init; }

    /// <summary>Job this signal belongs to.</summary>
    public required Guid JobId { get; init; }

    /// <summary>When the signal was produced.</summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>Coarse classification (queued, started, failed, …).</summary>
    public required FirstSignalKind Kind { get; init; }

    /// <summary>Pipeline phase the signal refers to.</summary>
    public required FirstSignalPhase Phase { get; init; }

    /// <summary>Subject of the run (repo, image, or artifact).</summary>
    public required FirstSignalScope Scope { get; init; }

    /// <summary>Human-readable, one-line description of the signal.</summary>
    public required string Summary { get; init; }

    /// <summary>Estimated time to completion in seconds, when known.</summary>
    public int? EtaSeconds { get; init; }

    /// <summary>Matching historical outcome, when one is known.</summary>
    public LastKnownOutcome? LastKnownOutcome { get; init; }

    /// <summary>Suggested follow-up actions, when any apply.</summary>
    public IReadOnlyList<NextAction>? NextActions { get; init; }

    /// <summary>Cache/provenance diagnostics for how the signal was served.</summary>
    public required FirstSignalDiagnostics Diagnostics { get; init; }
}
/// <summary>Coarse classification of a first signal. Order defines wire values; do not reorder.</summary>
public enum FirstSignalKind
{
    Queued,
    Started,
    Phase,
    Blocked,
    Failed,
    Succeeded,
    Canceled,
    Unavailable
}
/// <summary>Pipeline phase a first signal refers to. Order defines wire values; do not reorder.</summary>
public enum FirstSignalPhase
{
    Resolve,
    Fetch,
    Restore,
    Analyze,
    Policy,
    Report,
    Unknown
}
/// <summary>Identifies the subject a run operates on.</summary>
public sealed record FirstSignalScope
{
    /// <summary>Scope discriminator: "repo" | "image" | "artifact".</summary>
    public required string Type { get; init; }

    /// <summary>Identifier of the scoped subject (repo/image/artifact id).</summary>
    public required string Id { get; init; }
}
/// <summary>Historical outcome matched against the current run's failure signature.</summary>
public sealed record LastKnownOutcome
{
    /// <summary>Identifier of the matched failure signature.</summary>
    public required string SignatureId { get; init; }

    /// <summary>Error code associated with the signature, when known.</summary>
    public string? ErrorCode { get; init; }

    /// <summary>Token that matched the signature.</summary>
    public required string Token { get; init; }

    /// <summary>Log/output excerpt illustrating the match, when available.</summary>
    public string? Excerpt { get; init; }

    /// <summary>Match confidence: "low" | "medium" | "high".</summary>
    public required string Confidence { get; init; }

    /// <summary>When this signature was first observed.</summary>
    public required DateTimeOffset FirstSeenAt { get; init; }

    /// <summary>How many times this signature has been observed.</summary>
    public required int HitCount { get; init; }
}
/// <summary>A suggested follow-up action surfaced alongside a first signal.</summary>
public sealed record NextAction
{
    /// <summary>Action kind: "open_logs" | "open_job" | "docs" | "retry" | "cli_command".</summary>
    public required string Type { get; init; }

    /// <summary>Display label for the action.</summary>
    public required string Label { get; init; }

    /// <summary>Action target (URL, job id, or command text, depending on <see cref="Type"/>).</summary>
    public required string Target { get; init; }
}
/// <summary>Diagnostics describing how a first signal was served.</summary>
public sealed record FirstSignalDiagnostics
{
    /// <summary>Whether the signal came from the distributed cache.</summary>
    public required bool CacheHit { get; init; }

    /// <summary>Origin of the signal: "snapshot" | "failure_index" | "cold_start".</summary>
    public required string Source { get; init; }

    /// <summary>Correlation id for tracing the request that produced the signal.</summary>
    public required string CorrelationId { get; init; }
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.Orchestrator.Core.Repositories;
/// <summary>Persistence for per-run first-signal snapshots, keyed by (tenant, run).</summary>
public interface IFirstSignalSnapshotRepository
{
    /// <summary>Fetches the snapshot for a run, or null when none exists.</summary>
    Task<FirstSignalSnapshot?> GetByRunIdAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default);

    /// <summary>Inserts or replaces the snapshot for the snapshot's (tenant, run) pair.</summary>
    Task UpsertAsync(
        FirstSignalSnapshot snapshot,
        CancellationToken cancellationToken = default);

    /// <summary>Removes the snapshot for a run; a no-op when none exists.</summary>
    Task DeleteByRunIdAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Row-shaped snapshot of a run's first signal as persisted by the repository.
/// JSON-valued columns are carried as raw strings so the storage layer stays
/// agnostic of the signal schema.
/// </summary>
public sealed record FirstSignalSnapshot
{
    public required string TenantId { get; init; }
    public required Guid RunId { get; init; }
    public required Guid JobId { get; init; }
    public required DateTimeOffset CreatedAt { get; init; }
    public required DateTimeOffset UpdatedAt { get; init; }
    // Kind/Phase are stored as their string names, not enum values.
    public required string Kind { get; init; }
    public required string Phase { get; init; }
    public required string Summary { get; init; }
    public int? EtaSeconds { get; init; }
    // Optional JSON documents; null maps to SQL NULL in the jsonb columns.
    public string? LastKnownOutcomeJson { get; init; }
    public string? NextActionsJson { get; init; }
    public required string DiagnosticsJson { get; init; }
    // Full serialized FirstSignal; the service deserializes this on read.
    public required string SignalJson { get; init; }
}

View File

@@ -0,0 +1,50 @@
using StellaOps.Orchestrator.Core.Domain;
namespace StellaOps.Orchestrator.Core.Services;
/// <summary>Serves and maintains first signals (TTFS) for runs.</summary>
public interface IFirstSignalService
{
    /// <summary>
    /// Gets the first signal for a run, checking cache first.
    /// </summary>
    /// <param name="runId">Run to look up; must be non-empty.</param>
    /// <param name="tenantId">Owning tenant.</param>
    /// <param name="ifNoneMatch">Caller's ETag; when it matches, the result status is NotModified.</param>
    Task<FirstSignalResult> GetFirstSignalAsync(
        Guid runId,
        string tenantId,
        string? ifNoneMatch = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Updates the first signal snapshot for a run and invalidates any cached copies.
    /// </summary>
    Task UpdateSnapshotAsync(
        Guid runId,
        string tenantId,
        FirstSignal signal,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates cached first signal for a run.
    /// </summary>
    Task InvalidateCacheAsync(
        Guid runId,
        string tenantId,
        CancellationToken cancellationToken = default);
}
/// <summary>Outcome of a first-signal lookup.</summary>
public sealed record FirstSignalResult
{
    /// <summary>How the lookup resolved (found, not modified, not found, …).</summary>
    public required FirstSignalResultStatus Status { get; init; }

    /// <summary>The signal, when one was resolved; null otherwise.</summary>
    public FirstSignal? Signal { get; init; }

    /// <summary>ETag for conditional requests, when a signal was resolved.</summary>
    public string? ETag { get; init; }

    /// <summary>True when the signal was served from the distributed cache.</summary>
    public bool CacheHit { get; init; }

    /// <summary>Origin of the signal, e.g. "snapshot" or "cold_start".</summary>
    public string? Source { get; init; }
}
/// <summary>Resolution status for a first-signal lookup.</summary>
public enum FirstSignalResultStatus
{
    Found,
    NotModified,
    NotFound,
    NotAvailable,
    Error
}

View File

@@ -0,0 +1,149 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Infrastructure.Options;
namespace StellaOps.Orchestrator.Infrastructure.Caching;
/// <summary>Distributed cache facade for first-signal entries, keyed by (tenant, run).</summary>
public interface IFirstSignalCache
{
    /// <summary>Name of the active cache provider, or "none" when caching is disabled.</summary>
    string ProviderName { get; }

    /// <summary>Looks up the cached entry; a miss when caching is disabled.</summary>
    ValueTask<CacheResult<FirstSignalCacheEntry>> GetAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default);

    /// <summary>Stores an entry; a no-op when caching is disabled.</summary>
    ValueTask SetAsync(
        string tenantId,
        Guid runId,
        FirstSignalCacheEntry entry,
        CancellationToken cancellationToken = default);

    /// <summary>Removes an entry; returns false when caching is disabled or nothing was removed.</summary>
    ValueTask<bool> InvalidateAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default);
}
/// <summary>Cached first signal together with its ETag and origin label.</summary>
public sealed record FirstSignalCacheEntry
{
    /// <summary>The cached signal.</summary>
    public required FirstSignal Signal { get; init; }

    /// <summary>ETag computed for this signal, used for conditional responses.</summary>
    public required string ETag { get; init; }

    /// <summary>Where the signal originally came from, e.g. "snapshot" or "cold_start".</summary>
    public required string Origin { get; init; }
}
/// <summary>
/// Default <see cref="IFirstSignalCache"/>: wraps an optional distributed cache
/// and degrades to a no-op ("none" provider) when caching is disabled, no
/// factory is available, or initialization fails.
/// </summary>
public sealed class FirstSignalCache : IFirstSignalCache
{
    // Null when caching is disabled/unavailable; every operation checks this.
    private readonly IDistributedCache<FirstSignalCacheEntry>? _cache;
    private readonly FirstSignalCacheOptions _options;
    private readonly ILogger<FirstSignalCache> _logger;

    public FirstSignalCache(
        IOptions<FirstSignalOptions> options,
        ILogger<FirstSignalCache> logger,
        IDistributedCacheFactory? cacheFactory = null)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value.Cache ?? new FirstSignalCacheOptions();
        var configuredBackend = _options.Backend?.Trim().ToLowerInvariant();
        // Explicitly disabled via configuration.
        if (configuredBackend == "none")
        {
            ProviderName = "none";
            return;
        }
        // No factory registered: caching silently off.
        if (cacheFactory is null)
        {
            ProviderName = "none";
            return;
        }
        try
        {
            ProviderName = cacheFactory.ProviderName;
            // Warn (but proceed) when the configured backend name does not match
            // the provider the factory actually supplies.
            if (!string.IsNullOrWhiteSpace(configuredBackend) &&
                !string.Equals(configuredBackend, ProviderName, StringComparison.OrdinalIgnoreCase))
            {
                _logger.LogWarning(
                    "FirstSignal cache backend is configured as {ConfiguredBackend} but active cache provider is {ProviderName}.",
                    configuredBackend,
                    ProviderName);
            }
            _cache = cacheFactory.Create<FirstSignalCacheEntry>(new CacheOptions
            {
                KeyPrefix = _options.KeyPrefix,
                DefaultTtl = TimeSpan.FromSeconds(_options.TtlSeconds),
                SlidingExpiration = _options.SlidingExpiration
            });
        }
        catch (Exception ex)
        {
            // Initialization failure is non-fatal: run without caching.
            ProviderName = "none";
            _logger.LogWarning(ex, "Failed to initialize distributed cache; disabling first-signal caching.");
        }
    }

    /// <inheritdoc />
    public string ProviderName { get; }

    /// <inheritdoc />
    public async ValueTask<CacheResult<FirstSignalCacheEntry>> GetAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default)
    {
        if (_cache is null)
        {
            return CacheResult<FirstSignalCacheEntry>.Miss();
        }
        var key = BuildKey(tenantId, runId);
        return await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async ValueTask SetAsync(
        string tenantId,
        Guid runId,
        FirstSignalCacheEntry entry,
        CancellationToken cancellationToken = default)
    {
        if (_cache is null)
        {
            return;
        }
        ArgumentNullException.ThrowIfNull(entry);
        var key = BuildKey(tenantId, runId);
        // null TTL: defer to the DefaultTtl configured on the cache.
        await _cache.SetAsync(key, entry, null, cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async ValueTask<bool> InvalidateAsync(
        string tenantId,
        Guid runId,
        CancellationToken cancellationToken = default)
    {
        if (_cache is null)
        {
            return false;
        }
        var key = BuildKey(tenantId, runId);
        return await _cache.InvalidateAsync(key, cancellationToken).ConfigureAwait(false);
    }

    // Builds the tenant-scoped cache key; validates inputs since this runs on
    // every cache operation.
    private static string BuildKey(string tenantId, Guid runId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        if (runId == Guid.Empty)
        {
            throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
        }
        return $"tenant:{tenantId.Trim()}:signal:run:{runId:D}";
    }
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Orchestrator.Infrastructure.Options;
/// <summary>Root configuration for the first-signal (TTFS) feature, bound from the "FirstSignal" section.</summary>
public sealed class FirstSignalOptions
{
    public const string SectionName = "FirstSignal";

    /// <summary>Distributed-cache settings.</summary>
    public FirstSignalCacheOptions Cache { get; set; } = new();

    /// <summary>Settings for the on-demand (cold path) signal computation.</summary>
    public FirstSignalColdPathOptions ColdPath { get; set; } = new();

    /// <summary>Settings for the background snapshot writer.</summary>
    public FirstSignalSnapshotWriterOptions SnapshotWriter { get; set; } = new();
}
/// <summary>Cache configuration for first signals.</summary>
public sealed class FirstSignalCacheOptions
{
    /// <summary>Cache backend selector: inmemory | valkey | postgres | none.</summary>
    public string Backend { get; set; } = "inmemory";

    /// <summary>Entry time-to-live in seconds (default: one day).</summary>
    public int TtlSeconds { get; set; } = 86400;

    /// <summary>Whether reads extend an entry's lifetime.</summary>
    public bool SlidingExpiration { get; set; } = true;

    /// <summary>Prefix applied to every cache key.</summary>
    public string KeyPrefix { get; set; } = "orchestrator:first_signal:";
}
/// <summary>Configuration for cold-path (on-demand) first-signal computation.</summary>
public sealed class FirstSignalColdPathOptions
{
    /// <summary>Timeout for cold-path repository reads, in milliseconds; 0 or less disables the timeout.</summary>
    public int TimeoutMs { get; set; } = 3000;
}
/// <summary>Configuration for the background first-signal snapshot writer.</summary>
public sealed class FirstSignalSnapshotWriterOptions
{
    /// <summary>Whether the background writer runs at all (off by default).</summary>
    public bool Enabled { get; set; }

    /// <summary>Tenant the writer operates on; semantics when null depend on the writer — confirm.</summary>
    public string? TenantId { get; set; }

    /// <summary>Seconds between writer ticks.</summary>
    public int PollIntervalSeconds { get; set; } = 10;

    /// <summary>Maximum number of runs processed per tick.</summary>
    public int MaxRunsPerTick { get; set; } = 50;

    /// <summary>How far back (minutes) the writer looks for runs to snapshot.</summary>
    public int LookbackMinutes { get; set; } = 60;
}

View File

@@ -0,0 +1,171 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Orchestrator.Core.Repositories;
namespace StellaOps.Orchestrator.Infrastructure.Postgres;
/// <summary>
/// PostgreSQL-backed <see cref="IFirstSignalSnapshotRepository"/> over the
/// <c>first_signal_snapshots</c> table. JSON-valued columns are written as
/// <c>jsonb</c> parameters; the upsert keys on (tenant_id, run_id).
/// </summary>
public sealed class PostgresFirstSignalSnapshotRepository : IFirstSignalSnapshotRepository
{
    // Shared column list so SELECT ordinal positions stay in sync with MapSnapshot.
    private const string SelectColumns = """
        tenant_id, run_id, job_id, created_at, updated_at,
        kind, phase, summary, eta_seconds,
        last_known_outcome, next_actions, diagnostics, signal_json
        """;

    private const string SelectByRunIdSql = $"""
        SELECT {SelectColumns}
        FROM first_signal_snapshots
        WHERE tenant_id = @tenant_id AND run_id = @run_id
        LIMIT 1
        """;

    private const string DeleteByRunIdSql = """
        DELETE FROM first_signal_snapshots
        WHERE tenant_id = @tenant_id AND run_id = @run_id
        """;

    // Insert-or-update keyed on (tenant_id, run_id); created_at is preserved on
    // conflict (not overwritten by EXCLUDED.created_at).
    private const string UpsertSql = """
        INSERT INTO first_signal_snapshots (
        tenant_id, run_id, job_id, created_at, updated_at,
        kind, phase, summary, eta_seconds,
        last_known_outcome, next_actions, diagnostics, signal_json)
        VALUES (
        @tenant_id, @run_id, @job_id, @created_at, @updated_at,
        @kind, @phase, @summary, @eta_seconds,
        @last_known_outcome, @next_actions, @diagnostics, @signal_json)
        ON CONFLICT (tenant_id, run_id) DO UPDATE SET
        job_id = EXCLUDED.job_id,
        updated_at = EXCLUDED.updated_at,
        kind = EXCLUDED.kind,
        phase = EXCLUDED.phase,
        summary = EXCLUDED.summary,
        eta_seconds = EXCLUDED.eta_seconds,
        last_known_outcome = EXCLUDED.last_known_outcome,
        next_actions = EXCLUDED.next_actions,
        diagnostics = EXCLUDED.diagnostics,
        signal_json = EXCLUDED.signal_json
        """;

    private readonly OrchestratorDataSource _dataSource;
    private readonly ILogger<PostgresFirstSignalSnapshotRepository> _logger;

    public PostgresFirstSignalSnapshotRepository(
        OrchestratorDataSource dataSource,
        ILogger<PostgresFirstSignalSnapshotRepository> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Fetches the snapshot for (tenant, run), or null when no row exists.</summary>
    public async Task<FirstSignalSnapshot?> GetByRunIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        if (runId == Guid.Empty)
        {
            throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
        }
        // Read role: "reader" connection.
        await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(SelectByRunIdSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);
        command.Parameters.AddWithValue("run_id", runId);
        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return null;
        }
        return MapSnapshot(reader);
    }

    /// <summary>Inserts or updates the row for the snapshot's (tenant, run) pair.</summary>
    public async Task UpsertAsync(FirstSignalSnapshot snapshot, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        ArgumentException.ThrowIfNullOrWhiteSpace(snapshot.TenantId);
        if (snapshot.RunId == Guid.Empty)
        {
            throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(snapshot));
        }
        await using var connection = await _dataSource.OpenConnectionAsync(snapshot.TenantId, "writer", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(UpsertSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", snapshot.TenantId);
        command.Parameters.AddWithValue("run_id", snapshot.RunId);
        command.Parameters.AddWithValue("job_id", snapshot.JobId);
        command.Parameters.AddWithValue("created_at", snapshot.CreatedAt);
        command.Parameters.AddWithValue("updated_at", snapshot.UpdatedAt);
        command.Parameters.AddWithValue("kind", snapshot.Kind);
        command.Parameters.AddWithValue("phase", snapshot.Phase);
        command.Parameters.AddWithValue("summary", snapshot.Summary);
        command.Parameters.AddWithValue("eta_seconds", (object?)snapshot.EtaSeconds ?? DBNull.Value);
        // JSON columns must be typed as jsonb explicitly; AddWithValue would
        // otherwise send them as plain text.
        command.Parameters.Add(new NpgsqlParameter("last_known_outcome", NpgsqlDbType.Jsonb)
        {
            Value = (object?)snapshot.LastKnownOutcomeJson ?? DBNull.Value
        });
        command.Parameters.Add(new NpgsqlParameter("next_actions", NpgsqlDbType.Jsonb)
        {
            Value = (object?)snapshot.NextActionsJson ?? DBNull.Value
        });
        command.Parameters.Add(new NpgsqlParameter("diagnostics", NpgsqlDbType.Jsonb)
        {
            Value = snapshot.DiagnosticsJson
        });
        command.Parameters.Add(new NpgsqlParameter("signal_json", NpgsqlDbType.Jsonb)
        {
            Value = snapshot.SignalJson
        });
        try
        {
            await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }
        catch (PostgresException ex)
        {
            // Log with identifiers for triage, then let the caller decide.
            _logger.LogError(ex, "Failed to upsert first signal snapshot for tenant {TenantId} run {RunId}.", snapshot.TenantId, snapshot.RunId);
            throw;
        }
    }

    /// <summary>Deletes the row for (tenant, run); a no-op when no row exists.</summary>
    public async Task DeleteByRunIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        if (runId == Guid.Empty)
        {
            throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
        }
        await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(DeleteByRunIdSql, connection);
        command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
        command.Parameters.AddWithValue("tenant_id", tenantId);
        command.Parameters.AddWithValue("run_id", runId);
        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    // Materializes a row by ordinal; positions must match SelectColumns.
    private static FirstSignalSnapshot MapSnapshot(NpgsqlDataReader reader)
    {
        return new FirstSignalSnapshot
        {
            TenantId = reader.GetString(0),
            RunId = reader.GetGuid(1),
            JobId = reader.GetGuid(2),
            CreatedAt = reader.GetFieldValue<DateTimeOffset>(3),
            UpdatedAt = reader.GetFieldValue<DateTimeOffset>(4),
            Kind = reader.GetString(5),
            Phase = reader.GetString(6),
            Summary = reader.GetString(7),
            EtaSeconds = reader.IsDBNull(8) ? null : reader.GetInt32(8),
            LastKnownOutcomeJson = reader.IsDBNull(9) ? null : reader.GetString(9),
            NextActionsJson = reader.IsDBNull(10) ? null : reader.GetString(10),
            DiagnosticsJson = reader.GetString(11),
            SignalJson = reader.GetString(12),
        };
    }
}

View File

@@ -2,11 +2,14 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Orchestrator.Core.Backfill;
using StellaOps.Orchestrator.Core.Observability;
using StellaOps.Orchestrator.Core.Repositories;
using StellaOps.Orchestrator.Infrastructure.Ledger;
using StellaOps.Orchestrator.Infrastructure.Observability;
using StellaOps.Orchestrator.Infrastructure.Caching;
using StellaOps.Orchestrator.Infrastructure.Options;
using StellaOps.Orchestrator.Infrastructure.Postgres;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using StellaOps.Orchestrator.Infrastructure.Services;
namespace StellaOps.Orchestrator.Infrastructure;
@@ -44,6 +47,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<IPackRunRepository, PostgresPackRunRepository>();
services.AddScoped<IPackRunLogRepository, PostgresPackRunLogRepository>();
services.AddScoped<IPackRegistryRepository, PostgresPackRegistryRepository>();
services.AddScoped<IFirstSignalSnapshotRepository, PostgresFirstSignalSnapshotRepository>();
// Register audit and ledger repositories
services.AddScoped<IAuditRepository, PostgresAuditRepository>();
@@ -67,6 +71,11 @@ public static class ServiceCollectionExtensions
services.AddSingleton(incidentModeOptions);
services.AddSingleton<IIncidentModeHooks, IncidentModeHooks>();
// First signal (TTFS) services
services.Configure<FirstSignalOptions>(configuration.GetSection(FirstSignalOptions.SectionName));
services.AddSingleton<IFirstSignalCache, FirstSignalCache>();
services.AddScoped<StellaOps.Orchestrator.Core.Services.IFirstSignalService, FirstSignalService>();
return services;
}
}

View File

@@ -0,0 +1,571 @@
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
using StellaOps.Orchestrator.Core.Repositories;
using StellaOps.Orchestrator.Infrastructure.Caching;
using StellaOps.Orchestrator.Infrastructure.Options;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using StellaOps.Telemetry.Core;
using CoreServices = StellaOps.Orchestrator.Core.Services;
namespace StellaOps.Orchestrator.Infrastructure.Services;
public sealed class FirstSignalService : CoreServices.IFirstSignalService
{
    // JSON options for (de)serializing persisted FirstSignal snapshots:
    // case-insensitive property matching, enums as camelCase strings.
    private static readonly JsonSerializerOptions SignalJsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    private readonly IFirstSignalCache _cache;
    private readonly IFirstSignalSnapshotRepository _snapshotRepository;
    private readonly IRunRepository _runRepository;
    private readonly IJobRepository _jobRepository;
    private readonly TimeProvider _timeProvider;
    private readonly TimeToFirstSignalMetrics _ttfsMetrics;
    private readonly FirstSignalOptions _options;
    private readonly ILogger<FirstSignalService> _logger;
    /// <summary>
    /// Wires the service's cache, repositories, clock, metrics, and options.
    /// </summary>
    public FirstSignalService(
        IFirstSignalCache cache,
        IFirstSignalSnapshotRepository snapshotRepository,
        IRunRepository runRepository,
        IJobRepository jobRepository,
        TimeProvider timeProvider,
        TimeToFirstSignalMetrics ttfsMetrics,
        IOptions<FirstSignalOptions> options,
        ILogger<FirstSignalService> logger)
    {
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _snapshotRepository = snapshotRepository ?? throw new ArgumentNullException(nameof(snapshotRepository));
        _runRepository = runRepository ?? throw new ArgumentNullException(nameof(runRepository));
        _jobRepository = jobRepository ?? throw new ArgumentNullException(nameof(jobRepository));
        // NOTE(review): unlike the other dependencies, a null TimeProvider falls
        // back to TimeProvider.System instead of throwing — confirm intentional.
        _timeProvider = timeProvider ?? TimeProvider.System;
        _ttfsMetrics = ttfsMetrics ?? throw new ArgumentNullException(nameof(ttfsMetrics));
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }
public async Task<CoreServices.FirstSignalResult> GetFirstSignalAsync(
Guid runId,
string tenantId,
string? ifNoneMatch = null,
CancellationToken cancellationToken = default)
{
if (runId == Guid.Empty)
{
throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
}
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var overallStopwatch = Stopwatch.StartNew();
// 1) Cache fast path
var cacheLookupStopwatch = Stopwatch.StartNew();
var cacheResult = await _cache.GetAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
cacheLookupStopwatch.Stop();
if (cacheResult.HasValue)
{
var cached = cacheResult.Value;
var signal = cached.Signal;
var etag = cached.ETag;
var origin = string.IsNullOrWhiteSpace(cached.Origin) ? "snapshot" : cached.Origin.Trim().ToLowerInvariant();
_ttfsMetrics.RecordCacheLookup(
cacheLookupStopwatch.Elapsed.TotalSeconds,
surface: "api",
cacheHit: true,
signalSource: origin,
kind: MapKind(signal.Kind),
phase: MapPhase(signal.Phase),
tenantId: tenantId);
if (IsNotModified(ifNoneMatch, etag))
{
RecordSignalRendered(overallStopwatch, cacheHit: true, origin, signal.Kind, signal.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.NotModified,
CacheHit = true,
Source = origin,
ETag = etag,
Signal = signal with
{
Diagnostics = signal.Diagnostics with
{
CacheHit = true,
Source = origin,
}
}
};
}
RecordSignalRendered(overallStopwatch, cacheHit: true, origin, signal.Kind, signal.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.Found,
CacheHit = true,
Source = origin,
ETag = etag,
Signal = signal with
{
Diagnostics = signal.Diagnostics with
{
CacheHit = true,
Source = origin,
}
}
};
}
_ttfsMetrics.RecordCacheLookup(
cacheLookupStopwatch.Elapsed.TotalSeconds,
surface: "api",
cacheHit: false,
signalSource: null,
kind: TtfsSignalKind.Unavailable,
phase: TtfsPhase.Unknown,
tenantId: tenantId);
// 2) Snapshot fast path
var snapshot = await _snapshotRepository.GetByRunIdAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
if (snapshot is not null)
{
var signal = TryDeserializeSignal(snapshot.SignalJson);
if (signal is not null)
{
var etag = GenerateEtag(signal);
var origin = "snapshot";
if (IsNotModified(ifNoneMatch, etag))
{
RecordSignalRendered(overallStopwatch, cacheHit: false, origin, signal.Kind, signal.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.NotModified,
CacheHit = false,
Source = origin,
ETag = etag,
Signal = signal with
{
Diagnostics = signal.Diagnostics with
{
CacheHit = false,
Source = origin,
}
}
};
}
await _cache.SetAsync(
tenantId,
runId,
new FirstSignalCacheEntry
{
Signal = signal,
ETag = etag,
Origin = origin,
},
cancellationToken)
.ConfigureAwait(false);
RecordSignalRendered(overallStopwatch, cacheHit: false, origin, signal.Kind, signal.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.Found,
CacheHit = false,
Source = origin,
ETag = etag,
Signal = signal with
{
Diagnostics = signal.Diagnostics with
{
CacheHit = false,
Source = origin,
}
}
};
}
_logger.LogWarning(
"Invalid first signal snapshot JSON for tenant {TenantId} run {RunId}; deleting snapshot row.",
tenantId, runId);
await _snapshotRepository.DeleteByRunIdAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
}
// 3) Cold path
using var coldPathCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
if (_options.ColdPath.TimeoutMs > 0)
{
coldPathCts.CancelAfter(TimeSpan.FromMilliseconds(_options.ColdPath.TimeoutMs));
}
var coldStopwatch = Stopwatch.StartNew();
var run = await _runRepository.GetByIdAsync(tenantId, runId, coldPathCts.Token).ConfigureAwait(false);
if (run is null)
{
RecordSignalRendered(overallStopwatch, cacheHit: false, origin: null, TtfsSignalKind.Unavailable, TtfsPhase.Unknown, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.NotFound,
CacheHit = false,
Source = null,
ETag = null,
Signal = null,
};
}
var jobs = await _jobRepository.GetByRunIdAsync(tenantId, runId, coldPathCts.Token).ConfigureAwait(false);
coldStopwatch.Stop();
if (jobs.Count == 0)
{
RecordSignalRendered(overallStopwatch, cacheHit: false, origin: "cold_start", TtfsSignalKind.Unavailable, TtfsPhase.Unknown, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.NotAvailable,
CacheHit = false,
Source = "cold_start",
ETag = null,
Signal = null,
};
}
var signalComputed = ComputeSignal(run, jobs, cacheHit: false, origin: "cold_start");
var computedEtag = GenerateEtag(signalComputed);
_ttfsMetrics.RecordColdPathComputation(
coldStopwatch.Elapsed.TotalSeconds,
surface: "api",
signalSource: "cold_start",
kind: MapKind(signalComputed.Kind),
phase: MapPhase(signalComputed.Phase),
tenantId: tenantId);
await UpdateSnapshotAsyncInternal(runId, tenantId, signalComputed, cancellationToken).ConfigureAwait(false);
await _cache.SetAsync(
tenantId,
runId,
new FirstSignalCacheEntry
{
Signal = signalComputed,
ETag = computedEtag,
Origin = "cold_start",
},
cancellationToken)
.ConfigureAwait(false);
if (IsNotModified(ifNoneMatch, computedEtag))
{
RecordSignalRendered(overallStopwatch, cacheHit: false, origin: "cold_start", signalComputed.Kind, signalComputed.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.NotModified,
CacheHit = false,
Source = "cold_start",
ETag = computedEtag,
Signal = signalComputed,
};
}
RecordSignalRendered(overallStopwatch, cacheHit: false, origin: "cold_start", signalComputed.Kind, signalComputed.Phase, tenantId);
return new CoreServices.FirstSignalResult
{
Status = CoreServices.FirstSignalResultStatus.Found,
CacheHit = false,
Source = "cold_start",
ETag = computedEtag,
Signal = signalComputed,
};
}
/// <summary>
/// Persists the supplied signal as the run's snapshot and invalidates any
/// cached entry so the next read observes the fresh state.
/// </summary>
public async Task UpdateSnapshotAsync(Guid runId, string tenantId, FirstSignal signal, CancellationToken cancellationToken = default)
{
    // Validate inputs up front; an empty GUID can never identify a run.
    if (runId == Guid.Empty)
    {
        throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
    }
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentNullException.ThrowIfNull(signal);

    // Stamp provenance before persisting: data served from the snapshot store
    // is never a cache hit and its source is always "snapshot".
    var stamped = signal with
    {
        Diagnostics = signal.Diagnostics with
        {
            CacheHit = false,
            Source = "snapshot",
        }
    };
    await UpdateSnapshotAsyncInternal(runId, tenantId, stamped, cancellationToken).ConfigureAwait(false);

    // Drop any cached entry so subsequent reads re-resolve from the new snapshot.
    await _cache.InvalidateAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Removes any cached first-signal entry for the given run so the next read
/// falls back to snapshot or cold-path resolution.
/// </summary>
public async Task InvalidateCacheAsync(Guid runId, string tenantId, CancellationToken cancellationToken = default)
{
    // Reject obviously invalid identifiers before touching the cache layer.
    if (runId == Guid.Empty)
    {
        throw new ArgumentException("Run ID must be a non-empty GUID.", nameof(runId));
    }
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

    await _cache.InvalidateAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Builds the snapshot row for a signal and upserts it into the snapshot store.
/// </summary>
private async Task UpdateSnapshotAsyncInternal(Guid runId, string tenantId, FirstSignal signal, CancellationToken cancellationToken)
{
    var timestamp = _timeProvider.GetUtcNow();

    // Serialize the optional payload fragments ahead of time so the row
    // construction below stays a flat field list.
    string? lastKnownOutcomeJson = signal.LastKnownOutcome is null
        ? null
        : JsonSerializer.Serialize(signal.LastKnownOutcome, SignalJsonOptions);
    string? nextActionsJson = signal.NextActions is null
        ? null
        : JsonSerializer.Serialize(signal.NextActions, SignalJsonOptions);

    var row = new FirstSignalSnapshot
    {
        TenantId = tenantId,
        RunId = runId,
        JobId = signal.JobId,
        CreatedAt = timestamp,
        UpdatedAt = timestamp,
        // kind/phase are stored lower-cased to match the DB check constraints.
        Kind = signal.Kind.ToString().ToLowerInvariant(),
        Phase = signal.Phase.ToString().ToLowerInvariant(),
        Summary = signal.Summary,
        EtaSeconds = signal.EtaSeconds,
        LastKnownOutcomeJson = lastKnownOutcomeJson,
        NextActionsJson = nextActionsJson,
        DiagnosticsJson = JsonSerializer.Serialize(signal.Diagnostics, SignalJsonOptions),
        // Canonical JSON: this is what the fast path deserializes and ETags.
        SignalJson = CanonicalJsonHasher.ToCanonicalJson(signal),
    };
    await _snapshotRepository.UpsertAsync(row, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Derives the first signal for a run from its run row and job rows (cold path).
/// </summary>
private static FirstSignal ComputeSignal(Run run, IReadOnlyList<Job> jobs, bool cacheHit, string origin)
{
    ArgumentNullException.ThrowIfNull(run);
    ArgumentNullException.ThrowIfNull(jobs);

    var job = SelectRepresentativeJob(run, jobs);

    // A leased job means work is actively in flight: report "started" even if
    // the run row has not transitioned out of its queued state yet.
    FirstSignalKind kind;
    if (jobs.Any(j => j.Status == JobStatus.Leased))
    {
        kind = FirstSignalKind.Started;
    }
    else
    {
        kind = run.Status switch
        {
            RunStatus.Failed => FirstSignalKind.Failed,
            RunStatus.Canceled => FirstSignalKind.Canceled,
            RunStatus.Succeeded or RunStatus.PartiallySucceeded => FirstSignalKind.Succeeded,
            _ => FirstSignalKind.Queued
        };
    }

    // Phase granularity is not derivable from run/job rows alone.
    var phase = FirstSignalPhase.Unknown;
    var timestamp = ResolveTimestamp(run, job, kind);
    var correlationId = run.CorrelationId ?? job.CorrelationId ?? string.Empty;

    // Deterministic id: identical state always yields the identical signal id.
    var signalId = $"{run.RunId:D}:{job.JobId:D}:{kind.ToString().ToLowerInvariant()}:{phase.ToString().ToLowerInvariant()}:{timestamp.ToUnixTimeMilliseconds()}";

    var summary = kind switch
    {
        FirstSignalKind.Queued => "Run queued",
        FirstSignalKind.Started => "Run started",
        FirstSignalKind.Succeeded => "Run completed",
        FirstSignalKind.Failed => "Run failed",
        FirstSignalKind.Canceled => "Run canceled",
        _ => "Run update"
    };

    return new FirstSignal
    {
        Version = "1.0",
        SignalId = signalId,
        JobId = job.JobId,
        Timestamp = timestamp,
        Kind = kind,
        Phase = phase,
        Scope = new FirstSignalScope { Type = "run", Id = run.RunId.ToString("D") },
        Summary = summary,
        EtaSeconds = null,
        LastKnownOutcome = null,
        NextActions = null,
        Diagnostics = new FirstSignalDiagnostics
        {
            CacheHit = cacheHit,
            Source = origin,
            CorrelationId = correlationId
        }
    };
}
/// <summary>
/// Picks the job that best represents the run's current state.
/// Caller guarantees <paramref name="jobs"/> is non-empty.
/// </summary>
private static Job SelectRepresentativeJob(Run run, IReadOnlyList<Job> jobs)
{
    // 1) An in-flight (leased) job surfaces "started" as early as possible,
    //    even before Run.Status catches up. Ties break on earliest lease,
    //    then earliest creation; MinBy returns the first minimal element,
    //    matching a stable OrderBy/ThenBy.
    var inFlight = jobs
        .Where(j => j.Status == JobStatus.Leased)
        .MinBy(j => (j.LeasedAt ?? DateTimeOffset.MaxValue, j.CreatedAt));
    if (inFlight is not null)
    {
        return inFlight;
    }

    // 2) For terminal runs, the job that finished first best explains the outcome.
    if (run.Status is RunStatus.Succeeded or RunStatus.PartiallySucceeded or RunStatus.Failed or RunStatus.Canceled)
    {
        var finishedFirst = jobs
            .Where(j => j.Status is JobStatus.Succeeded or JobStatus.Failed or JobStatus.Canceled or JobStatus.TimedOut)
            .MinBy(j => (j.CompletedAt ?? DateTimeOffset.MaxValue, j.CreatedAt));
        if (finishedFirst is not null)
        {
            return finishedFirst;
        }
    }

    // 3) Fall back to the earliest-created job.
    return jobs.OrderBy(j => j.CreatedAt).First();
}
/// <summary>
/// Picks the most specific timestamp available for the signal kind, falling
/// back from job-level to run-level to creation times.
/// </summary>
private static DateTimeOffset ResolveTimestamp(Run run, Job job, FirstSignalKind kind)
{
    if (kind == FirstSignalKind.Started)
    {
        return job.LeasedAt ?? run.StartedAt ?? run.CreatedAt;
    }
    if (kind is FirstSignalKind.Succeeded or FirstSignalKind.Failed or FirstSignalKind.Canceled)
    {
        return job.CompletedAt ?? run.CompletedAt ?? run.CreatedAt;
    }
    // Queued and any other kind: when the job was scheduled (or created).
    return job.ScheduledAt ?? job.CreatedAt;
}
/// <summary>
/// Deserializes a persisted snapshot payload; returns null for empty, corrupt,
/// or schema-incompatible JSON so the caller can fall back to the cold path
/// (and delete the bad snapshot row).
/// </summary>
private static FirstSignal? TryDeserializeSignal(string json)
{
    // Empty/whitespace payloads are treated the same as corrupt ones: no signal.
    if (string.IsNullOrWhiteSpace(json))
    {
        return null;
    }
    try
    {
        return JsonSerializer.Deserialize<FirstSignal>(json, SignalJsonOptions);
    }
    catch (Exception ex) when (ex is JsonException or NotSupportedException)
    {
        // Only deserialization failures are swallowed (the original blanket
        // catch also hid unrelated failures such as OutOfMemoryException);
        // anything else now propagates to the caller.
        return null;
    }
}
/// <summary>
/// Computes a weak ETag over the client-visible fields of a signal.
/// </summary>
private static string GenerateEtag(FirstSignal signal)
{
    // Hash only client-visible fields; diagnostics (cache hit, source,
    // correlation id) are excluded so identical payloads served from
    // different origins share an ETag.
    var material = new
    {
        signal.Version,
        signal.JobId,
        signal.Timestamp,
        signal.Kind,
        signal.Phase,
        signal.Scope,
        signal.Summary,
        signal.EtaSeconds,
        signal.LastKnownOutcome,
        signal.NextActions
    };
    // First 8 bytes of the SHA-256 digest keep the header short while still
    // being collision-resistant enough for cache validation. Weak (W/)
    // validator because the hash covers a canonical re-serialization rather
    // than the exact response bytes.
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(CanonicalJsonHasher.ToCanonicalJson(material)));
    return $"W/\"{Convert.ToBase64String(digest.AsSpan(0, 8))}\"";
}
/// <summary>
/// Evaluates an If-None-Match header value against the current ETag.
/// Returns true when the client's cached representation is still valid
/// ("*" or an exact token match per RFC 9110 §13.1.2).
/// </summary>
private static bool IsNotModified(string? ifNoneMatch, string etag)
{
    // No header → the client has no cached representation to revalidate.
    if (string.IsNullOrWhiteSpace(ifNoneMatch))
    {
        return false;
    }
    // TrimEntries already strips surrounding whitespace, so the original
    // per-token Trim was redundant; a single pass also avoids materializing
    // a list and enumerating it twice.
    foreach (var token in ifNoneMatch.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
    {
        if (token == "*" || string.Equals(token, etag, StringComparison.Ordinal))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Translates the domain-level signal kind into the telemetry enum; anything
/// unrecognized degrades to Unavailable.
/// </summary>
private static TtfsSignalKind MapKind(FirstSignalKind kind)
{
    switch (kind)
    {
        case FirstSignalKind.Queued: return TtfsSignalKind.Queued;
        case FirstSignalKind.Started: return TtfsSignalKind.Started;
        case FirstSignalKind.Phase: return TtfsSignalKind.Phase;
        case FirstSignalKind.Blocked: return TtfsSignalKind.Blocked;
        case FirstSignalKind.Failed: return TtfsSignalKind.Failed;
        case FirstSignalKind.Succeeded: return TtfsSignalKind.Succeeded;
        case FirstSignalKind.Canceled: return TtfsSignalKind.Canceled;
        default: return TtfsSignalKind.Unavailable;
    }
}
/// <summary>
/// Translates the domain-level execution phase into the telemetry enum;
/// anything unrecognized degrades to Unknown.
/// </summary>
private static TtfsPhase MapPhase(FirstSignalPhase phase)
{
    switch (phase)
    {
        case FirstSignalPhase.Resolve: return TtfsPhase.Resolve;
        case FirstSignalPhase.Fetch: return TtfsPhase.Fetch;
        case FirstSignalPhase.Restore: return TtfsPhase.Restore;
        case FirstSignalPhase.Analyze: return TtfsPhase.Analyze;
        case FirstSignalPhase.Policy: return TtfsPhase.Policy;
        case FirstSignalPhase.Report: return TtfsPhase.Report;
        default: return TtfsPhase.Unknown;
    }
}
/// <summary>
/// Convenience overload for domain enums: translates them to telemetry enums
/// and delegates so the metric-emission call lives in exactly one place.
/// </summary>
private void RecordSignalRendered(
    Stopwatch overallStopwatch,
    bool cacheHit,
    string? origin,
    FirstSignalKind kind,
    FirstSignalPhase phase,
    string tenantId)
{
    // Previously this overload duplicated the full _ttfsMetrics call of the
    // TtfsSignalKind overload; delegating removes the duplication.
    RecordSignalRendered(overallStopwatch, cacheHit, origin, MapKind(kind), MapPhase(phase), tenantId);
}
/// <summary>
/// Emits the TTFS "signal rendered" metric for the API surface, tagging cache
/// outcome, signal origin, kind/phase, and tenant.
/// </summary>
private void RecordSignalRendered(
    Stopwatch overallStopwatch,
    bool cacheHit,
    string? origin,
    TtfsSignalKind kind,
    TtfsPhase phase,
    string tenantId)
{
    // Latency is the elapsed time of the caller's overall request stopwatch.
    // origin may be null when the run was not found (no source to attribute).
    _ttfsMetrics.RecordSignalRendered(
        latencySeconds: overallStopwatch.Elapsed.TotalSeconds,
        surface: "api",
        cacheHit: cacheHit,
        signalSource: origin,
        kind: kind,
        phase: phase,
        tenantId: tenantId);
}
}

View File

@@ -0,0 +1,130 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Infrastructure.Options;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using CoreServices = StellaOps.Orchestrator.Core.Services;
namespace StellaOps.Orchestrator.Infrastructure.Services;
/// <summary>
/// Background service that periodically warms first-signal state (snapshot and
/// cache) for recent pending/running runs of a single configured tenant by
/// invoking the first-signal service for each candidate run.
/// </summary>
public sealed class FirstSignalSnapshotWriter : BackgroundService
{
    private readonly IServiceScopeFactory _scopeFactory;
    private readonly FirstSignalSnapshotWriterOptions _options;
    private readonly ILogger<FirstSignalSnapshotWriter> _logger;

    public FirstSignalSnapshotWriter(
        IServiceScopeFactory scopeFactory,
        IOptions<FirstSignalOptions> options,
        ILogger<FirstSignalSnapshotWriter> logger)
    {
        _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
        // Only the SnapshotWriter sub-section of FirstSignalOptions is used here.
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value.SnapshotWriter;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        // Opt-in service: exit immediately unless explicitly enabled.
        if (!_options.Enabled)
        {
            _logger.LogDebug("FirstSignalSnapshotWriter is disabled.");
            return;
        }
        // Warming is per-tenant; without a tenant there is nothing to do.
        if (string.IsNullOrWhiteSpace(_options.TenantId))
        {
            _logger.LogWarning(
                "FirstSignalSnapshotWriter enabled but no tenant configured; set {Section}:{Key}.",
                FirstSignalOptions.SectionName,
                $"{nameof(FirstSignalOptions.SnapshotWriter)}:{nameof(FirstSignalSnapshotWriterOptions.TenantId)}");
            return;
        }
        var tenantId = _options.TenantId.Trim();
        // Clamp configuration to sane minimums (1 minute / 1 second / 1 run).
        var lookback = TimeSpan.FromMinutes(Math.Max(1, _options.LookbackMinutes));
        var pollInterval = TimeSpan.FromSeconds(Math.Max(1, _options.PollIntervalSeconds));
        var maxRuns = Math.Max(1, _options.MaxRunsPerTick);
        // PeriodicTimer waits one full interval before the first tick fires.
        using var timer = new PeriodicTimer(pollInterval);
        while (await timer.WaitForNextTickAsync(stoppingToken).ConfigureAwait(false))
        {
            try
            {
                await WarmTenantAsync(tenantId, lookback, maxRuns, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                // Graceful shutdown: leave the loop without logging an error.
                break;
            }
            catch (Exception ex)
            {
                // A failed tick must not kill the loop; log and retry next tick.
                _logger.LogError(ex, "FirstSignalSnapshotWriter tick failed for tenant {TenantId}.", tenantId);
            }
        }
    }

    // Warms first-signal state for up to maxRuns recent pending/running runs.
    private async Task WarmTenantAsync(
        string tenantId,
        TimeSpan lookback,
        int maxRuns,
        CancellationToken cancellationToken)
    {
        // Resolve per-tick via a scope: repositories/services may be scoped
        // (e.g. tied to a database connection lifetime).
        using var scope = _scopeFactory.CreateScope();
        var runRepository = scope.ServiceProvider.GetRequiredService<IRunRepository>();
        var firstSignalService = scope.ServiceProvider.GetRequiredService<CoreServices.IFirstSignalService>();
        // NOTE(review): wall clock read directly via DateTimeOffset.UtcNow —
        // consider injecting TimeProvider for testability.
        var createdAfter = DateTimeOffset.UtcNow.Subtract(lookback);
        var pending = await runRepository.ListAsync(
            tenantId,
            sourceId: null,
            runType: null,
            status: RunStatus.Pending,
            projectId: null,
            createdAfter: createdAfter,
            createdBefore: null,
            limit: maxRuns,
            offset: 0,
            cancellationToken: cancellationToken).ConfigureAwait(false);
        var running = await runRepository.ListAsync(
            tenantId,
            sourceId: null,
            runType: null,
            status: RunStatus.Running,
            projectId: null,
            createdAfter: createdAfter,
            createdBefore: null,
            limit: maxRuns,
            offset: 0,
            cancellationToken: cancellationToken).ConfigureAwait(false);
        // De-duplicate by run id, oldest first, with a deterministic RunId tie-break.
        var candidates = pending
            .Concat(running)
            .GroupBy(r => r.RunId)
            .Select(g => g.First())
            .OrderBy(r => r.CreatedAt)
            .ThenBy(r => r.RunId)
            .Take(maxRuns)
            .ToList();
        foreach (var run in candidates)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                // Reading the first signal has the side effect of populating
                // the snapshot row and cache entry via the service's cold path.
                await firstSignalService
                    .GetFirstSignalAsync(run.RunId, tenantId, ifNoneMatch: null, cancellationToken)
                    .ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // Best-effort warming: one failing run must not abort the batch.
                _logger.LogWarning(ex, "Failed warming first signal for tenant {TenantId} run {RunId}.", tenantId, run.RunId);
            }
        }
    }
}

View File

@@ -16,6 +16,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/>
<ProjectReference Include="..\..\..\Telemetry\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core.csproj"/>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,53 @@
-- 008_first_signal_snapshots.sql
-- First Signal snapshots for TTFS fast-path (SPRINT_0339_0001_0001_first_signal_api.md)
BEGIN;

-- One row per (tenant_id, run_id) holding the latest first-signal payload.
-- The kind/phase value lists mirror the application's FirstSignalKind and
-- FirstSignalPhase enum names, lower-cased by the writer.
CREATE TABLE first_signal_snapshots (
    tenant_id TEXT NOT NULL,
    run_id UUID NOT NULL,
    job_id UUID NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    kind TEXT NOT NULL CHECK (kind IN (
        'queued',
        'started',
        'phase',
        'blocked',
        'failed',
        'succeeded',
        'canceled',
        'unavailable'
    )),
    phase TEXT NOT NULL CHECK (phase IN (
        'resolve',
        'fetch',
        'restore',
        'analyze',
        'policy',
        'report',
        'unknown'
    )),
    summary TEXT NOT NULL,
    eta_seconds INT NULL,
    last_known_outcome JSONB NULL,
    next_actions JSONB NULL,
    diagnostics JSONB NOT NULL DEFAULT '{}'::jsonb,
    signal_json JSONB NOT NULL,
    -- On a partitioned table the partition key (tenant_id) must be part of
    -- the primary key.
    CONSTRAINT pk_first_signal_snapshots PRIMARY KEY (tenant_id, run_id)
) PARTITION BY LIST (tenant_id);

-- Catch-all partition; dedicated per-tenant partitions can be added later
-- without changing the parent table.
CREATE TABLE first_signal_snapshots_default PARTITION OF first_signal_snapshots DEFAULT;

-- Lookups by job and "recently updated" scans, both tenant-scoped.
CREATE INDEX ix_first_signal_snapshots_job ON first_signal_snapshots (tenant_id, job_id);
CREATE INDEX ix_first_signal_snapshots_updated ON first_signal_snapshots (tenant_id, updated_at DESC);

COMMENT ON TABLE first_signal_snapshots IS 'Per-run cached first-signal payload for TTFS fast path.';
COMMENT ON COLUMN first_signal_snapshots.kind IS 'Current signal kind.';
COMMENT ON COLUMN first_signal_snapshots.phase IS 'Current execution phase.';
COMMENT ON COLUMN first_signal_snapshots.signal_json IS 'Full first-signal payload for ETag and response mapping.';

COMMIT;

View File

@@ -0,0 +1,59 @@
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Options;
using StellaOps.Orchestrator.Infrastructure.Options;
using StellaOps.Orchestrator.WebService.Services;
namespace StellaOps.Orchestrator.Tests.ControlPlane;
/// <summary>
/// Tests for <c>TenantResolver.ResolveForStreaming</c>: header preference,
/// query-string fallback, and the missing-tenant failure mode.
/// </summary>
public sealed class TenantResolverTests
{
    // Canonical tenant header name shared by every test in this class.
    private const string HeaderName = "X-StellaOps-Tenant";

    // Builds a resolver configured with the canonical tenant header.
    private static TenantResolver CreateResolver()
        => new(Options.Create(new OrchestratorServiceOptions
        {
            TenantHeader = HeaderName,
        }));

    [Fact]
    public void ResolveForStreaming_PrefersHeaderWhenPresent()
    {
        // Arrange: padded header value plus a conflicting query parameter.
        var context = new DefaultHttpContext();
        context.Request.Headers[HeaderName] = " acme ";
        context.Request.QueryString = new QueryString("?tenant=ignored");

        // Act
        var tenant = CreateResolver().ResolveForStreaming(context);

        // Assert: header wins over the query string and is trimmed.
        Assert.Equal("acme", tenant);
    }

    [Fact]
    public void ResolveForStreaming_FallsBackToQueryParam()
    {
        // Arrange: no header; tenant arrives URL-encoded with padding.
        var context = new DefaultHttpContext();
        context.Request.QueryString = new QueryString("?tenant=%20acme%20");

        // Act
        var tenant = CreateResolver().ResolveForStreaming(context);

        // Assert: query fallback is used and trimmed.
        Assert.Equal("acme", tenant);
    }

    [Fact]
    public void ResolveForStreaming_ThrowsWhenTenantMissing()
    {
        // Arrange: neither header nor query string carries a tenant.
        var context = new DefaultHttpContext();

        // Act + Assert: the error message mentions the header name and "tenant".
        var ex = Assert.Throws<InvalidOperationException>(() => CreateResolver().ResolveForStreaming(context));
        Assert.Contains(HeaderName, ex.Message);
        Assert.Contains("tenant", ex.Message);
    }
}

View File

@@ -0,0 +1,473 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Messaging;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
using StellaOps.Orchestrator.Core.Repositories;
using StellaOps.Orchestrator.Infrastructure.Caching;
using StellaOps.Orchestrator.Infrastructure.Options;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using StellaOps.Orchestrator.Infrastructure.Services;
using StellaOps.Telemetry.Core;
namespace StellaOps.Orchestrator.Tests.Ttfs;
/// <summary>
/// Unit tests for FirstSignalService covering the cache, snapshot, and cold
/// resolution paths, backed by in-memory fakes of the repositories and cache.
/// </summary>
public sealed class FirstSignalServiceTests
{
    private const string TenantId = "test-tenant";

    // Cold path computes a signal and caches it; replaying the returned ETag
    // via If-None-Match must then yield 304 served from cache.
    [Fact]
    public async Task GetFirstSignalAsync_ColdPathThenCacheHit_IfNoneMatch_Returns304()
    {
        var runId = Guid.NewGuid();
        var jobId = Guid.NewGuid();
        var now = new DateTimeOffset(2025, 12, 15, 12, 0, 0, TimeSpan.Zero);
        var run = new Run(
            RunId: runId,
            TenantId: TenantId,
            ProjectId: null,
            SourceId: Guid.NewGuid(),
            RunType: "scan",
            Status: RunStatus.Pending,
            CorrelationId: "corr-1",
            TotalJobs: 1,
            CompletedJobs: 0,
            SucceededJobs: 0,
            FailedJobs: 0,
            CreatedAt: now,
            StartedAt: null,
            CompletedAt: null,
            CreatedBy: "system",
            Metadata: null);
        var job = new Job(
            JobId: jobId,
            TenantId: TenantId,
            ProjectId: null,
            RunId: runId,
            JobType: "scan.image",
            Status: JobStatus.Scheduled,
            Priority: 0,
            Attempt: 1,
            MaxAttempts: 3,
            PayloadDigest: new string('a', 64),
            Payload: "{}",
            IdempotencyKey: "idem-1",
            CorrelationId: null,
            LeaseId: null,
            WorkerId: null,
            TaskRunnerId: null,
            LeaseUntil: null,
            CreatedAt: now,
            ScheduledAt: now,
            LeasedAt: null,
            CompletedAt: null,
            NotBefore: null,
            Reason: null,
            ReplayOf: null,
            CreatedBy: "system");
        var cache = new FakeFirstSignalCache();
        var snapshots = new FakeFirstSignalSnapshotRepository();
        var runs = new FakeRunRepository(run);
        var jobs = new FakeJobRepository(job);
        using var ttfs = new TimeToFirstSignalMetrics();
        var options = Options.Create(new FirstSignalOptions());
        var service = new FirstSignalService(
            cache,
            snapshots,
            runs,
            jobs,
            TimeProvider.System,
            ttfs,
            options,
            NullLogger<FirstSignalService>.Instance);
        // First request: cold path (no cache, no snapshot).
        var first = await service.GetFirstSignalAsync(runId, TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.Found, first.Status);
        Assert.NotNull(first.ETag);
        Assert.False(first.CacheHit);
        // Second request with matching ETag: 304 from cache.
        var second = await service.GetFirstSignalAsync(runId, TenantId, ifNoneMatch: first.ETag);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.NotModified, second.Status);
        Assert.True(second.CacheHit);
    }

    // A leased job must surface "started" even while Run.Status is still Pending.
    [Fact]
    public async Task GetFirstSignalAsync_RunPendingButJobLeased_ReturnsStarted()
    {
        var runId = Guid.NewGuid();
        var jobId = Guid.NewGuid();
        var now = new DateTimeOffset(2025, 12, 15, 12, 0, 0, TimeSpan.Zero);
        var run = new Run(
            RunId: runId,
            TenantId: TenantId,
            ProjectId: null,
            SourceId: Guid.NewGuid(),
            RunType: "scan",
            Status: RunStatus.Pending,
            CorrelationId: null,
            TotalJobs: 1,
            CompletedJobs: 0,
            SucceededJobs: 0,
            FailedJobs: 0,
            CreatedAt: now,
            StartedAt: null,
            CompletedAt: null,
            CreatedBy: "system",
            Metadata: null);
        var job = new Job(
            JobId: jobId,
            TenantId: TenantId,
            ProjectId: null,
            RunId: runId,
            JobType: "scan.image",
            Status: JobStatus.Leased,
            Priority: 0,
            Attempt: 1,
            MaxAttempts: 3,
            PayloadDigest: new string('a', 64),
            Payload: "{}",
            IdempotencyKey: "idem-1",
            CorrelationId: null,
            LeaseId: Guid.NewGuid(),
            WorkerId: "worker-1",
            TaskRunnerId: null,
            LeaseUntil: now.AddMinutes(5),
            CreatedAt: now,
            ScheduledAt: now,
            LeasedAt: now.AddSeconds(10),
            CompletedAt: null,
            NotBefore: null,
            Reason: null,
            ReplayOf: null,
            CreatedBy: "system");
        using var ttfs = new TimeToFirstSignalMetrics();
        var service = new FirstSignalService(
            cache: new FakeFirstSignalCache(),
            snapshotRepository: new FakeFirstSignalSnapshotRepository(),
            runRepository: new FakeRunRepository(run),
            jobRepository: new FakeJobRepository(job),
            timeProvider: TimeProvider.System,
            ttfsMetrics: ttfs,
            options: Options.Create(new FirstSignalOptions()),
            logger: NullLogger<FirstSignalService>.Instance);
        var result = await service.GetFirstSignalAsync(runId, TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.Found, result.Status);
        Assert.NotNull(result.Signal);
        Assert.Equal(FirstSignalKind.Started, result.Signal!.Kind);
    }

    // Unknown run id maps to NotFound.
    [Fact]
    public async Task GetFirstSignalAsync_RunMissing_Returns404()
    {
        using var ttfs = new TimeToFirstSignalMetrics();
        var service = new FirstSignalService(
            cache: new FakeFirstSignalCache(),
            snapshotRepository: new FakeFirstSignalSnapshotRepository(),
            runRepository: new FakeRunRepository(null),
            jobRepository: new FakeJobRepository(),
            timeProvider: TimeProvider.System,
            ttfsMetrics: ttfs,
            options: Options.Create(new FirstSignalOptions()),
            logger: NullLogger<FirstSignalService>.Instance);
        var result = await service.GetFirstSignalAsync(Guid.NewGuid(), TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.NotFound, result.Status);
    }

    // A run that exists but has no jobs yet yields NotAvailable (HTTP 204).
    [Fact]
    public async Task GetFirstSignalAsync_RunWithNoJobs_Returns204()
    {
        var run = new Run(
            RunId: Guid.NewGuid(),
            TenantId: TenantId,
            ProjectId: null,
            SourceId: Guid.NewGuid(),
            RunType: "scan",
            Status: RunStatus.Pending,
            CorrelationId: null,
            TotalJobs: 0,
            CompletedJobs: 0,
            SucceededJobs: 0,
            FailedJobs: 0,
            CreatedAt: DateTimeOffset.UtcNow,
            StartedAt: null,
            CompletedAt: null,
            CreatedBy: "system",
            Metadata: null);
        using var ttfs = new TimeToFirstSignalMetrics();
        var service = new FirstSignalService(
            cache: new FakeFirstSignalCache(),
            snapshotRepository: new FakeFirstSignalSnapshotRepository(),
            runRepository: new FakeRunRepository(run),
            jobRepository: new FakeJobRepository(),
            timeProvider: TimeProvider.System,
            ttfsMetrics: ttfs,
            options: Options.Create(new FirstSignalOptions()),
            logger: NullLogger<FirstSignalService>.Instance);
        var result = await service.GetFirstSignalAsync(run.RunId, TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.NotAvailable, result.Status);
    }

    // A snapshot hit serves the signal (not from cache) and back-fills the cache
    // so the following request is a cache hit.
    [Fact]
    public async Task GetFirstSignalAsync_SnapshotHit_PopulatesCache()
    {
        var runId = Guid.NewGuid();
        var jobId = Guid.NewGuid();
        var signal = new FirstSignal
        {
            Version = "1.0",
            SignalId = "sig-1",
            JobId = jobId,
            Timestamp = new DateTimeOffset(2025, 12, 15, 12, 0, 0, TimeSpan.Zero),
            Kind = FirstSignalKind.Queued,
            Phase = FirstSignalPhase.Unknown,
            Scope = new FirstSignalScope { Type = "run", Id = runId.ToString("D") },
            Summary = "Run queued",
            EtaSeconds = null,
            LastKnownOutcome = null,
            NextActions = null,
            Diagnostics = new FirstSignalDiagnostics
            {
                CacheHit = false,
                Source = "snapshot",
                CorrelationId = string.Empty
            }
        };
        var snapshotRepo = new FakeFirstSignalSnapshotRepository();
        // Seed the snapshot store with a canonical-JSON payload for the run.
        await snapshotRepo.UpsertAsync(new FirstSignalSnapshot
        {
            TenantId = TenantId,
            RunId = runId,
            JobId = jobId,
            CreatedAt = DateTimeOffset.UtcNow,
            UpdatedAt = DateTimeOffset.UtcNow,
            Kind = "queued",
            Phase = "unknown",
            Summary = "Run queued",
            EtaSeconds = null,
            LastKnownOutcomeJson = null,
            NextActionsJson = null,
            DiagnosticsJson = "{}",
            SignalJson = CanonicalJsonHasher.ToCanonicalJson(signal),
        });
        var cache = new FakeFirstSignalCache();
        using var ttfs = new TimeToFirstSignalMetrics();
        var service = new FirstSignalService(
            cache,
            snapshotRepo,
            runRepository: new FakeRunRepository(null),
            jobRepository: new FakeJobRepository(),
            timeProvider: TimeProvider.System,
            ttfsMetrics: ttfs,
            options: Options.Create(new FirstSignalOptions()),
            logger: NullLogger<FirstSignalService>.Instance);
        var first = await service.GetFirstSignalAsync(runId, TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.Found, first.Status);
        Assert.False(first.CacheHit);
        // Snapshot path must have written a cache entry as a side effect.
        Assert.True(cache.TryGet(TenantId, runId, out _));
        var second = await service.GetFirstSignalAsync(runId, TenantId);
        Assert.Equal(StellaOps.Orchestrator.Core.Services.FirstSignalResultStatus.Found, second.Status);
        Assert.True(second.CacheHit);
    }

    // In-memory cache fake keyed by (tenant, run); exposes TryGet for assertions.
    private sealed class FakeFirstSignalCache : IFirstSignalCache
    {
        private readonly Dictionary<(string TenantId, Guid RunId), FirstSignalCacheEntry> _entries = new();
        public string ProviderName => "fake";
        public ValueTask<CacheResult<FirstSignalCacheEntry>> GetAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
        {
            if (_entries.TryGetValue((tenantId, runId), out var entry))
            {
                return ValueTask.FromResult(CacheResult<FirstSignalCacheEntry>.Found(entry));
            }
            return ValueTask.FromResult(CacheResult<FirstSignalCacheEntry>.Miss());
        }
        public ValueTask SetAsync(string tenantId, Guid runId, FirstSignalCacheEntry entry, CancellationToken cancellationToken = default)
        {
            _entries[(tenantId, runId)] = entry;
            return ValueTask.CompletedTask;
        }
        public ValueTask<bool> InvalidateAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
        {
            return ValueTask.FromResult(_entries.Remove((tenantId, runId)));
        }
        // Synchronous peek used by tests to verify cache back-fill.
        public bool TryGet(string tenantId, Guid runId, out FirstSignalCacheEntry? entry)
        {
            if (_entries.TryGetValue((tenantId, runId), out var value))
            {
                entry = value;
                return true;
            }
            entry = null;
            return false;
        }
    }

    // In-memory snapshot repository fake keyed by (tenant, run).
    private sealed class FakeFirstSignalSnapshotRepository : IFirstSignalSnapshotRepository
    {
        private readonly Dictionary<(string TenantId, Guid RunId), FirstSignalSnapshot> _rows = new();
        public Task<FirstSignalSnapshot?> GetByRunIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
        {
            _rows.TryGetValue((tenantId, runId), out var snapshot);
            return Task.FromResult(snapshot);
        }
        public Task UpsertAsync(FirstSignalSnapshot snapshot, CancellationToken cancellationToken = default)
        {
            _rows[(snapshot.TenantId, snapshot.RunId)] = snapshot;
            return Task.CompletedTask;
        }
        public Task DeleteByRunIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken = default)
        {
            _rows.Remove((tenantId, runId));
            return Task.CompletedTask;
        }
    }

    // Single-run repository fake; only GetByIdAsync is implemented because the
    // service's read path is all these tests exercise.
    private sealed class FakeRunRepository : IRunRepository
    {
        private readonly Run? _run;
        public FakeRunRepository(Run? run) => _run = run;
        public Task<Run?> GetByIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken)
            => Task.FromResult(_run);
        public Task CreateAsync(Run run, CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task UpdateStatusAsync(
            string tenantId,
            Guid runId,
            RunStatus status,
            int totalJobs,
            int completedJobs,
            int succeededJobs,
            int failedJobs,
            DateTimeOffset? startedAt,
            DateTimeOffset? completedAt,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task IncrementJobCountsAsync(string tenantId, Guid runId, bool succeeded, CancellationToken cancellationToken)
            => throw new NotImplementedException();
        public Task<IReadOnlyList<Run>> ListAsync(
            string tenantId,
            Guid? sourceId,
            string? runType,
            RunStatus? status,
            string? projectId,
            DateTimeOffset? createdAfter,
            DateTimeOffset? createdBefore,
            int limit,
            int offset,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task<int> CountAsync(
            string tenantId,
            Guid? sourceId,
            string? runType,
            RunStatus? status,
            string? projectId,
            CancellationToken cancellationToken) => throw new NotImplementedException();
    }

    // Fixed-job repository fake; only GetByRunIdAsync is implemented for the
    // same reason as above.
    private sealed class FakeJobRepository : IJobRepository
    {
        private readonly IReadOnlyList<Job> _jobs;
        public FakeJobRepository(params Job[] jobs) => _jobs = jobs;
        public Task<IReadOnlyList<Job>> GetByRunIdAsync(string tenantId, Guid runId, CancellationToken cancellationToken)
            => Task.FromResult(_jobs.Where(j => j.RunId == runId).ToList() as IReadOnlyList<Job>);
        public Task<Job?> GetByIdAsync(string tenantId, Guid jobId, CancellationToken cancellationToken)
            => throw new NotImplementedException();
        public Task<Job?> GetByIdempotencyKeyAsync(string tenantId, string idempotencyKey, CancellationToken cancellationToken)
            => throw new NotImplementedException();
        public Task CreateAsync(Job job, CancellationToken cancellationToken)
            => throw new NotImplementedException();
        public Task UpdateStatusAsync(
            string tenantId,
            Guid jobId,
            JobStatus status,
            int attempt,
            Guid? leaseId,
            string? workerId,
            string? taskRunnerId,
            DateTimeOffset? leaseUntil,
            DateTimeOffset? scheduledAt,
            DateTimeOffset? leasedAt,
            DateTimeOffset? completedAt,
            DateTimeOffset? notBefore,
            string? reason,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task<Job?> LeaseNextAsync(
            string tenantId,
            string? jobType,
            Guid leaseId,
            string workerId,
            DateTimeOffset leaseUntil,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task<bool> ExtendLeaseAsync(
            string tenantId,
            Guid jobId,
            Guid leaseId,
            DateTimeOffset newLeaseUntil,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task<IReadOnlyList<Job>> GetExpiredLeasesAsync(string tenantId, DateTimeOffset cutoff, int limit, CancellationToken cancellationToken)
            => throw new NotImplementedException();
        public Task<IReadOnlyList<Job>> ListAsync(
            string tenantId,
            JobStatus? status,
            string? jobType,
            string? projectId,
            DateTimeOffset? createdAfter,
            DateTimeOffset? createdBefore,
            int limit,
            int offset,
            CancellationToken cancellationToken) => throw new NotImplementedException();
        public Task<int> CountAsync(
            string tenantId,
            JobStatus? status,
            string? jobType,
            string? projectId,
            CancellationToken cancellationToken) => throw new NotImplementedException();
    }
}

View File

@@ -0,0 +1,33 @@
namespace StellaOps.Orchestrator.WebService.Contracts;
/// <summary>
/// API response for first signal endpoint.
/// </summary>
public sealed record FirstSignalResponse
{
    /// <summary>Run the signal belongs to.</summary>
    public required Guid RunId { get; init; }

    /// <summary>First signal payload; null when no signal is available for the run yet.</summary>
    public required FirstSignalDto? FirstSignal { get; init; }

    /// <summary>ETag of the payload, echoed in the response's ETag header for If-None-Match revalidation.</summary>
    public required string SummaryEtag { get; init; }
}
/// <summary>
/// Wire representation of a single first signal.
/// </summary>
public sealed record FirstSignalDto
{
    /// <summary>Signal type discriminator; exact values are assigned by the mapping layer — confirm against MapToResponse.</summary>
    public required string Type { get; init; }

    /// <summary>Optional pipeline stage; null when not applicable.</summary>
    public string? Stage { get; init; }

    /// <summary>Optional step within the stage; null when not applicable.</summary>
    public string? Step { get; init; }

    /// <summary>Human-readable summary of the signal.</summary>
    public required string Message { get; init; }

    /// <summary>Timestamp at which the signal occurred.</summary>
    public required DateTimeOffset At { get; init; }

    /// <summary>Optional artifact associated with the signal.</summary>
    public FirstSignalArtifactDto? Artifact { get; init; }
}
/// <summary>
/// Artifact reference attached to a first signal.
/// </summary>
public sealed record FirstSignalArtifactDto
{
    /// <summary>Artifact kind discriminator; value set not established here — confirm with the mapping layer.</summary>
    public required string Kind { get; init; }

    /// <summary>Optional range within the artifact; null when the whole artifact is referenced.</summary>
    public FirstSignalRangeDto? Range { get; init; }
}
/// <summary>
/// Range within an artifact.
/// </summary>
public sealed record FirstSignalRangeDto
{
    /// <summary>Range start; inclusive/exclusive semantics not established here — confirm with the producer.</summary>
    public required int Start { get; init; }

    /// <summary>Range end; inclusive/exclusive semantics not established here — confirm with the producer.</summary>
    public required int End { get; init; }
}

View File

@@ -0,0 +1,104 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.Orchestrator.Core.Services;
using StellaOps.Orchestrator.WebService.Contracts;
using StellaOps.Orchestrator.WebService.Services;
namespace StellaOps.Orchestrator.WebService.Endpoints;
/// <summary>
/// REST API endpoint for first signal (TTFS).
/// </summary>
public static class FirstSignalEndpoints
{
/// <summary>
/// Registers the first-signal endpoint under the orchestrator runs route group.
/// </summary>
public static RouteGroupBuilder MapFirstSignalEndpoints(this IEndpointRouteBuilder app)
{
    // All run-scoped routes share the same prefix and OpenAPI tag.
    var group = app
        .MapGroup("/api/v1/orchestrator/runs")
        .WithTags("Orchestrator Runs");

    // GET /{runId}/first-signal — earliest meaningful progress signal for a run.
    group
        .MapGet("{runId:guid}/first-signal", GetFirstSignal)
        .WithName("Orchestrator_GetFirstSignal")
        .WithDescription("Gets the first meaningful signal for a run");

    return group;
}
private static async Task<IResult> GetFirstSignal(
HttpContext context,
[FromRoute] Guid runId,
[FromHeader(Name = "If-None-Match")] string? ifNoneMatch,
[FromServices] TenantResolver tenantResolver,
[FromServices] IFirstSignalService firstSignalService,
CancellationToken cancellationToken)
{
try
{
var tenantId = tenantResolver.Resolve(context);
var result = await firstSignalService
.GetFirstSignalAsync(runId, tenantId, ifNoneMatch, cancellationToken)
.ConfigureAwait(false);
context.Response.Headers["Cache-Status"] = result.CacheHit ? "hit" : "miss";
if (!string.IsNullOrWhiteSpace(result.Source))
{
context.Response.Headers["X-FirstSignal-Source"] = result.Source;
}
if (!string.IsNullOrWhiteSpace(result.ETag))
{
context.Response.Headers.ETag = result.ETag;
context.Response.Headers.CacheControl = "private, max-age=60";
}
return result.Status switch
{
FirstSignalResultStatus.Found => Results.Ok(MapToResponse(runId, result)),
FirstSignalResultStatus.NotModified => Results.StatusCode(StatusCodes.Status304NotModified),
FirstSignalResultStatus.NotFound => Results.NotFound(),
FirstSignalResultStatus.NotAvailable => Results.NoContent(),
_ => Results.Problem("Internal error")
};
}
catch (InvalidOperationException ex)
{
return Results.BadRequest(new { error = ex.Message });
}
catch (ArgumentException ex)
{
return Results.BadRequest(new { error = ex.Message });
}
}
private static FirstSignalResponse MapToResponse(Guid runId, FirstSignalResult result)
{
if (result.Signal is null)
{
return new FirstSignalResponse
{
RunId = runId,
FirstSignal = null,
SummaryEtag = result.ETag ?? string.Empty
};
}
var signal = result.Signal;
return new FirstSignalResponse
{
RunId = runId,
SummaryEtag = result.ETag ?? string.Empty,
FirstSignal = new FirstSignalDto
{
Type = signal.Kind.ToString().ToLowerInvariant(),
Stage = signal.Phase.ToString().ToLowerInvariant(),
Step = null,
Message = signal.Summary,
At = signal.Timestamp,
Artifact = new FirstSignalArtifactDto
{
Kind = signal.Scope.Type,
Range = null
}
}
};
}
}

View File

@@ -47,7 +47,7 @@ public static class StreamEndpoints
{
try
{
var tenantId = tenantResolver.Resolve(context);
var tenantId = tenantResolver.ResolveForStreaming(context);
var job = await jobRepository.GetByIdAsync(tenantId, jobId, cancellationToken).ConfigureAwait(false);
if (job is null)
@@ -83,7 +83,7 @@ public static class StreamEndpoints
{
try
{
var tenantId = tenantResolver.Resolve(context);
var tenantId = tenantResolver.ResolveForStreaming(context);
var run = await runRepository.GetByIdAsync(tenantId, runId, cancellationToken).ConfigureAwait(false);
if (run is null)
@@ -119,7 +119,7 @@ public static class StreamEndpoints
{
try
{
var tenantId = tenantResolver.Resolve(context);
var tenantId = tenantResolver.ResolveForStreaming(context);
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken).ConfigureAwait(false);
if (packRun is null)
{
@@ -158,7 +158,7 @@ public static class StreamEndpoints
return;
}
var tenantId = tenantResolver.Resolve(context);
var tenantId = tenantResolver.ResolveForStreaming(context);
var packRun = await packRunRepository.GetByIdAsync(tenantId, packRunId, cancellationToken).ConfigureAwait(false);
if (packRun is null)
{

View File

@@ -1,5 +1,10 @@
using StellaOps.Messaging.DependencyInjection;
using StellaOps.Messaging.Transport.InMemory;
using StellaOps.Messaging.Transport.Postgres;
using StellaOps.Messaging.Transport.Valkey;
using StellaOps.Orchestrator.Core.Scale;
using StellaOps.Orchestrator.Infrastructure;
using StellaOps.Orchestrator.Infrastructure.Services;
using StellaOps.Orchestrator.WebService.Endpoints;
using StellaOps.Orchestrator.WebService.Services;
using StellaOps.Orchestrator.WebService.Streaming;
@@ -11,6 +16,27 @@ builder.Services.AddRouting(options => options.LowercaseUrls = true);
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddOpenApi();
// Register messaging transport (used for distributed caching primitives).
// Defaults to in-memory unless explicitly configured.
var configuredCacheBackend = builder.Configuration["FirstSignal:Cache:Backend"]?.Trim().ToLowerInvariant();
var configuredTransport = builder.Configuration["messaging:transport"]?.Trim().ToLowerInvariant();
var transport = string.IsNullOrWhiteSpace(configuredCacheBackend) ? configuredTransport : configuredCacheBackend;
switch (transport)
{
case "none":
break;
case "valkey":
builder.Services.AddMessagingTransport<ValkeyTransportPlugin>(builder.Configuration);
break;
case "postgres":
builder.Services.AddMessagingTransport<PostgresTransportPlugin>(builder.Configuration);
break;
default:
builder.Services.AddMessagingTransport<InMemoryTransportPlugin>(builder.Configuration);
break;
}
// Register StellaOps telemetry with OpenTelemetry integration
// Per ORCH-OBS-50-001: Wire StellaOps.Telemetry.Core into orchestrator host
builder.Services.AddStellaOpsTelemetry(
@@ -35,6 +61,9 @@ builder.Services.AddTelemetryContextPropagation();
// Register golden signal metrics for scheduler instrumentation
builder.Services.AddGoldenSignalMetrics();
// Register TTFS metrics for first-signal endpoint/service
builder.Services.AddTimeToFirstSignalMetrics();
// Register incident mode for enhanced telemetry during incidents
builder.Services.AddIncidentMode(builder.Configuration);
@@ -50,9 +79,12 @@ builder.Services.AddSingleton(TimeProvider.System);
// Register streaming options and coordinators
builder.Services.Configure<StreamOptions>(builder.Configuration.GetSection(StreamOptions.SectionName));
builder.Services.AddSingleton<IJobStreamCoordinator, JobStreamCoordinator>();
builder.Services.AddSingleton<IRunStreamCoordinator, RunStreamCoordinator>();
builder.Services.AddSingleton<IPackRunStreamCoordinator, PackRunStreamCoordinator>();
builder.Services.AddScoped<IJobStreamCoordinator, JobStreamCoordinator>();
builder.Services.AddScoped<IRunStreamCoordinator, RunStreamCoordinator>();
builder.Services.AddScoped<IPackRunStreamCoordinator, PackRunStreamCoordinator>();
// Optional TTFS snapshot writer (disabled by default via config)
builder.Services.AddHostedService<FirstSignalSnapshotWriter>();
// Register scale metrics and load shedding services
builder.Services.AddSingleton<ScaleMetrics>();
@@ -85,6 +117,7 @@ app.MapScaleEndpoints();
// Register API endpoints
app.MapSourceEndpoints();
app.MapRunEndpoints();
app.MapFirstSignalEndpoints();
app.MapJobEndpoints();
app.MapDagEndpoints();
app.MapPackRunEndpoints();

View File

@@ -10,6 +10,7 @@ public sealed class TenantResolver
{
private readonly OrchestratorServiceOptions _options;
private const string DefaultTenantHeader = "X-Tenant-Id";
private const string DefaultTenantQueryParam = "tenant";
public TenantResolver(IOptions<OrchestratorServiceOptions> options)
{
@@ -44,6 +45,31 @@ public sealed class TenantResolver
return tenantId.Trim();
}
/// <summary>
/// Resolves the tenant ID for streaming endpoints.
/// EventSource cannot set custom headers, so we allow a query string fallback.
/// </summary>
/// <param name="context">HTTP context.</param>
/// <returns>Tenant ID.</returns>
/// <summary>
/// Resolves the tenant ID for streaming endpoints.
/// EventSource cannot set custom headers, so a query-string fallback is accepted.
/// </summary>
/// <param name="context">HTTP context.</param>
/// <returns>Tenant ID.</returns>
public string ResolveForStreaming(HttpContext context)
{
ArgumentNullException.ThrowIfNull(context);
// Header takes precedence; the query parameter exists only for EventSource clients.
if (TryResolve(context, out var resolved) && !string.IsNullOrWhiteSpace(resolved))
{
return resolved;
}
if (TryResolveFromQuery(context, out resolved) && !string.IsNullOrWhiteSpace(resolved))
{
return resolved;
}
var headerName = _options.TenantHeader ?? DefaultTenantHeader;
throw new InvalidOperationException(
$"Tenant header '{headerName}' or query parameter '{DefaultTenantQueryParam}' is required for Orchestrator streaming operations.");
}
/// <summary>
/// Tries to resolve the tenant ID from the request headers.
/// </summary>
@@ -75,4 +101,23 @@ public sealed class TenantResolver
tenantId = value.Trim();
return true;
}
/// <summary>
/// Tries to read the tenant ID from the request query string.
/// Returns false for a null context or a missing/blank value.
/// </summary>
private static bool TryResolveFromQuery(HttpContext context, out string? tenantId)
{
tenantId = null;
// Null-conditional covers the defensive null-context case in one expression.
var raw = context?.Request.Query[DefaultTenantQueryParam].ToString();
if (string.IsNullOrWhiteSpace(raw))
{
return false;
}
tenantId = raw.Trim();
return true;
}
}

View File

@@ -35,6 +35,11 @@
<ProjectReference Include="..\..\..\Telemetry\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core.csproj"/>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging.Transport.InMemory\StellaOps.Messaging.Transport.InMemory.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging.Transport.Postgres\StellaOps.Messaging.Transport.Postgres.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging.Transport.Valkey\StellaOps.Messaging.Transport.Valkey.csproj" />
</ItemGroup>

View File

@@ -1,6 +1,7 @@
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Services;
using StellaOps.Orchestrator.Infrastructure.Repositories;
namespace StellaOps.Orchestrator.WebService.Streaming;
@@ -24,17 +25,20 @@ public sealed class RunStreamCoordinator : IRunStreamCoordinator
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
private readonly IRunRepository _runRepository;
private readonly IFirstSignalService _firstSignalService;
private readonly TimeProvider _timeProvider;
private readonly ILogger<RunStreamCoordinator> _logger;
private readonly StreamOptions _options;
public RunStreamCoordinator(
IRunRepository runRepository,
IFirstSignalService firstSignalService,
IOptions<StreamOptions> options,
TimeProvider? timeProvider,
ILogger<RunStreamCoordinator> logger)
{
_runRepository = runRepository ?? throw new ArgumentNullException(nameof(runRepository));
_firstSignalService = firstSignalService ?? throw new ArgumentNullException(nameof(firstSignalService));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value.Validate();
@@ -49,9 +53,12 @@ public sealed class RunStreamCoordinator : IRunStreamCoordinator
SseWriter.ConfigureSseHeaders(response);
await SseWriter.WriteRetryAsync(response, _options.ReconnectDelay, cancellationToken).ConfigureAwait(false);
string? lastFirstSignalEtag = null;
var lastRun = initialRun;
await SseWriter.WriteEventAsync(response, "initial", RunSnapshotPayload.FromRun(lastRun), SerializerOptions, cancellationToken).ConfigureAwait(false);
await SseWriter.WriteEventAsync(response, "heartbeat", HeartbeatPayload.Create(_timeProvider.GetUtcNow(), lastRun.RunId.ToString()), SerializerOptions, cancellationToken).ConfigureAwait(false);
lastFirstSignalEtag = await EmitFirstSignalIfUpdatedAsync(response, tenantId, lastRun.RunId, lastFirstSignalEtag, cancellationToken).ConfigureAwait(false);
// If already terminal, send completed and exit
if (IsTerminal(lastRun.Status))
@@ -91,6 +98,8 @@ public sealed class RunStreamCoordinator : IRunStreamCoordinator
break;
}
lastFirstSignalEtag = await EmitFirstSignalIfUpdatedAsync(response, tenantId, current.RunId, lastFirstSignalEtag, cancellationToken).ConfigureAwait(false);
if (HasChanged(lastRun, current))
{
await EmitProgressAsync(response, current, cancellationToken).ConfigureAwait(false);
@@ -162,6 +171,45 @@ public sealed class RunStreamCoordinator : IRunStreamCoordinator
await SseWriter.WriteEventAsync(response, "completed", payload, SerializerOptions, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Emits a "first_signal" SSE event when the first-signal service reports a new
/// signal (i.e. the ETag changed since the last emission). Returns the ETag to carry
/// into the next poll; on any failure the previous ETag is returned so the stream continues.
/// </summary>
private async Task<string?> EmitFirstSignalIfUpdatedAsync(
HttpResponse response,
string tenantId,
Guid runId,
string? lastFirstSignalEtag,
CancellationToken cancellationToken)
{
try
{
// Passing the last ETag lets the service short-circuit with "not modified".
var result = await _firstSignalService
.GetFirstSignalAsync(runId, tenantId, lastFirstSignalEtag, cancellationToken)
.ConfigureAwait(false);
// Only emit when a signal is actually present together with a usable ETag.
if (result.Status != FirstSignalResultStatus.Found || result.Signal is null || string.IsNullOrWhiteSpace(result.ETag))
{
return lastFirstSignalEtag;
}
await SseWriter.WriteEventAsync(
response,
"first_signal",
new { runId, signal = result.Signal, etag = result.ETag },
SerializerOptions,
cancellationToken)
.ConfigureAwait(false);
return result.ETag;
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
// Client disconnect / stream shutdown: not an error, keep the prior ETag.
return lastFirstSignalEtag;
}
catch (Exception ex)
{
// Best-effort: a failed first-signal emission must not tear down the run stream.
_logger.LogWarning(ex, "Failed to emit first_signal event for run {RunId}.", runId);
return lastFirstSignalEtag;
}
}
/// <summary>
/// A run is terminal once it has succeeded (fully or partially), failed, or been canceled.
/// </summary>
private static bool IsTerminal(RunStatus status) => status switch
{
RunStatus.Succeeded or RunStatus.PartiallySucceeded or RunStatus.Failed or RunStatus.Canceled => true,
_ => false
};
}

View File

@@ -21,3 +21,13 @@ Status mirror for `docs/implplan/SPRINT_0152_0001_0002_orchestrator_ii.md`. Upda
| 15 | ORCH-SVC-37-101 | DONE | Scheduled exports, pruning, failure alerting. |
Last synced: 2025-11-30 (UTC).
## SPRINT_0339_0001_0001 First Signal API
Status mirror for `docs/implplan/SPRINT_0339_0001_0001_first_signal_api.md`. Update alongside the sprint file to avoid drift.
| # | Task ID | Status | Notes |
| --- | --- | --- | --- |
| 1 | ORCH-TTFS-0339-001 | DONE | First signal API delivered (service/repo/cache/endpoint/ETag/SSE/tests/docs). |
Last synced: 2025-12-15 (UTC).

View File

@@ -0,0 +1,179 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Scoring;
namespace StellaOps.Policy.Engine.Scoring;
/// <summary>
/// Provides score policies with caching and digest computation.
/// </summary>
public interface IScorePolicyService
{
/// <summary>
/// Gets the active score policy for a tenant.
/// </summary>
/// <param name="tenantId">Tenant identifier; must be non-blank.</param>
/// <returns>The tenant's policy (implementations may cache per tenant).</returns>
ScorePolicy GetPolicy(string tenantId);
/// <summary>
/// Computes the canonical digest of a score policy for determinism tracking.
/// </summary>
/// <param name="policy">Policy to digest.</param>
/// <returns>A "sha256:&lt;hex&gt;"-style digest string.</returns>
string ComputePolicyDigest(ScorePolicy policy);
/// <summary>
/// Gets the cached digest for a tenant's policy, or null if the tenant
/// has not been loaded yet.
/// </summary>
string? GetCachedDigest(string tenantId);
/// <summary>
/// Reloads policies from disk (cache invalidation).
/// </summary>
void Reload();
}
/// <summary>
/// Default <see cref="IScorePolicyService"/>: resolves policies through an
/// <see cref="IScorePolicyProvider"/> and memoizes (policy, digest) pairs per tenant.
/// </summary>
public sealed class ScorePolicyService : IScorePolicyService
{
private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
private readonly IScorePolicyProvider _provider;
private readonly ILogger<ScorePolicyService> _logger;
private readonly ConcurrentDictionary<string, (ScorePolicy Policy, string Digest)> _cache = new();
public ScorePolicyService(
IScorePolicyProvider provider,
ILogger<ScorePolicyService> logger)
{
_provider = provider ?? throw new ArgumentNullException(nameof(provider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc/>
public ScorePolicy GetPolicy(string tenantId)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
return _cache.GetOrAdd(tenantId, LoadEntry).Policy;
}
// Cache-miss path: fetch from the provider and compute the digest once.
private (ScorePolicy Policy, string Digest) LoadEntry(string tenant)
{
var policy = _provider.GetPolicy(tenant);
var digest = ComputePolicyDigest(policy);
_logger.LogInformation(
"Loaded score policy for tenant {TenantId}, digest: {Digest}",
tenant, digest);
return (policy, digest);
}
/// <inheritdoc/>
public string? GetCachedDigest(string tenantId)
=> _cache.TryGetValue(tenantId, out var cached) ? cached.Digest : null;
/// <inheritdoc/>
public string ComputePolicyDigest(ScorePolicy policy)
{
ArgumentNullException.ThrowIfNull(policy);
// Digest over the canonical (camelCase, compact, null-stripped) JSON form.
var canonical = JsonSerializer.Serialize(policy, CanonicalJsonOptions);
var digestBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
return $"sha256:{Convert.ToHexString(digestBytes).ToLowerInvariant()}";
}
/// <inheritdoc/>
public void Reload()
{
var removed = _cache.Count;
_cache.Clear();
_logger.LogInformation("Score policy cache cleared ({Count} entries removed)", removed);
}
}
/// <summary>
/// Provides score policies from a configured source.
/// </summary>
public interface IScorePolicyProvider
{
/// <summary>
/// Gets the score policy for a tenant.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <returns>The resolved policy; implementations decide fallback behavior.</returns>
ScorePolicy GetPolicy(string tenantId);
}
/// <summary>
/// File-based score policy provider.
/// </summary>
public sealed class FileScorePolicyProvider : IScorePolicyProvider
{
private readonly ScorePolicyLoader _loader;
private readonly string _basePath;
private readonly ILogger<FileScorePolicyProvider> _logger;
/// <summary>
/// Creates a provider that reads policies from YAML files under <paramref name="basePath"/>.
/// </summary>
public FileScorePolicyProvider(
ScorePolicyLoader loader,
string basePath,
ILogger<FileScorePolicyProvider> logger)
{
_loader = loader ?? throw new ArgumentNullException(nameof(loader));
_basePath = basePath ?? throw new ArgumentNullException(nameof(basePath));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Resolves the policy for a tenant: tenant-specific file first, then the shared
/// default file, finally the built-in <c>ScorePolicy.Default</c>.
/// </summary>
/// <exception cref="ArgumentException">If the tenant ID is blank or unsafe for a file name.</exception>
public ScorePolicy GetPolicy(string tenantId)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
// tenantId is interpolated into a file name below; reject values that could
// escape _basePath (e.g. "../evil") or form an invalid file name.
if (tenantId.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
{
throw new ArgumentException($"Tenant ID contains invalid path characters: '{tenantId}'", nameof(tenantId));
}
// Try tenant-specific policy first
var tenantPath = Path.Combine(_basePath, $"score-policy.{tenantId}.yaml");
if (File.Exists(tenantPath))
{
_logger.LogDebug("Loading tenant-specific score policy from {Path}", tenantPath);
return _loader.LoadFromFile(tenantPath);
}
// Fall back to default policy
var defaultPath = Path.Combine(_basePath, "score-policy.yaml");
if (File.Exists(defaultPath))
{
_logger.LogDebug("Loading default score policy from {Path}", defaultPath);
return _loader.LoadFromFile(defaultPath);
}
// Use built-in default
_logger.LogDebug("Using built-in default score policy for tenant {TenantId}", tenantId);
return ScorePolicy.Default;
}
}
/// <summary>
/// In-memory score policy provider for testing.
/// </summary>
/// <summary>
/// In-memory score policy provider for testing.
/// </summary>
public sealed class InMemoryScorePolicyProvider : IScorePolicyProvider
{
private readonly ConcurrentDictionary<string, ScorePolicy> _policies = new();
private ScorePolicy _defaultPolicy = ScorePolicy.Default;
/// <summary>
/// Returns the tenant's registered policy, or the configured default when none is set.
/// </summary>
public ScorePolicy GetPolicy(string tenantId)
{
if (_policies.TryGetValue(tenantId, out var registered))
{
return registered;
}
return _defaultPolicy;
}
/// <summary>Registers (or replaces) the policy for a tenant.</summary>
public void SetPolicy(string tenantId, ScorePolicy policy) => _policies[tenantId] = policy;
/// <summary>Replaces the fallback policy returned for unregistered tenants.</summary>
public void SetDefaultPolicy(ScorePolicy policy) => _defaultPolicy = policy;
/// <summary>Removes all registered policies and restores the built-in default.</summary>
public void Clear()
{
_policies.Clear();
_defaultPolicy = ScorePolicy.Default;
}
}

View File

@@ -37,6 +37,7 @@
<ProjectReference Include="../../Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="../StellaOps.Policy.RiskProfile/StellaOps.Policy.RiskProfile.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Policy.Storage.Postgres/StellaOps.Policy.Storage.Postgres.csproj" />
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.ProofSpine/StellaOps.Scanner.ProofSpine.csproj" />
</ItemGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Policy.Engine.Tests" />

View File

@@ -0,0 +1,207 @@
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.Scanner.ProofSpine;
namespace StellaOps.Policy.Engine.Vex;
/// <summary>
/// Service for creating proof spines from VEX decisions.
/// </summary>
public interface IVexProofSpineService
{
/// <summary>
/// Creates a proof spine for a VEX decision.
/// </summary>
/// <param name="statement">The VEX statement to build a spine for.</param>
/// <param name="context">Tenant/scan/policy context used to populate spine segments.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Summary of the created spine.</returns>
Task<ProofSpineResult> CreateSpineAsync(
VexStatement statement,
VexProofSpineContext context,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates proof spines for all statements in a VEX document.
/// </summary>
/// <param name="document">Document whose statements are processed.</param>
/// <param name="context">Shared context applied to each spine.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Results for statements that produced a spine (failures may be skipped).</returns>
Task<IReadOnlyList<ProofSpineResult>> CreateSpinesForDocumentAsync(
VexDecisionDocument document,
VexProofSpineContext context,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Context information for proof spine creation.
/// </summary>
public sealed record VexProofSpineContext
{
/// <summary>Tenant the spine is created for.</summary>
public required string TenantId { get; init; }
/// <summary>Optional scan run identifier, attached to the spine when present.</summary>
public string? ScanId { get; init; }
/// <summary>Optional policy profile identifier; "default" is used when absent.</summary>
public string? PolicyProfileId { get; init; }
/// <summary>Optional SBOM digest; enables the SBOM-slice segment when set.</summary>
public string? SbomDigest { get; init; }
/// <summary>Optional graph digest fallback when the statement evidence has no graph hash.</summary>
public string? GraphDigest { get; init; }
}
/// <summary>
/// Result of proof spine creation.
/// </summary>
public sealed record ProofSpineResult
{
/// <summary>Identifier of the created spine.</summary>
public required string SpineId { get; init; }
/// <summary>Artifact (product) the spine covers; "unknown" when the statement had none.</summary>
public required string ArtifactId { get; init; }
/// <summary>Vulnerability identifier from the VEX statement.</summary>
public required string VulnerabilityId { get; init; }
/// <summary>Final VEX status recorded as the spine verdict.</summary>
public required string Verdict { get; init; }
/// <summary>Number of segments included in the spine.</summary>
public required int SegmentCount { get; init; }
/// <summary>Merkle/root hash of the spine, if computed.</summary>
public string? RootHash { get; init; }
/// <summary>Creation timestamp reported by the spine builder.</summary>
public DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Default implementation of <see cref="IVexProofSpineService"/>.
/// </summary>
/// <summary>
/// Default implementation of <see cref="IVexProofSpineService"/>.
/// Assembles a ProofSpine (SBOM slice, reachability, policy-eval segments) per VEX statement.
/// </summary>
public sealed class VexProofSpineService : IVexProofSpineService
{
private readonly IDsseSigningService _signer;
private readonly ICryptoProfile _cryptoProfile;
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
private readonly ILogger<VexProofSpineService> _logger;
/// <summary>
/// All dependencies are required; nulls fail fast at construction.
/// </summary>
public VexProofSpineService(
IDsseSigningService signer,
ICryptoProfile cryptoProfile,
ICryptoHash cryptoHash,
TimeProvider timeProvider,
ILogger<VexProofSpineService> logger)
{
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
_cryptoProfile = cryptoProfile ?? throw new ArgumentNullException(nameof(cryptoProfile));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
// Tool identity stamped into every spine segment.
private const string ToolId = "stellaops/policy-engine";
private const string ToolVersion = "1.0.0";
/// <inheritdoc/>
public async Task<ProofSpineResult> CreateSpineAsync(
VexStatement statement,
VexProofSpineContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(statement);
ArgumentNullException.ThrowIfNull(context);
// Only the first product is used as the spine's artifact; "unknown" if none listed.
var artifactId = statement.Products.FirstOrDefault()?.Id ?? "unknown";
var vulnId = statement.Vulnerability.Id;
var builder = new ProofSpineBuilder(_signer, _cryptoProfile, _cryptoHash, _timeProvider)
.ForArtifact(artifactId)
.ForVulnerability(vulnId);
if (!string.IsNullOrEmpty(context.ScanId))
{
builder.WithScanRun(context.ScanId);
}
if (!string.IsNullOrEmpty(context.PolicyProfileId))
{
builder.WithPolicyProfile(context.PolicyProfileId);
}
// Add SBOM slice segment if available
if (!string.IsNullOrEmpty(context.SbomDigest))
{
builder.AddSbomSlice(
context.SbomDigest,
new[] { artifactId },
ToolId,
ToolVersion);
}
// Add reachability analysis segment if evidence is present
if (statement.Evidence is not null)
{
// Statement-level graph hash wins; context digest is the fallback.
var graphHash = statement.Evidence.GraphHash ?? context.GraphDigest;
if (!string.IsNullOrEmpty(graphHash))
{
builder.AddReachability(
graphHash,
statement.Evidence.LatticeState ?? "U",
statement.Evidence.Confidence,
statement.Evidence.CallPath?.ToList(),
ToolId,
ToolVersion);
}
}
// Add policy evaluation segment with final verdict
var factors = new Dictionary<string, string>
{
["lattice_state"] = statement.Evidence?.LatticeState ?? "U",
["confidence"] = statement.Evidence?.Confidence.ToString("F2") ?? "0.00"
};
builder.AddPolicyEval(
context.PolicyProfileId ?? "default",
factors,
statement.Status,
statement.Justification ?? "VEX decision based on reachability analysis",
ToolId,
ToolVersion);
// Build the spine
var spine = await builder.BuildAsync(cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Created proof spine {SpineId} for {VulnId}:{ArtifactId} with verdict {Verdict}",
spine.SpineId,
vulnId,
artifactId,
statement.Status);
return new ProofSpineResult
{
SpineId = spine.SpineId,
ArtifactId = artifactId,
VulnerabilityId = vulnId,
Verdict = statement.Status,
SegmentCount = spine.Segments.Count,
RootHash = spine.RootHash,
CreatedAt = spine.CreatedAt
};
}
/// <inheritdoc/>
public async Task<IReadOnlyList<ProofSpineResult>> CreateSpinesForDocumentAsync(
VexDecisionDocument document,
VexProofSpineContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(document);
ArgumentNullException.ThrowIfNull(context);
var results = new List<ProofSpineResult>();
foreach (var statement in document.Statements)
{
try
{
var result = await CreateSpineAsync(statement, context, cancellationToken)
.ConfigureAwait(false);
results.Add(result);
}
catch (Exception ex)
{
// Deliberate best-effort: a failing statement is logged and skipped so the
// remaining statements in the document still get spines.
_logger.LogWarning(
ex,
"Failed to create proof spine for {VulnId}",
statement.Vulnerability.Id);
}
}
_logger.LogInformation(
"Created {Count} proof spines for VEX document {DocumentId}",
results.Count,
document.Id);
return results;
}
}

View File

@@ -0,0 +1,141 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stellaops.org/schemas/score-policy.v1.json",
"title": "StellaOps Score Policy v1",
"description": "Defines deterministic vulnerability scoring weights, buckets, and overrides",
"type": "object",
"required": ["policyVersion", "weightsBps"],
"properties": {
"policyVersion": {
"const": "score.v1",
"description": "Policy schema version"
},
"weightsBps": {
"type": "object",
"description": "Weight distribution in basis points (must sum to 10000)",
"required": ["baseSeverity", "reachability", "evidence", "provenance"],
"properties": {
"baseSeverity": { "type": "integer", "minimum": 0, "maximum": 10000 },
"reachability": { "type": "integer", "minimum": 0, "maximum": 10000 },
"evidence": { "type": "integer", "minimum": 0, "maximum": 10000 },
"provenance": { "type": "integer", "minimum": 0, "maximum": 10000 }
},
"additionalProperties": false
},
"reachability": {
"$ref": "#/$defs/reachabilityConfig"
},
"evidence": {
"$ref": "#/$defs/evidenceConfig"
},
"provenance": {
"$ref": "#/$defs/provenanceConfig"
},
"overrides": {
"type": "array",
"items": { "$ref": "#/$defs/scoreOverride" }
}
},
"additionalProperties": false,
"$defs": {
"reachabilityConfig": {
"type": "object",
"properties": {
"hopBuckets": {
"type": "array",
"items": {
"type": "object",
"required": ["maxHops", "score"],
"properties": {
"maxHops": { "type": "integer", "minimum": 0 },
"score": { "type": "integer", "minimum": 0, "maximum": 100 }
},
"additionalProperties": false
}
},
"unreachableScore": { "type": "integer", "minimum": 0, "maximum": 100 },
"gateMultipliersBps": {
"type": "object",
"properties": {
"featureFlag": { "type": "integer", "minimum": 0, "maximum": 10000 },
"authRequired": { "type": "integer", "minimum": 0, "maximum": 10000 },
"adminOnly": { "type": "integer", "minimum": 0, "maximum": 10000 },
"nonDefaultConfig": { "type": "integer", "minimum": 0, "maximum": 10000 }
},
"additionalProperties": false
}
},
"additionalProperties": false
},
"evidenceConfig": {
"type": "object",
"properties": {
"points": {
"type": "object",
"properties": {
"runtime": { "type": "integer", "minimum": 0, "maximum": 100 },
"dast": { "type": "integer", "minimum": 0, "maximum": 100 },
"sast": { "type": "integer", "minimum": 0, "maximum": 100 },
"sca": { "type": "integer", "minimum": 0, "maximum": 100 }
},
"additionalProperties": false
},
"freshnessBuckets": {
"type": "array",
"items": {
"type": "object",
"required": ["maxAgeDays", "multiplierBps"],
"properties": {
"maxAgeDays": { "type": "integer", "minimum": 0 },
"multiplierBps": { "type": "integer", "minimum": 0, "maximum": 10000 }
},
"additionalProperties": false
}
}
},
"additionalProperties": false
},
"provenanceConfig": {
"type": "object",
"properties": {
"levels": {
"type": "object",
"properties": {
"unsigned": { "type": "integer", "minimum": 0, "maximum": 100 },
"signed": { "type": "integer", "minimum": 0, "maximum": 100 },
"signedWithSbom": { "type": "integer", "minimum": 0, "maximum": 100 },
"signedWithSbomAndAttestations": { "type": "integer", "minimum": 0, "maximum": 100 },
"reproducible": { "type": "integer", "minimum": 0, "maximum": 100 }
},
"additionalProperties": false
}
},
"additionalProperties": false
},
"scoreOverride": {
"type": "object",
"required": ["name", "when"],
"properties": {
"name": { "type": "string", "minLength": 1 },
"when": {
"type": "object",
"properties": {
"flags": {
"type": "object",
"additionalProperties": { "type": "boolean" }
},
"minReachability": { "type": "integer", "minimum": 0, "maximum": 100 },
"maxReachability": { "type": "integer", "minimum": 0, "maximum": 100 },
"minEvidence": { "type": "integer", "minimum": 0, "maximum": 100 },
"maxEvidence": { "type": "integer", "minimum": 0, "maximum": 100 }
},
"additionalProperties": false
},
"setScore": { "type": "integer", "minimum": 0, "maximum": 100 },
"clampMaxScore": { "type": "integer", "minimum": 0, "maximum": 100 },
"clampMinScore": { "type": "integer", "minimum": 0, "maximum": 100 }
},
"additionalProperties": false
}
}
}

View File

@@ -0,0 +1,99 @@
using System.Text;
using YamlDotNet.Core;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Loads score policies from YAML files.
/// </summary>
/// <summary>
/// Loads score policies from YAML files, validating the schema version and weight sums.
/// </summary>
public sealed class ScorePolicyLoader
{
private static readonly IDeserializer Deserializer = new DeserializerBuilder()
.WithNamingConvention(CamelCaseNamingConvention.Instance)
.IgnoreUnmatchedProperties()
.Build();
/// <summary>
/// Loads a score policy from a YAML file.
/// </summary>
/// <param name="path">Path to the YAML file</param>
/// <returns>Parsed score policy</returns>
/// <exception cref="ScorePolicyLoadException">If the file is missing or parsing fails</exception>
public ScorePolicy LoadFromFile(string path)
{
if (string.IsNullOrWhiteSpace(path))
{
throw new ArgumentException("Path cannot be null or empty", nameof(path));
}
if (!File.Exists(path))
{
throw new ScorePolicyLoadException($"Score policy file not found: {path}");
}
return LoadFromYaml(File.ReadAllText(path, Encoding.UTF8), path);
}
/// <summary>
/// Loads a score policy from YAML content.
/// </summary>
/// <param name="yaml">YAML content</param>
/// <param name="source">Source identifier for error messages</param>
/// <returns>Parsed score policy</returns>
/// <exception cref="ScorePolicyLoadException">On parse or validation failure</exception>
public ScorePolicy LoadFromYaml(string yaml, string source = "<inline>")
{
if (string.IsNullOrWhiteSpace(yaml))
{
throw new ScorePolicyLoadException($"Empty YAML content from {source}");
}
ScorePolicy? parsed;
try
{
parsed = Deserializer.Deserialize<ScorePolicy>(yaml);
}
catch (YamlException ex)
{
// Wrap YAML syntax errors with the source identifier for easier diagnosis.
throw new ScorePolicyLoadException($"YAML parse error in {source}: {ex.Message}", ex);
}
if (parsed is null)
{
throw new ScorePolicyLoadException($"Failed to parse score policy from {source}: empty document");
}
// Validate policy version before anything else.
if (parsed.PolicyVersion != "score.v1")
{
throw new ScorePolicyLoadException(
$"Unsupported policy version '{parsed.PolicyVersion}' in {source}. Expected 'score.v1'");
}
// Weights must cover exactly 10000 basis points.
if (!parsed.ValidateWeights())
{
var sum = parsed.WeightsBps.BaseSeverity + parsed.WeightsBps.Reachability +
parsed.WeightsBps.Evidence + parsed.WeightsBps.Provenance;
throw new ScorePolicyLoadException(
$"Weight basis points must sum to 10000 in {source}. Got: {sum}");
}
return parsed;
}
/// <summary>
/// Tries to load a score policy, returning null when loading fails
/// with a <see cref="ScorePolicyLoadException"/>.
/// </summary>
public ScorePolicy? TryLoadFromFile(string path)
{
try
{
return LoadFromFile(path);
}
catch (ScorePolicyLoadException)
{
return null;
}
}
}
/// <summary>
/// Exception thrown when score policy loading fails.
/// </summary>
public sealed class ScorePolicyLoadException : Exception
{
/// <summary>Creates the exception with a descriptive message.</summary>
public ScorePolicyLoadException(string message) : base(message) { }
/// <summary>Creates the exception wrapping an inner cause (e.g. a YAML parse error).</summary>
public ScorePolicyLoadException(string message, Exception inner) : base(message, inner) { }
}

View File

@@ -0,0 +1,173 @@
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Root score policy configuration loaded from YAML.
/// </summary>
/// <summary>
/// Root score policy configuration loaded from YAML (schema version "score.v1").
/// </summary>
public sealed record ScorePolicy
{
public required string PolicyVersion { get; init; }
public required WeightsBps WeightsBps { get; init; }
public ReachabilityPolicyConfig? Reachability { get; init; }
public EvidencePolicyConfig? Evidence { get; init; }
public ProvenancePolicyConfig? Provenance { get; init; }
public IReadOnlyList<ScoreOverride>? Overrides { get; init; }
/// <summary>
/// True when the four weight buckets sum to exactly 10000 basis points.
/// </summary>
public bool ValidateWeights() =>
WeightsBps.BaseSeverity + WeightsBps.Reachability +
WeightsBps.Evidence + WeightsBps.Provenance == 10000;
/// <summary>
/// Built-in default policy: score.v1 with default weights, sub-configs, and no overrides.
/// </summary>
public static ScorePolicy Default => new()
{
PolicyVersion = "score.v1",
WeightsBps = WeightsBps.Default,
Reachability = ReachabilityPolicyConfig.Default,
Evidence = EvidencePolicyConfig.Default,
Provenance = ProvenancePolicyConfig.Default,
Overrides = []
};
}
/// <summary>
/// Weight distribution in basis points. Must sum to 10000.
/// </summary>
public sealed record WeightsBps
{
/// <summary>Weight (bps) for the base severity component.</summary>
public required int BaseSeverity { get; init; }
/// <summary>Weight (bps) for the reachability component.</summary>
public required int Reachability { get; init; }
/// <summary>Weight (bps) for the evidence component.</summary>
public required int Evidence { get; init; }
/// <summary>Weight (bps) for the provenance component.</summary>
public required int Provenance { get; init; }
/// <summary>Standard split: 10% / 45% / 30% / 15% (sums to 10000 bps).</summary>
public static WeightsBps Default => new()
{
BaseSeverity = 1000, // 10%
Reachability = 4500, // 45%
Evidence = 3000, // 30%
Provenance = 1500 // 15%
};
}
/// <summary>
/// Reachability scoring configuration.
/// </summary>
public sealed record ReachabilityPolicyConfig
{
/// <summary>Hop-distance buckets mapping call-graph depth to a score; see Default for the standard layout.</summary>
public IReadOnlyList<HopBucket>? HopBuckets { get; init; }
/// <summary>Score assigned when a finding is not reachable at all.</summary>
public int UnreachableScore { get; init; } = 0;
/// <summary>Multipliers (bps) applied when a path is guarded by a gate (flag, auth, config).</summary>
public GateMultipliersBps? GateMultipliersBps { get; init; }
/// <summary>Default buckets: score decays from 100 (direct call) to 10 (more than 10 hops).</summary>
public static ReachabilityPolicyConfig Default => new()
{
HopBuckets =
[
new HopBucket(0, 100), // Direct call
new HopBucket(1, 90), // 1 hop
new HopBucket(3, 70), // 2-3 hops
new HopBucket(5, 50), // 4-5 hops
new HopBucket(10, 30), // 6-10 hops
new HopBucket(int.MaxValue, 10) // > 10 hops
],
UnreachableScore = 0,
GateMultipliersBps = Scoring.GateMultipliersBps.Default
};
}
/// <summary>
/// One reachability bucket: hop counts up to <paramref name="MaxHops"/> receive
/// <paramref name="Score"/>. Bounds appear inclusive per the comments in
/// ReachabilityPolicyConfig.Default — confirm against the consuming scorer.
/// </summary>
public sealed record HopBucket(int MaxHops, int Score);
/// <summary>
/// Gate multipliers in basis points (10000 = 100%, per this file's bps convention).
/// Presumably applied to the reachability score when a path sits behind the
/// corresponding gate — confirm in the scorer.
/// </summary>
public sealed record GateMultipliersBps
{
/// <summary>Path guarded by a feature flag (7000 bps = 70%).</summary>
public int FeatureFlag { get; init; } = 7000;
/// <summary>Path requires authentication (8000 bps = 80%).</summary>
public int AuthRequired { get; init; } = 8000;
/// <summary>Path restricted to administrators (8500 bps = 85%).</summary>
public int AdminOnly { get; init; } = 8500;
/// <summary>Path only reachable under non-default configuration (7500 bps = 75%).</summary>
public int NonDefaultConfig { get; init; } = 7500;
/// <summary>Default multipliers (the property initializers above).</summary>
public static GateMultipliersBps Default => new();
}
/// <summary>
/// Evidence scoring configuration.
/// </summary>
public sealed record EvidencePolicyConfig
{
/// <summary>Points awarded per evidence source; see <see cref="EvidencePoints"/>.</summary>
public EvidencePoints? Points { get; init; }
/// <summary>Age-based multipliers (bps) that decay evidence value over time; see Default.</summary>
public IReadOnlyList<FreshnessBucket>? FreshnessBuckets { get; init; }
/// <summary>Defaults: full credit for evidence up to a week old, decaying to 10% past a year.</summary>
public static EvidencePolicyConfig Default => new()
{
Points = EvidencePoints.Default,
FreshnessBuckets =
[
new FreshnessBucket(7, 10000), // 0-7 days: 100%
new FreshnessBucket(30, 9000), // 8-30 days: 90%
new FreshnessBucket(90, 7000), // 31-90 days: 70%
new FreshnessBucket(180, 5000), // 91-180 days: 50%
new FreshnessBucket(365, 3000), // 181-365 days: 30%
new FreshnessBucket(int.MaxValue, 1000) // > 1 year: 10%
]
};
}
/// <summary>
/// Points contributed by each evidence source. Runtime evidence carries the
/// highest default weight, SCA the lowest.
/// </summary>
public sealed record EvidencePoints
{
/// <summary>Points for runtime evidence.</summary>
public int Runtime { get; init; } = 60;
/// <summary>Points for dynamic application security testing (DAST) evidence.</summary>
public int Dast { get; init; } = 30;
/// <summary>Points for static application security testing (SAST) evidence.</summary>
public int Sast { get; init; } = 20;
/// <summary>Points for software composition analysis (SCA) evidence.</summary>
public int Sca { get; init; } = 10;
/// <summary>Default point values (the property initializers above).</summary>
public static EvidencePoints Default => new();
}
/// <summary>
/// Maps evidence age to a score multiplier: evidence up to
/// <paramref name="MaxAgeDays"/> days old is scaled by
/// <paramref name="MultiplierBps"/> basis points (10000 = 100%).
/// </summary>
public sealed record FreshnessBucket(int MaxAgeDays, int MultiplierBps);
/// <summary>
/// Provenance scoring configuration.
/// </summary>
public sealed record ProvenancePolicyConfig
{
/// <summary>Scores per provenance maturity level; see <see cref="ProvenanceLevels"/>.</summary>
public ProvenanceLevels? Levels { get; init; }
/// <summary>Default configuration using <see cref="ProvenanceLevels.Default"/>.</summary>
public static ProvenancePolicyConfig Default => new()
{
Levels = ProvenanceLevels.Default
};
}
/// <summary>
/// Score per provenance maturity level, from unsigned artifacts (0) up to
/// reproducible builds (100).
/// </summary>
public sealed record ProvenanceLevels
{
/// <summary>No signature on the artifact.</summary>
public int Unsigned { get; init; } = 0;
/// <summary>Artifact is signed.</summary>
public int Signed { get; init; } = 30;
/// <summary>Signed and accompanied by an SBOM.</summary>
public int SignedWithSbom { get; init; } = 60;
/// <summary>Signed, with SBOM and build attestations.</summary>
public int SignedWithSbomAndAttestations { get; init; } = 80;
/// <summary>Build is reproducible.</summary>
public int Reproducible { get; init; } = 100;
/// <summary>Default level scores (the property initializers above).</summary>
public static ProvenanceLevels Default => new();
}
/// <summary>
/// Score override rule for special conditions.
/// </summary>
public sealed record ScoreOverride
{
/// <summary>Human-readable rule name.</summary>
public required string Name { get; init; }
/// <summary>Condition under which this override applies.</summary>
public required ScoreOverrideCondition When { get; init; }
/// <summary>If set, replaces the computed score outright.</summary>
public int? SetScore { get; init; }
/// <summary>If set, caps the computed score at this maximum.</summary>
public int? ClampMaxScore { get; init; }
/// <summary>If set, raises the computed score to at least this minimum.</summary>
public int? ClampMinScore { get; init; }
}
/// <summary>
/// Condition clause for a <see cref="ScoreOverride"/>. All specified members
/// presumably must match for the rule to fire (AND semantics) — confirm in the
/// evaluator, which is not visible here.
/// </summary>
public sealed record ScoreOverrideCondition
{
/// <summary>Named boolean flags that must hold the given values.</summary>
public IReadOnlyDictionary<string, bool>? Flags { get; init; }
/// <summary>Lower bound on the reachability component score.</summary>
public int? MinReachability { get; init; }
/// <summary>Upper bound on the reachability component score.</summary>
public int? MaxReachability { get; init; }
/// <summary>Lower bound on the evidence component score.</summary>
public int? MinEvidence { get; init; }
/// <summary>Upper bound on the evidence component score.</summary>
public int? MaxEvidence { get; init; }
}

View File

@@ -21,7 +21,9 @@ using StellaOps.Cryptography.Plugin.BouncyCastle;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Policy;
using StellaOps.Scanner.Cache;
using StellaOps.Scanner.Core.Configuration;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.TrustAnchors;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Surface.Secrets;
@@ -71,6 +73,13 @@ builder.Services.AddOptions<ScannerWebServiceOptions>()
})
.ValidateOnStart();
builder.Services.AddSingleton<IValidateOptions<OfflineKitOptions>, OfflineKitOptionsValidator>();
builder.Services.AddOptions<OfflineKitOptions>()
.Bind(builder.Configuration.GetSection(OfflineKitOptions.SectionName))
.ValidateOnStart();
builder.Services.AddSingleton<IPublicKeyLoader, FileSystemPublicKeyLoader>();
builder.Services.AddSingleton<ITrustAnchorRegistry, TrustAnchorRegistry>();
builder.Host.UseSerilog((context, services, loggerConfiguration) =>
{
loggerConfiguration

View File

@@ -4,3 +4,4 @@
| --- | --- | --- | --- |
| `SCAN-API-3101-001` | `docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md` | DOING | Align Scanner OpenAPI spec with current endpoints and include ProofSpine routes; compose into `src/Api/StellaOps.Api.OpenApi/stella.yaml`. |
| `PROOFSPINE-3100-API` | `docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md` | DOING | Implement and test `/api/v1/spines/*` endpoints and wire verification output. |
| `SCAN-AIRGAP-0340-001` | `docs/implplan/SPRINT_0340_0001_0001_scanner_offline_config.md` | BLOCKED | Offline kit verification wiring is blocked on an import pipeline + offline Rekor verifier. |

View File

@@ -0,0 +1,72 @@
namespace StellaOps.Scanner.Worker.Determinism.Calculators;
/// <summary>
/// Calculates Bitwise Fidelity (BF) by comparing SHA-256 hashes of outputs.
/// </summary>
public sealed class BitwiseFidelityCalculator
{
    /// <summary>
    /// Computes BF by comparing hashes across replay runs.
    /// BF = (number of replays whose artifact/hash map matches the baseline exactly) / total replays.
    /// Hash comparison is case-insensitive; artifact names are compared exactly.
    /// </summary>
    /// <param name="baselineHashes">Hashes from baseline run (artifact -> hash)</param>
    /// <param name="replayHashes">Hashes from each replay run</param>
    /// <returns>BF score, count of identical replays, and per-run mismatch details</returns>
    public (double Score, int IdenticalCount, List<FidelityMismatch> Mismatches) Calculate(
        IReadOnlyDictionary<string, string> baselineHashes,
        IReadOnlyList<IReadOnlyDictionary<string, string>> replayHashes)
    {
        ArgumentNullException.ThrowIfNull(baselineHashes);
        ArgumentNullException.ThrowIfNull(replayHashes);

        // No replays to compare against: trivially perfect fidelity.
        if (replayHashes.Count == 0)
            return (1.0, 0, []);

        var identicalCount = 0;
        var mismatches = new List<FidelityMismatch>();
        for (var i = 0; i < replayHashes.Count; i++)
        {
            var replay = replayHashes[i];
            var diffArtifacts = new List<string>();

            // Baseline artifacts that are missing from the replay or hashed differently.
            foreach (var (artifact, baselineHash) in baselineHashes)
            {
                if (!replay.TryGetValue(artifact, out var replayHash) ||
                    !string.Equals(baselineHash, replayHash, StringComparison.OrdinalIgnoreCase))
                {
                    diffArtifacts.Add(artifact);
                }
            }

            // Artifacts present only in the replay. These keys can never already be
            // in diffArtifacts (the loop above only adds baseline keys, and dictionary
            // keys are unique), so no duplicate check is needed.
            foreach (var artifact in replay.Keys)
            {
                if (!baselineHashes.ContainsKey(artifact))
                {
                    diffArtifacts.Add(artifact);
                }
            }

            if (diffArtifacts.Count == 0)
            {
                identicalCount++;
            }
            else
            {
                mismatches.Add(new FidelityMismatch
                {
                    RunIndex = i,
                    Type = FidelityMismatchType.BitwiseOnly,
                    Description = $"Hash mismatch in {diffArtifacts.Count} artifact(s)",
                    AffectedArtifacts = diffArtifacts.OrderBy(a => a, StringComparer.Ordinal).ToList()
                });
            }
        }

        var score = (double)identicalCount / replayHashes.Count;
        return (score, identicalCount, mismatches);
    }
}

View File

@@ -0,0 +1,107 @@
namespace StellaOps.Scanner.Worker.Determinism.Calculators;
/// <summary>
/// Calculates Policy Fidelity (PF) by comparing final policy decisions.
/// </summary>
public sealed class PolicyFidelityCalculator
{
    /// <summary>
    /// Computes PF by comparing policy decisions.
    /// PF = (number of replays whose decision matches the baseline) / total replays.
    /// </summary>
    /// <param name="baseline">Decision from the baseline run.</param>
    /// <param name="replays">Decisions from each replay run.</param>
    /// <returns>PF score, number of matching replays, and per-run mismatch details.</returns>
    public (double Score, int MatchCount, List<FidelityMismatch> Mismatches) Calculate(
        PolicyDecision baseline,
        IReadOnlyList<PolicyDecision> replays)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(replays);

        // Nothing to compare: treat as perfect fidelity.
        if (replays.Count == 0)
            return (1.0, 0, []);

        var matchCount = 0;
        var mismatches = new List<FidelityMismatch>();
        for (var runIndex = 0; runIndex < replays.Count; runIndex++)
        {
            var (isMatch, differences) = CompareDecisions(baseline, replays[runIndex]);
            if (isMatch)
            {
                matchCount++;
                continue;
            }

            mismatches.Add(new FidelityMismatch
            {
                RunIndex = runIndex,
                Type = FidelityMismatchType.PolicyDrift,
                Description = $"Policy decision differs: {string.Join(", ", differences)}",
                AffectedArtifacts = differences
            });
        }

        return ((double)matchCount / replays.Count, matchCount, mismatches);
    }

    /// <summary>
    /// Compares two decisions field by field and returns the differing aspects.
    /// </summary>
    private static (bool IsMatch, List<string> Differences) CompareDecisions(
        PolicyDecision a,
        PolicyDecision b)
    {
        var differences = new List<string>();

        // Overall pass/fail outcome.
        if (a.Passed != b.Passed)
            differences.Add($"outcome:{a.Passed}→{b.Passed}");

        // Reason codes, compared as order-independent sets.
        var aReasons = a.ReasonCodes.OrderBy(r => r, StringComparer.Ordinal).ToList();
        var bReasons = b.ReasonCodes.OrderBy(r => r, StringComparer.Ordinal).ToList();
        if (!aReasons.SequenceEqual(bReasons))
            differences.Add("reason_codes");

        // Violation tally.
        if (a.ViolationCount != b.ViolationCount)
            differences.Add($"violations:{a.ViolationCount}→{b.ViolationCount}");

        // Block level, compared ordinally.
        if (!string.Equals(a.BlockLevel, b.BlockLevel, StringComparison.Ordinal))
            differences.Add($"block_level:{a.BlockLevel}→{b.BlockLevel}");

        return (differences.Count == 0, differences);
    }
}
/// <summary>
/// Represents a policy decision for fidelity comparison.
/// Compared field-by-field by <see cref="PolicyFidelityCalculator"/>;
/// reason codes are compared order-independently.
/// </summary>
public sealed record PolicyDecision
{
/// <summary>
/// Whether the policy evaluation passed (true) or failed (false).
/// </summary>
public required bool Passed { get; init; }
/// <summary>
/// List of reason codes explaining the decision. Order is not significant
/// for fidelity comparison.
/// </summary>
public required IReadOnlyList<string> ReasonCodes { get; init; }
/// <summary>
/// Number of policy violations.
/// </summary>
public required int ViolationCount { get; init; }
/// <summary>
/// Block level: "none", "warn", "block". Compared ordinally (case-sensitive).
/// </summary>
public required string BlockLevel { get; init; }
/// <summary>
/// Policy hash used for this decision. Optional; not part of the
/// fidelity comparison in this file.
/// </summary>
public string? PolicyHash { get; init; }
}

View File

@@ -0,0 +1,106 @@
namespace StellaOps.Scanner.Worker.Determinism.Calculators;
/// <summary>
/// Calculates Semantic Fidelity (SF) by comparing normalized object structures.
/// Ignores formatting differences; compares packages, versions, CVEs, severities, verdicts.
/// </summary>
public sealed class SemanticFidelityCalculator
{
    /// <summary>
    /// Computes SF by comparing normalized findings.
    /// SF = (number of replays semantically equal to the baseline) / total replays.
    /// </summary>
    /// <param name="baseline">Normalized findings from the baseline run.</param>
    /// <param name="replays">Normalized findings from each replay run.</param>
    /// <returns>SF score, number of matching replays, and per-run mismatch details.</returns>
    public (double Score, int MatchCount, List<FidelityMismatch> Mismatches) Calculate(
        NormalizedFindings baseline,
        IReadOnlyList<NormalizedFindings> replays)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(replays);

        // Nothing to compare: treat as perfect fidelity.
        if (replays.Count == 0)
            return (1.0, 0, []);

        var matchCount = 0;
        var mismatches = new List<FidelityMismatch>();
        for (var runIndex = 0; runIndex < replays.Count; runIndex++)
        {
            var (isMatch, differences) = CompareNormalized(baseline, replays[runIndex]);
            if (isMatch)
            {
                matchCount++;
                continue;
            }

            mismatches.Add(new FidelityMismatch
            {
                RunIndex = runIndex,
                Type = FidelityMismatchType.SemanticOnly,
                Description = $"Semantic differences: {string.Join(", ", differences)}",
                AffectedArtifacts = differences
            });
        }

        return ((double)matchCount / replays.Count, matchCount, mismatches);
    }

    /// <summary>
    /// Compares two normalized finding sets aspect by aspect, order-independently.
    /// </summary>
    private static (bool IsMatch, List<string> Differences) CompareNormalized(
        NormalizedFindings a,
        NormalizedFindings b)
    {
        // Multiset equality: sort both sides by the same deterministic string key,
        // then compare element-wise with the element's own equality.
        static bool SameSet<T>(IEnumerable<T> left, IEnumerable<T> right, Func<T, string> key) =>
            left.OrderBy(key, StringComparer.Ordinal)
                .SequenceEqual(right.OrderBy(key, StringComparer.Ordinal));

        var differences = new List<string>();

        // Packages: keyed on (purl, version); record equality does the comparison.
        if (!SameSet(a.Packages, b.Packages, p => $"{p.Purl}\n{p.Version}"))
            differences.Add("packages");

        // CVE identifiers.
        if (!SameSet(a.Cves, b.Cves, c => c))
            differences.Add("cves");

        // Per-severity finding counts (keys and values must both match).
        if (!SameSet(a.SeverityCounts, b.SeverityCounts, kvp => kvp.Key))
            differences.Add("severities");

        // Verdicts (keys and values must both match).
        if (!SameSet(a.Verdicts, b.Verdicts, v => v.Key))
            differences.Add("verdicts");

        return (differences.Count == 0, differences);
    }
}
/// <summary>
/// Normalized findings for semantic comparison.
/// Collections are compared order-independently by <see cref="SemanticFidelityCalculator"/>.
/// </summary>
public sealed record NormalizedFindings
{
/// <summary>Packages observed, as (purl, version) pairs.</summary>
public required IReadOnlyList<NormalizedPackage> Packages { get; init; }
/// <summary>CVE identifiers present in the findings.</summary>
public required IReadOnlySet<string> Cves { get; init; }
/// <summary>Finding count per severity label.</summary>
public required IReadOnlyDictionary<string, int> SeverityCounts { get; init; }
/// <summary>Verdict per key (key semantics not visible here — presumably a finding or component id; confirm against the producer).</summary>
public required IReadOnlyDictionary<string, string> Verdicts { get; init; }
}
/// <summary>
/// Normalized package representation for comparison. Value equality over
/// (Purl, Version) is compiler-generated; the previously declared explicit
/// IEquatable&lt;NormalizedPackage&gt; was redundant because records always
/// implement it, so it has been dropped.
/// </summary>
/// <param name="Purl">Package URL identifying the component.</param>
/// <param name="Version">Resolved package version.</param>
public sealed record NormalizedPackage(string Purl, string Version);

View File

@@ -0,0 +1,86 @@
namespace StellaOps.Scanner.Worker.Determinism;
/// <summary>
/// Three-tier fidelity metrics for deterministic reproducibility measurement.
/// All scores are ratios in range [0.0, 1.0].
/// </summary>
public sealed record FidelityMetrics
{
/// <summary>
/// Bitwise Fidelity (BF): identical_outputs / total_replays
/// Target: >= 0.98 (general), >= 0.95 (regulated)
/// </summary>
public required double BitwiseFidelity { get; init; }
/// <summary>
/// Semantic Fidelity (SF): normalized object comparison match ratio
/// Allows formatting differences, compares: packages, versions, CVEs, severities, verdicts
/// </summary>
public required double SemanticFidelity { get; init; }
/// <summary>
/// Policy Fidelity (PF): policy decision match ratio
/// Compares: pass/fail + reason codes
/// Target: ~1.0 unless policy changed intentionally
/// </summary>
public required double PolicyFidelity { get; init; }
/// <summary>
/// Number of replay runs compared.
/// </summary>
public required int TotalReplays { get; init; }
/// <summary>
/// Number of bitwise-identical outputs.
/// </summary>
public required int IdenticalOutputs { get; init; }
/// <summary>
/// Number of semantically-equivalent outputs.
/// </summary>
public required int SemanticMatches { get; init; }
/// <summary>
/// Number of policy-decision matches.
/// </summary>
public required int PolicyMatches { get; init; }
/// <summary>
/// Computed timestamp (UTC).
/// </summary>
public required DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// Diagnostic information for non-identical runs.
/// Null when no mismatches were recorded.
/// </summary>
public IReadOnlyList<FidelityMismatch>? Mismatches { get; init; }
}
/// <summary>
/// Diagnostic information about a fidelity mismatch.
/// </summary>
public sealed record FidelityMismatch
{
/// <summary>Zero-based index of the replay run that diverged.</summary>
public required int RunIndex { get; init; }
/// <summary>Which tier(s) diverged; see <see cref="FidelityMismatchType"/>.</summary>
public required FidelityMismatchType Type { get; init; }
/// <summary>Human-readable explanation of the divergence.</summary>
public required string Description { get; init; }
/// <summary>Artifacts or aspects that differed, when known.</summary>
public IReadOnlyList<string>? AffectedArtifacts { get; init; }
}
/// <summary>
/// Type of fidelity mismatch.
/// NOTE(review): BitwiseFidelityCalculator assigns <see cref="BitwiseOnly"/> for
/// any hash mismatch without verifying semantic equivalence; the member summaries
/// describe intended meaning, not a property checked at the point of assignment.
/// </summary>
public enum FidelityMismatchType
{
/// <summary>Hash differs but content semantically equivalent</summary>
BitwiseOnly,
/// <summary>Content differs but policy decision matches</summary>
SemanticOnly,
/// <summary>Policy decision differs</summary>
PolicyDrift,
/// <summary>All tiers differ</summary>
Full
}

View File

@@ -0,0 +1,209 @@
using StellaOps.Scanner.Worker.Determinism.Calculators;
namespace StellaOps.Scanner.Worker.Determinism;
/// <summary>
/// Service that orchestrates fidelity metric calculation across all three tiers
/// (bitwise, semantic, policy) and evaluates the results against SLO thresholds.
/// </summary>
public sealed class FidelityMetricsService
{
    private readonly BitwiseFidelityCalculator _bitwiseCalculator;
    private readonly SemanticFidelityCalculator _semanticCalculator;
    private readonly PolicyFidelityCalculator _policyCalculator;

    public FidelityMetricsService()
    {
        _bitwiseCalculator = new BitwiseFidelityCalculator();
        _semanticCalculator = new SemanticFidelityCalculator();
        _policyCalculator = new PolicyFidelityCalculator();
    }

    /// <summary>
    /// Computes all three fidelity metrics for a set of replay runs.
    /// </summary>
    /// <param name="baselineHashes">Artifact hashes from baseline run</param>
    /// <param name="replayHashes">Artifact hashes from each replay run</param>
    /// <param name="baselineFindings">Normalized findings from baseline</param>
    /// <param name="replayFindings">Normalized findings from each replay</param>
    /// <param name="baselineDecision">Policy decision from baseline</param>
    /// <param name="replayDecisions">Policy decisions from each replay</param>
    /// <returns>Complete fidelity metrics</returns>
    public FidelityMetrics Calculate(
        IReadOnlyDictionary<string, string> baselineHashes,
        IReadOnlyList<IReadOnlyDictionary<string, string>> replayHashes,
        NormalizedFindings baselineFindings,
        IReadOnlyList<NormalizedFindings> replayFindings,
        PolicyDecision baselineDecision,
        IReadOnlyList<PolicyDecision> replayDecisions)
    {
        ArgumentNullException.ThrowIfNull(baselineHashes);
        ArgumentNullException.ThrowIfNull(replayHashes);
        ArgumentNullException.ThrowIfNull(baselineFindings);
        ArgumentNullException.ThrowIfNull(replayFindings);
        ArgumentNullException.ThrowIfNull(baselineDecision);
        ArgumentNullException.ThrowIfNull(replayDecisions);

        var (bfScore, bfIdentical, bfMismatches) = _bitwiseCalculator.Calculate(
            baselineHashes, replayHashes);
        var (sfScore, sfMatches, sfMismatches) = _semanticCalculator.Calculate(
            baselineFindings, replayFindings);
        var (pfScore, pfMatches, pfMismatches) = _policyCalculator.Calculate(
            baselineDecision, replayDecisions);

        // Merge per-run diagnostics so each run index appears at most once.
        var allMismatches = CombineMismatches(bfMismatches, sfMismatches, pfMismatches);

        return new FidelityMetrics
        {
            BitwiseFidelity = bfScore,
            SemanticFidelity = sfScore,
            PolicyFidelity = pfScore,
            TotalReplays = replayHashes.Count,
            IdenticalOutputs = bfIdentical,
            SemanticMatches = sfMatches,
            PolicyMatches = pfMatches,
            ComputedAt = DateTimeOffset.UtcNow,
            Mismatches = allMismatches.Count > 0 ? allMismatches : null
        };
    }

    /// <summary>
    /// Evaluates whether the fidelity metrics meet the specified thresholds.
    /// </summary>
    /// <param name="metrics">Computed fidelity metrics</param>
    /// <param name="thresholds">Thresholds to check against</param>
    /// <param name="isRegulated">Whether this is a regulated project (selects the regulated BF threshold)</param>
    /// <returns>Evaluation result with pass/fail and reason</returns>
    public FidelityEvaluation Evaluate(
        FidelityMetrics metrics,
        FidelityThresholds thresholds,
        bool isRegulated = false)
    {
        ArgumentNullException.ThrowIfNull(metrics);
        ArgumentNullException.ThrowIfNull(thresholds);

        var failures = new List<string>();

        var bfThreshold = isRegulated
            ? thresholds.BitwiseFidelityRegulated
            : thresholds.BitwiseFidelityGeneral;
        if (metrics.BitwiseFidelity < bfThreshold)
            failures.Add($"BF {metrics.BitwiseFidelity:P2} < {bfThreshold:P2}");

        if (metrics.SemanticFidelity < thresholds.SemanticFidelity)
            failures.Add($"SF {metrics.SemanticFidelity:P2} < {thresholds.SemanticFidelity:P2}");

        if (metrics.PolicyFidelity < thresholds.PolicyFidelity)
            failures.Add($"PF {metrics.PolicyFidelity:P2} < {thresholds.PolicyFidelity:P2}");

        // BF below the block threshold is reported independently of the
        // pass/fail verdict above.
        var shouldBlock = metrics.BitwiseFidelity < thresholds.BitwiseFidelityBlockThreshold;

        return new FidelityEvaluation
        {
            Passed = failures.Count == 0,
            ShouldBlockRelease = shouldBlock,
            FailureReasons = failures,
            EvaluatedAt = DateTimeOffset.UtcNow
        };
    }

    /// <summary>
    /// Merges per-tier mismatch lists into one list keyed by run index.
    /// A run flagged by more than one tier is escalated to
    /// <see cref="FidelityMismatchType.Full"/>; descriptions are concatenated and
    /// affected-artifact lists are unioned and sorted. (Replaces a switch
    /// expression in which every arm produced Full.)
    /// </summary>
    private static List<FidelityMismatch> CombineMismatches(
        List<FidelityMismatch> bfMismatches,
        List<FidelityMismatch> sfMismatches,
        List<FidelityMismatch> pfMismatches)
    {
        // Escalate an existing entry when the same run also failed another tier.
        static FidelityMismatch Merge(FidelityMismatch existing, FidelityMismatch incoming) => existing with
        {
            Type = FidelityMismatchType.Full,
            Description = $"{existing.Description}; {incoming.Description}",
            AffectedArtifacts = (existing.AffectedArtifacts ?? [])
                .Concat(incoming.AffectedArtifacts ?? [])
                .Distinct()
                .OrderBy(a => a, StringComparer.Ordinal)
                .ToList()
        };

        var combined = new Dictionary<int, FidelityMismatch>();

        // Bitwise mismatches seed the map.
        foreach (var m in bfMismatches)
            combined[m.RunIndex] = m;

        // Semantic mismatches either merge into an existing entry or stand alone.
        foreach (var m in sfMismatches)
            combined[m.RunIndex] = combined.TryGetValue(m.RunIndex, out var existing) ? Merge(existing, m) : m;

        // Policy mismatches likewise.
        foreach (var m in pfMismatches)
            combined[m.RunIndex] = combined.TryGetValue(m.RunIndex, out var existing) ? Merge(existing, m) : m;

        return combined.Values
            .OrderBy(m => m.RunIndex)
            .ToList();
    }
}
/// <summary>
/// Result of evaluating fidelity metrics against thresholds.
/// Produced by <see cref="FidelityMetricsService"/>.Evaluate.
/// </summary>
public sealed record FidelityEvaluation
{
/// <summary>
/// Whether all thresholds were met.
/// </summary>
public required bool Passed { get; init; }
/// <summary>
/// Whether the release should be blocked (BF below
/// FidelityThresholds.BitwiseFidelityBlockThreshold). Set independently
/// of <see cref="Passed"/>.
/// </summary>
public required bool ShouldBlockRelease { get; init; }
/// <summary>
/// List of threshold violations, formatted as "{metric} {actual} &lt; {threshold}".
/// Empty when <see cref="Passed"/> is true.
/// </summary>
public required IReadOnlyList<string> FailureReasons { get; init; }
/// <summary>
/// Timestamp of evaluation (UTC).
/// </summary>
public required DateTimeOffset EvaluatedAt { get; init; }
}

View File

@@ -0,0 +1,42 @@
namespace StellaOps.Scanner.Worker.Determinism;
/// <summary>
/// SLO thresholds for fidelity metrics.
/// Consumed by FidelityMetricsService.Evaluate, which picks the general or
/// regulated BF threshold via its isRegulated flag.
/// </summary>
public sealed record FidelityThresholds
{
/// <summary>
/// Minimum BF for general workloads (default: 0.98)
/// </summary>
public double BitwiseFidelityGeneral { get; init; } = 0.98;
/// <summary>
/// Minimum BF for regulated projects (default: 0.95)
/// </summary>
public double BitwiseFidelityRegulated { get; init; } = 0.95;
/// <summary>
/// Minimum SF (default: 0.99)
/// </summary>
public double SemanticFidelity { get; init; } = 0.99;
/// <summary>
/// Minimum PF (default: 1.0 unless policy changed)
/// </summary>
public double PolicyFidelity { get; init; } = 1.0;
/// <summary>
/// Week-over-week BF drop that triggers warning (default: 0.02 = 2%)
/// NOTE(review): not consumed by the Evaluate method visible in this file;
/// presumably used by trend monitoring elsewhere — confirm.
/// </summary>
public double BitwiseFidelityWarnDrop { get; init; } = 0.02;
/// <summary>
/// Overall BF that triggers page/block release (default: 0.90)
/// </summary>
public double BitwiseFidelityBlockThreshold { get; init; } = 0.90;
/// <summary>
/// Default thresholds.
/// </summary>
public static FidelityThresholds Default => new();
}

View File

@@ -173,6 +173,14 @@ internal sealed class DotNetCallgraphBuilder
var isVirtual = (methodDef.Attributes & MethodAttributes.Virtual) != 0;
var isGeneric = methodDef.GetGenericParameters().Count > 0;
// Extract visibility from MethodAttributes
var visibility = ExtractVisibility(methodDef.Attributes);
// Determine if this method is an entrypoint candidate
var isTypePublic = (typeDef.Attributes & TypeAttributes.Public) != 0 ||
(typeDef.Attributes & TypeAttributes.NestedPublic) != 0;
var isEntrypointCandidate = isPublic && isTypePublic && !methodName.StartsWith("<");
var node = new DotNetMethodNode(
MethodId: methodId,
AssemblyName: assemblyName,
@@ -186,7 +194,9 @@ internal sealed class DotNetCallgraphBuilder
IsStatic: isStatic,
IsPublic: isPublic,
IsVirtual: isVirtual,
IsGeneric: isGeneric);
IsGeneric: isGeneric,
Visibility: visibility,
IsEntrypointCandidate: isEntrypointCandidate);
_methods.TryAdd(methodId, node);
@@ -254,6 +264,7 @@ internal sealed class DotNetCallgraphBuilder
!methodName.StartsWith("get_") && !methodName.StartsWith("set_") &&
methodName != ".ctor")
{
var (routeTemplate, httpMethod) = ExtractRouteInfo(metadata, methodDef.GetCustomAttributes());
var rootId = DotNetGraphIdentifiers.ComputeRootId(DotNetRootPhase.Runtime, rootOrder++, methodId);
_roots.Add(new DotNetSyntheticRoot(
RootId: rootId,
@@ -262,14 +273,29 @@ internal sealed class DotNetCallgraphBuilder
Source: "ControllerAction",
AssemblyPath: assemblyPath,
Phase: DotNetRootPhase.Runtime,
Order: rootOrder - 1));
Order: rootOrder - 1,
RouteTemplate: routeTemplate,
HttpMethod: httpMethod,
Framework: DotNetEntrypointFramework.AspNetCore));
}
// Test methods (xUnit, NUnit, MSTest)
var testFramework = DotNetEntrypointFramework.Unknown;
if (HasAttribute(metadata, methodDef.GetCustomAttributes(), "Xunit.FactAttribute") ||
HasAttribute(metadata, methodDef.GetCustomAttributes(), "Xunit.TheoryAttribute") ||
HasAttribute(metadata, methodDef.GetCustomAttributes(), "NUnit.Framework.TestAttribute") ||
HasAttribute(metadata, methodDef.GetCustomAttributes(), "Microsoft.VisualStudio.TestTools.UnitTesting.TestMethodAttribute"))
HasAttribute(metadata, methodDef.GetCustomAttributes(), "Xunit.TheoryAttribute"))
{
testFramework = DotNetEntrypointFramework.XUnit;
}
else if (HasAttribute(metadata, methodDef.GetCustomAttributes(), "NUnit.Framework.TestAttribute"))
{
testFramework = DotNetEntrypointFramework.NUnit;
}
else if (HasAttribute(metadata, methodDef.GetCustomAttributes(), "Microsoft.VisualStudio.TestTools.UnitTesting.TestMethodAttribute"))
{
testFramework = DotNetEntrypointFramework.MSTest;
}
if (testFramework != DotNetEntrypointFramework.Unknown)
{
var rootId = DotNetGraphIdentifiers.ComputeRootId(DotNetRootPhase.Runtime, rootOrder++, methodId);
_roots.Add(new DotNetSyntheticRoot(
@@ -279,7 +305,8 @@ internal sealed class DotNetCallgraphBuilder
Source: "TestMethod",
AssemblyPath: assemblyPath,
Phase: DotNetRootPhase.Runtime,
Order: rootOrder - 1));
Order: rootOrder - 1,
Framework: testFramework));
}
// Azure Functions
@@ -294,7 +321,8 @@ internal sealed class DotNetCallgraphBuilder
Source: "AzureFunction",
AssemblyPath: assemblyPath,
Phase: DotNetRootPhase.Runtime,
Order: rootOrder - 1));
Order: rootOrder - 1,
Framework: DotNetEntrypointFramework.AzureFunctions));
}
// AWS Lambda
@@ -308,10 +336,120 @@ internal sealed class DotNetCallgraphBuilder
Source: "LambdaHandler",
AssemblyPath: assemblyPath,
Phase: DotNetRootPhase.Runtime,
Order: rootOrder - 1));
Order: rootOrder - 1,
Framework: DotNetEntrypointFramework.AwsLambda));
}
}
// Attribute simple-name markers mapped to HTTP verbs, probed in this order.
private static readonly (string Marker, string Verb)[] HttpVerbAttributes =
[
    ("HttpGetAttribute", "GET"),
    ("HttpPostAttribute", "POST"),
    ("HttpPutAttribute", "PUT"),
    ("HttpDeleteAttribute", "DELETE"),
    ("HttpPatchAttribute", "PATCH"),
];

/// <summary>
/// Resolves the declaring type name of a custom attribute's constructor,
/// or null when the constructor handle kind is not recognized.
/// </summary>
private static string? ResolveAttributeTypeName(MetadataReader metadata, CustomAttribute attr)
{
    var ctorHandle = attr.Constructor;
    switch (ctorHandle.Kind)
    {
        case HandleKind.MemberReference:
        {
            var memberRef = metadata.GetMemberReference((MemberReferenceHandle)ctorHandle);
            if (memberRef.Parent.Kind == HandleKind.TypeReference)
            {
                var typeRef = metadata.GetTypeReference((TypeReferenceHandle)memberRef.Parent);
                return GetTypeRefName(metadata, typeRef);
            }
            return null;
        }
        case HandleKind.MethodDefinition:
        {
            var methodDef = metadata.GetMethodDefinition((MethodDefinitionHandle)ctorHandle);
            var declaringType = metadata.GetTypeDefinition(methodDef.GetDeclaringType());
            return GetFullTypeName(metadata, declaringType);
        }
        default:
            return null;
    }
}

/// <summary>
/// Scans a method's custom attributes for ASP.NET Core routing metadata:
/// [Route] supplies a template, Http{Verb} attributes supply the verb and may
/// carry an optional template. The first template found wins.
/// </summary>
private static (string? RouteTemplate, string? HttpMethod) ExtractRouteInfo(
    MetadataReader metadata,
    CustomAttributeHandleCollection attributes)
{
    string? routeTemplate = null;
    string? httpMethod = null;
    foreach (var attrHandle in attributes)
    {
        var attr = metadata.GetCustomAttribute(attrHandle);
        var typeName = ResolveAttributeTypeName(metadata, attr);
        if (typeName is null)
        {
            continue;
        }

        // [Route("...")] carries a template but no verb.
        if (typeName.Contains("RouteAttribute"))
        {
            routeTemplate ??= TryExtractStringArgument(metadata, attr);
        }

        // Http{Verb}Attribute carries the verb and possibly a template.
        foreach (var (marker, verb) in HttpVerbAttributes)
        {
            if (typeName.Contains(marker))
            {
                httpMethod = verb;
                routeTemplate ??= TryExtractStringArgument(metadata, attr);
                break;
            }
        }
    }
    return (routeTemplate, httpMethod);
}
/// <summary>
/// Best-effort read of the first fixed string argument from an attribute blob.
/// Returns null for missing, non-string, or empty arguments, and on any decode error.
/// </summary>
private static string? TryExtractStringArgument(MetadataReader metadata, CustomAttribute attr)
{
    // Simplified extraction - read first string argument from attribute blob
    // Full implementation would properly parse the custom attribute blob
    try
    {
        var decoded = attr.DecodeValue(new SimpleAttributeProvider());
        return decoded.FixedArguments is [{ Value: string text }, ..] && text.Length > 0
            ? text
            : null;
    }
    catch
    {
        // Attribute decoding failed - not critical
        return null;
    }
}
/// <summary>
/// Simple attribute type provider for decoding custom attributes.
/// Returns placeholder values for most type queries, which is sufficient here
/// because TryExtractStringArgument only reads string fixed arguments.
/// NOTE(review): GetUnderlyingEnumType always reports Int32, so enum-typed
/// attribute arguments may decode incorrectly — acceptable for current usage.
/// </summary>
private sealed class SimpleAttributeProvider : ICustomAttributeTypeProvider<object?>
{
public object? GetPrimitiveType(PrimitiveTypeCode typeCode) => null;
public object? GetTypeFromDefinition(MetadataReader reader, TypeDefinitionHandle handle, byte rawTypeKind) => null;
public object? GetTypeFromReference(MetadataReader reader, TypeReferenceHandle handle, byte rawTypeKind) => null;
public object? GetSZArrayType(object? elementType) => null;
public object? GetSystemType() => typeof(Type);
public object? GetTypeFromSerializedName(string name) => Type.GetType(name);
public PrimitiveTypeCode GetUnderlyingEnumType(object? type) => PrimitiveTypeCode.Int32;
public bool IsSystemType(object? type) => type is Type;
}
private void ExtractCallEdgesFromType(
MetadataReader metadata,
TypeDefinition typeDef,
@@ -390,15 +528,15 @@ internal sealed class DotNetCallgraphBuilder
var token = BitConverter.ToInt32(ilBytes, offset);
offset += 4;
var edgeType = opcode switch
var (edgeType, edgeReason) = opcode switch
{
0x28 => DotNetEdgeType.Call,
0x6F => DotNetEdgeType.CallVirt,
0x73 => DotNetEdgeType.NewObj,
_ => DotNetEdgeType.Call,
0x28 => (DotNetEdgeType.Call, DotNetEdgeReason.DirectCall),
0x6F => (DotNetEdgeType.CallVirt, DotNetEdgeReason.VirtualCall),
0x73 => (DotNetEdgeType.NewObj, DotNetEdgeReason.NewObj),
_ => (DotNetEdgeType.Call, DotNetEdgeReason.DirectCall),
};
AddCallEdge(metadata, callerId, token, ilOffset, edgeType, assemblyName, assemblyPath);
AddCallEdge(metadata, callerId, token, ilOffset, edgeType, edgeReason, assemblyName, assemblyPath);
break;
}
case 0xFE06: // ldftn (0xFE 0x06)
@@ -413,7 +551,7 @@ internal sealed class DotNetCallgraphBuilder
offset += 4;
var edgeType = opcode == 0xFE06 ? DotNetEdgeType.LdFtn : DotNetEdgeType.LdVirtFtn;
AddCallEdge(metadata, callerId, token, ilOffset, edgeType, assemblyName, assemblyPath);
AddCallEdge(metadata, callerId, token, ilOffset, edgeType, DotNetEdgeReason.DelegateCreate, assemblyName, assemblyPath);
break;
}
case 0x29: // calli
@@ -436,6 +574,7 @@ internal sealed class DotNetCallgraphBuilder
CalleePurl: null,
CalleeMethodDigest: null,
EdgeType: DotNetEdgeType.CallI,
EdgeReason: DotNetEdgeReason.IndirectCall,
ILOffset: ilOffset,
IsResolved: false,
Confidence: 0.2));
@@ -470,6 +609,7 @@ internal sealed class DotNetCallgraphBuilder
int token,
int ilOffset,
DotNetEdgeType edgeType,
DotNetEdgeReason edgeReason,
string assemblyName,
string assemblyPath)
{
@@ -517,8 +657,8 @@ internal sealed class DotNetCallgraphBuilder
case HandleKind.MethodSpecification:
{
var methodSpec = metadata.GetMethodSpecification((MethodSpecificationHandle)handle);
// Recursively resolve the generic method
AddCallEdge(metadata, callerId, MetadataTokens.GetToken(methodSpec.Method), ilOffset, edgeType, assemblyName, assemblyPath);
// Recursively resolve the generic method - use GenericInstantiation reason
AddCallEdge(metadata, callerId, MetadataTokens.GetToken(methodSpec.Method), ilOffset, edgeType, DotNetEdgeReason.GenericInstantiation, assemblyName, assemblyPath);
return;
}
default:
@@ -549,6 +689,7 @@ internal sealed class DotNetCallgraphBuilder
CalleePurl: calleePurl,
CalleeMethodDigest: null,
EdgeType: edgeType,
EdgeReason: edgeReason,
ILOffset: ilOffset,
IsResolved: isResolved,
Confidence: isResolved ? 1.0 : 0.7));
@@ -788,4 +929,19 @@ internal sealed class DotNetCallgraphBuilder
_ => 1, // default for unrecognized
};
}
/// <summary>
/// Maps a method's <see cref="MethodAttributes"/> access bits to a <c>DotNetVisibility</c> level.
/// </summary>
private static DotNetVisibility ExtractVisibility(MethodAttributes attributes) =>
    (attributes & MethodAttributes.MemberAccessMask) switch
    {
        MethodAttributes.Public => DotNetVisibility.Public,
        MethodAttributes.Private => DotNetVisibility.Private,
        MethodAttributes.Family => DotNetVisibility.Protected,
        MethodAttributes.Assembly => DotNetVisibility.Internal,
        MethodAttributes.FamORAssem => DotNetVisibility.ProtectedInternal,
        MethodAttributes.FamANDAssem => DotNetVisibility.PrivateProtected,
        // Remaining access values (e.g. PrivateScope) fall back to the most restrictive level.
        _ => DotNetVisibility.Private
    };
}

View File

@@ -32,6 +32,8 @@ public sealed record DotNetReachabilityGraph(
/// <param name="IsPublic">Whether the method is public.</param>
/// <param name="IsVirtual">Whether the method is virtual.</param>
/// <param name="IsGeneric">Whether the method has generic parameters.</param>
/// <param name="Visibility">Access visibility (public, private, protected, internal, etc.).</param>
/// <param name="IsEntrypointCandidate">Whether this method could be an entrypoint (public, controller action, etc.).</param>
public sealed record DotNetMethodNode(
string MethodId,
string AssemblyName,
@@ -45,7 +47,33 @@ public sealed record DotNetMethodNode(
bool IsStatic,
bool IsPublic,
bool IsVirtual,
bool IsGeneric);
bool IsGeneric,
DotNetVisibility Visibility,
bool IsEntrypointCandidate);
/// <summary>
/// Access visibility levels for .NET methods, derived from the
/// <c>MethodAttributes.MemberAccessMask</c> bits of the method definition.
/// </summary>
public enum DotNetVisibility
{
    /// <summary>Accessible from anywhere (C# <c>public</c>).</summary>
    Public,

    /// <summary>Accessible only within the same type (C# <c>private</c>).</summary>
    Private,

    /// <summary>Accessible within the same type or derived types (C# <c>protected</c>).</summary>
    Protected,

    /// <summary>Accessible within the same assembly (C# <c>internal</c>).</summary>
    Internal,

    /// <summary>Accessible within the same assembly or derived types (C# <c>protected internal</c>).</summary>
    ProtectedInternal,

    /// <summary>Accessible only within derived types in the same assembly (C# <c>private protected</c>).</summary>
    PrivateProtected
}
/// <summary>
/// A call edge in the .NET call graph.
@@ -56,6 +84,7 @@ public sealed record DotNetMethodNode(
/// <param name="CalleePurl">PURL of the callee if resolvable.</param>
/// <param name="CalleeMethodDigest">Method digest of the callee.</param>
/// <param name="EdgeType">Type of edge (call instruction type).</param>
/// <param name="EdgeReason">Semantic reason for the edge (DirectCall, VirtualCall, etc.).</param>
/// <param name="ILOffset">IL offset where call occurs.</param>
/// <param name="IsResolved">Whether the callee was successfully resolved.</param>
/// <param name="Confidence">Confidence level (1.0 for resolved, lower for heuristic).</param>
@@ -66,6 +95,7 @@ public sealed record DotNetCallEdge(
string? CalleePurl,
string? CalleeMethodDigest,
DotNetEdgeType EdgeType,
DotNetEdgeReason EdgeReason,
int ILOffset,
bool IsResolved,
double Confidence);
@@ -103,6 +133,52 @@ public enum DotNetEdgeType
Dynamic,
}
/// <summary>
/// Semantic reason for why a .NET edge exists.
/// Maps to the schema's EdgeReason enum for explainability.
/// </summary>
public enum DotNetEdgeReason
{
    /// <summary>Direct method call (call opcode, 0x28).</summary>
    DirectCall,

    /// <summary>Virtual/interface dispatch (callvirt opcode, 0x6F).</summary>
    VirtualCall,

    /// <summary>Reflection-based invocation (Type.GetMethod, etc.).</summary>
    ReflectionString,

    /// <summary>Dependency injection binding.</summary>
    DiBinding,

    /// <summary>Dynamic import or late binding.</summary>
    DynamicImport,

    /// <summary>Constructor/object instantiation (newobj opcode, 0x73).</summary>
    NewObj,

    /// <summary>Delegate/function pointer creation (ldftn, ldvirtftn).</summary>
    DelegateCreate,

    /// <summary>Async/await continuation.</summary>
    AsyncContinuation,

    /// <summary>Event handler subscription.</summary>
    EventHandler,

    /// <summary>Generic type instantiation (call through a MethodSpecification).</summary>
    GenericInstantiation,

    /// <summary>Native interop (P/Invoke).</summary>
    NativeInterop,

    /// <summary>Indirect call through function pointer (calli opcode).</summary>
    IndirectCall,

    /// <summary>Fallback when the reason could not be determined.</summary>
    Unknown
}
/// <summary>
/// A synthetic root in the .NET call graph.
/// </summary>
@@ -114,6 +190,9 @@ public enum DotNetEdgeType
/// <param name="Phase">Execution phase.</param>
/// <param name="Order">Order within the phase.</param>
/// <param name="IsResolved">Whether the target was successfully resolved.</param>
/// <param name="RouteTemplate">HTTP route template if applicable (e.g., "/api/orders/{id}").</param>
/// <param name="HttpMethod">HTTP method if applicable (GET, POST, etc.).</param>
/// <param name="Framework">Framework exposing this entrypoint.</param>
public sealed record DotNetSyntheticRoot(
string RootId,
string TargetId,
@@ -122,7 +201,43 @@ public sealed record DotNetSyntheticRoot(
string AssemblyPath,
DotNetRootPhase Phase,
int Order,
bool IsResolved = true);
bool IsResolved = true,
string? RouteTemplate = null,
string? HttpMethod = null,
DotNetEntrypointFramework Framework = DotNetEntrypointFramework.Unknown);
/// <summary>
/// Frameworks that expose .NET entrypoints.
/// Used by synthetic roots to record which host framework invokes the target.
/// </summary>
public enum DotNetEntrypointFramework
{
    /// <summary>Unknown framework (default for roots without framework detection).</summary>
    Unknown,

    /// <summary>ASP.NET Core MVC/WebAPI.</summary>
    AspNetCore,

    /// <summary>ASP.NET Core Minimal APIs.</summary>
    MinimalApi,

    /// <summary>gRPC for .NET.</summary>
    Grpc,

    /// <summary>Azure Functions.</summary>
    AzureFunctions,

    /// <summary>AWS Lambda.</summary>
    AwsLambda,

    /// <summary>xUnit test framework.</summary>
    XUnit,

    /// <summary>NUnit test framework.</summary>
    NUnit,

    /// <summary>MSTest framework.</summary>
    MSTest
}
/// <summary>
/// Execution phase for .NET synthetic roots.

View File

@@ -108,12 +108,12 @@ internal sealed class JavaCallgraphBuilder
var edgeId = JavaGraphIdentifiers.ComputeEdgeId(callerId, calleeId, edge.InstructionOffset);
var confidence = edge.Confidence == JavaReflectionConfidence.High ? 0.9 : 0.5;
var edgeType = edge.Reason switch
var (edgeType, edgeReason) = edge.Reason switch
{
JavaReflectionReason.ClassForName => JavaEdgeType.Reflection,
JavaReflectionReason.ClassLoaderLoadClass => JavaEdgeType.Reflection,
JavaReflectionReason.ServiceLoaderLoad => JavaEdgeType.ServiceLoader,
_ => JavaEdgeType.Reflection,
JavaReflectionReason.ClassForName => (JavaEdgeType.Reflection, JavaEdgeReason.ReflectionString),
JavaReflectionReason.ClassLoaderLoadClass => (JavaEdgeType.Reflection, JavaEdgeReason.ReflectionString),
JavaReflectionReason.ServiceLoaderLoad => (JavaEdgeType.ServiceLoader, JavaEdgeReason.ServiceLoader),
_ => (JavaEdgeType.Reflection, JavaEdgeReason.ReflectionString),
};
_edges.Add(new JavaCallEdge(
@@ -123,6 +123,7 @@ internal sealed class JavaCallgraphBuilder
CalleePurl: null, // Reflection targets often unknown
CalleeMethodDigest: null,
EdgeType: edgeType,
EdgeReason: edgeReason,
BytecodeOffset: edge.InstructionOffset,
IsResolved: isResolved,
Confidence: confidence));
@@ -229,6 +230,16 @@ internal sealed class JavaCallgraphBuilder
var isSynthetic = (method.AccessFlags & 0x1000) != 0;
var isBridge = (method.AccessFlags & 0x0040) != 0;
// Extract visibility from access flags
var visibility = ExtractVisibility(method.AccessFlags);
// Determine if this method is an entrypoint candidate
// Public non-synthetic methods that aren't constructors or accessors
var isEntrypointCandidate = isPublic &&
!isSynthetic &&
!method.Name.StartsWith("<") &&
!method.Name.StartsWith("lambda$");
var node = new JavaMethodNode(
MethodId: methodId,
ClassName: className,
@@ -241,11 +252,34 @@ internal sealed class JavaCallgraphBuilder
IsStatic: isStatic,
IsPublic: isPublic,
IsSynthetic: isSynthetic,
IsBridge: isBridge);
IsBridge: isBridge,
Visibility: visibility,
IsEntrypointCandidate: isEntrypointCandidate);
_methods.TryAdd(methodId, node);
}
/// <summary>
/// Maps JVM method access_flags to a <c>JavaVisibility</c> level.
/// </summary>
/// <param name="accessFlags">Raw access_flags from the class-file method_info.</param>
private static JavaVisibility ExtractVisibility(int accessFlags)
{
    // JVM spec §4.6 access_flags bits. A valid class file sets at most one of these;
    // none set means package-private (default access).
    const int AccPublic = 0x0001;
    const int AccPrivate = 0x0002;
    const int AccProtected = 0x0004;

    if ((accessFlags & AccPublic) != 0)
    {
        return JavaVisibility.Public;
    }

    if ((accessFlags & AccPrivate) != 0)
    {
        return JavaVisibility.Private;
    }

    if ((accessFlags & AccProtected) != 0)
    {
        return JavaVisibility.Protected;
    }

    // No visibility flag set: package-private (default access).
    return JavaVisibility.Package;
}
private void FindSyntheticRoots(string className, JavaClassFileParser.ClassFile classFile, string jarPath)
{
var rootOrder = 0;
@@ -380,13 +414,14 @@ internal sealed class JavaCallgraphBuilder
methodRef.Value.Name,
methodRef.Value.Descriptor);
var edgeType = opcode switch
var (edgeType, edgeReason) = opcode switch
{
0xB8 => JavaEdgeType.InvokeStatic,
0xB6 => JavaEdgeType.InvokeVirtual,
0xB7 => methodRef.Value.Name == "<init>" ? JavaEdgeType.Constructor : JavaEdgeType.InvokeSpecial,
0xB9 => JavaEdgeType.InvokeInterface,
_ => JavaEdgeType.InvokeVirtual,
0xB8 => (JavaEdgeType.InvokeStatic, JavaEdgeReason.DirectCall),
0xB6 => (JavaEdgeType.InvokeVirtual, JavaEdgeReason.VirtualCall),
0xB7 when methodRef.Value.Name == "<init>" => (JavaEdgeType.Constructor, JavaEdgeReason.NewObj),
0xB7 => (JavaEdgeType.InvokeSpecial, JavaEdgeReason.SuperCall),
0xB9 => (JavaEdgeType.InvokeInterface, JavaEdgeReason.InterfaceCall),
_ => (JavaEdgeType.InvokeVirtual, JavaEdgeReason.VirtualCall),
};
// Check if target is resolved (known in our method set)
@@ -403,6 +438,7 @@ internal sealed class JavaCallgraphBuilder
CalleePurl: calleePurl,
CalleeMethodDigest: null, // Would compute if method is in our set
EdgeType: edgeType,
EdgeReason: edgeReason,
BytecodeOffset: instructionOffset,
IsResolved: isResolved,
Confidence: isResolved ? 1.0 : 0.7));
@@ -448,6 +484,7 @@ internal sealed class JavaCallgraphBuilder
CalleePurl: null,
CalleeMethodDigest: null,
EdgeType: JavaEdgeType.InvokeDynamic,
EdgeReason: JavaEdgeReason.DynamicImport,
BytecodeOffset: instructionOffset,
IsResolved: false,
Confidence: 0.3));

View File

@@ -31,6 +31,8 @@ public sealed record JavaReachabilityGraph(
/// <param name="IsPublic">Whether the method is public.</param>
/// <param name="IsSynthetic">Whether the method is synthetic (compiler-generated).</param>
/// <param name="IsBridge">Whether the method is a bridge method.</param>
/// <param name="Visibility">Access visibility (public, private, protected, package).</param>
/// <param name="IsEntrypointCandidate">Whether this method could be an entrypoint (public, controller action, etc.).</param>
public sealed record JavaMethodNode(
string MethodId,
string ClassName,
@@ -43,7 +45,27 @@ public sealed record JavaMethodNode(
bool IsStatic,
bool IsPublic,
bool IsSynthetic,
bool IsBridge);
bool IsBridge,
JavaVisibility Visibility,
bool IsEntrypointCandidate);
/// <summary>
/// Access visibility levels for Java methods, derived from the
/// class-file access_flags (ACC_PUBLIC / ACC_PRIVATE / ACC_PROTECTED).
/// </summary>
public enum JavaVisibility
{
    /// <summary>Accessible from anywhere (ACC_PUBLIC).</summary>
    Public,

    /// <summary>Accessible only within the same class (ACC_PRIVATE).</summary>
    Private,

    /// <summary>Accessible within the same package or subclasses (ACC_PROTECTED).</summary>
    Protected,

    /// <summary>Package-private (default access; no visibility flag set).</summary>
    Package
}
/// <summary>
/// A call edge in the Java call graph.
@@ -54,6 +76,7 @@ public sealed record JavaMethodNode(
/// <param name="CalleePurl">PURL of the callee if resolvable.</param>
/// <param name="CalleeMethodDigest">Method digest of the callee.</param>
/// <param name="EdgeType">Type of edge (invoke type).</param>
/// <param name="EdgeReason">Semantic reason for the edge (DirectCall, VirtualCall, etc.).</param>
/// <param name="BytecodeOffset">Bytecode offset where call occurs.</param>
/// <param name="IsResolved">Whether the callee was successfully resolved.</param>
/// <param name="Confidence">Confidence level (1.0 for resolved, lower for heuristic).</param>
@@ -64,6 +87,7 @@ public sealed record JavaCallEdge(
string? CalleePurl,
string? CalleeMethodDigest,
JavaEdgeType EdgeType,
JavaEdgeReason EdgeReason,
int BytecodeOffset,
bool IsResolved,
double Confidence);
@@ -98,6 +122,46 @@ public enum JavaEdgeType
Constructor,
}
/// <summary>
/// Semantic reason for why a Java edge exists.
/// Maps to the schema's EdgeReason enum for explainability.
/// </summary>
public enum JavaEdgeReason
{
    /// <summary>Direct static method call (invokestatic).</summary>
    DirectCall,

    /// <summary>Virtual method dispatch (invokevirtual, invokeinterface).</summary>
    VirtualCall,

    /// <summary>Reflection-based invocation (Class.forName, Method.invoke).</summary>
    ReflectionString,

    /// <summary>Dependency injection binding (Spring, Guice).</summary>
    DiBinding,

    /// <summary>Dynamic lambda or method reference (invokedynamic).</summary>
    DynamicImport,

    /// <summary>Constructor/object instantiation (invokespecial &lt;init&gt;).</summary>
    NewObj,

    /// <summary>Super or private method call (invokespecial non-init).</summary>
    SuperCall,

    /// <summary>ServiceLoader-based service discovery.</summary>
    ServiceLoader,

    /// <summary>Interface method dispatch (invokeinterface).</summary>
    InterfaceCall,

    /// <summary>Native interop (JNI).</summary>
    NativeInterop,

    /// <summary>Fallback when the reason could not be determined.</summary>
    Unknown
}
/// <summary>
/// A synthetic root in the Java call graph.
/// </summary>

View File

@@ -258,6 +258,9 @@ internal sealed class NativeCallgraphBuilder
var isResolved = targetSym.Value != 0 || targetSym.SectionIndex != 0;
var calleePurl = isResolved ? GeneratePurl(elf.Path, targetSym.Name) : null;
// Determine edge reason based on whether target is external
var edgeReason = isResolved ? NativeEdgeReason.DirectCall : NativeEdgeReason.NativeInterop;
_edges.Add(new NativeCallEdge(
EdgeId: edgeId,
CallerId: callerId,
@@ -265,6 +268,7 @@ internal sealed class NativeCallgraphBuilder
CalleePurl: calleePurl,
CalleeSymbolDigest: calleeDigest,
EdgeType: NativeEdgeType.Relocation,
EdgeReason: edgeReason,
CallSiteOffset: reloc.Offset,
IsResolved: isResolved,
Confidence: isResolved ? 1.0 : 0.5));
@@ -321,6 +325,7 @@ internal sealed class NativeCallgraphBuilder
CalleePurl: GeneratePurl(elf.Path, targetSym.Name),
CalleeSymbolDigest: targetDigest,
EdgeType: NativeEdgeType.InitArray,
EdgeReason: NativeEdgeReason.InitCallback,
CallSiteOffset: (ulong)idx,
IsResolved: true,
Confidence: 1.0));

View File

@@ -49,6 +49,7 @@ public sealed record NativeFunctionNode(
/// <param name="CalleePurl">PURL of the callee if resolvable.</param>
/// <param name="CalleeSymbolDigest">Symbol digest of the callee.</param>
/// <param name="EdgeType">Type of edge (direct, plt, got, reloc).</param>
/// <param name="EdgeReason">Semantic reason for the edge (DirectCall, NativeInterop, etc.).</param>
/// <param name="CallSiteOffset">Offset within caller where call occurs.</param>
/// <param name="IsResolved">Whether the callee was successfully resolved.</param>
/// <param name="Confidence">Confidence level (1.0 for resolved, lower for heuristic).</param>
@@ -59,10 +60,30 @@ public sealed record NativeCallEdge(
string? CalleePurl,
string? CalleeSymbolDigest,
NativeEdgeType EdgeType,
NativeEdgeReason EdgeReason,
ulong CallSiteOffset,
bool IsResolved,
double Confidence);
/// <summary>
/// Semantic reason for why a native edge exists.
/// Maps to the schema's EdgeReason enum for explainability.
/// </summary>
public enum NativeEdgeReason
{
    /// <summary>Direct function call within the same binary (resolved symbol).</summary>
    DirectCall,

    /// <summary>Call through PLT/GOT to an external library (native interop).</summary>
    NativeInterop,

    /// <summary>Initialization or finalization callback (e.g. .init_array entry).</summary>
    InitCallback,

    /// <summary>Indirect call through a function pointer or otherwise unknown target.</summary>
    Unknown
}
/// <summary>
/// Type of call edge.
/// </summary>

View File

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Scanner.Core.Configuration;
/// <summary>
/// Configuration for offline kit operations.
/// Bound from the <c>Scanner:OfflineKit</c> configuration section; validated by
/// the options validator at startup when <see cref="Enabled"/> is true.
/// </summary>
public sealed class OfflineKitOptions
{
    /// <summary>Configuration section name this options class binds to.</summary>
    public const string SectionName = "Scanner:OfflineKit";

    /// <summary>
    /// Enables offline kit operations for this host.
    /// Default: false (opt-in)
    /// </summary>
    public bool Enabled { get; set; }

    /// <summary>
    /// When true, import fails if DSSE/Rekor verification fails.
    /// When false, verification failures are logged as warnings but import proceeds.
    /// Default: true
    /// </summary>
    public bool RequireDsse { get; set; } = true;

    /// <summary>
    /// When true, Rekor verification uses only local snapshots.
    /// No online Rekor API calls are attempted.
    /// Default: true (for air-gap safety)
    /// </summary>
    public bool RekorOfflineMode { get; set; } = true;

    /// <summary>
    /// URL of the internal attestation verifier service.
    /// Optional; if not set, verification is performed locally.
    /// </summary>
    public string? AttestationVerifier { get; set; }

    /// <summary>
    /// Trust anchors for signature verification.
    /// Matched by PURL pattern; first match wins.
    /// </summary>
    public List<TrustAnchorConfig> TrustAnchors { get; set; } = new();

    /// <summary>
    /// Path to directory containing trust root public keys.
    /// Keys are loaded by keyid reference from <see cref="TrustAnchors"/>.
    /// </summary>
    public string? TrustRootDirectory { get; set; }

    /// <summary>
    /// Path to offline Rekor snapshot directory.
    /// Contains checkpoint.sig and entries/*.jsonl
    /// </summary>
    public string? RekorSnapshotDirectory { get; set; }
}

View File

@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Core.TrustAnchors;
namespace StellaOps.Scanner.Core.Configuration;
/// <summary>
/// Startup validator for <see cref="OfflineKitOptions"/>: checks the verifier URI,
/// directory requirements for offline modes, and the shape of every trust anchor
/// (ids, PURL patterns, key ids, signature thresholds). All problems are collected
/// into a single failure so one pass reports every configuration error.
/// </summary>
public sealed class OfflineKitOptionsValidator : IValidateOptions<OfflineKitOptions>
{
    public ValidateOptionsResult Validate(string? name, OfflineKitOptions options)
    {
        if (options is null)
        {
            return ValidateOptionsResult.Fail("OfflineKit options must be provided.");
        }

        // Offline-kit support is opt-in; a disabled section requires no further checks.
        if (!options.Enabled)
        {
            return ValidateOptionsResult.Success;
        }

        var errors = new List<string>();

        // The verifier endpoint is optional, but when supplied it must be an absolute URI.
        if (!string.IsNullOrWhiteSpace(options.AttestationVerifier))
        {
            if (!Uri.TryCreate(options.AttestationVerifier, UriKind.Absolute, out _))
            {
                errors.Add("AttestationVerifier must be an absolute URI when provided.");
            }
        }

        // Normalize a null anchor list so the checks below can assume a collection.
        options.TrustAnchors ??= new List<TrustAnchorConfig>();

        // Enforcing DSSE without any trust anchor would reject every import.
        if (options.RequireDsse && options.TrustAnchors.Count == 0)
        {
            errors.Add("RequireDsse is true but no TrustAnchors are configured.");
        }

        // Anchors reference key files by id, so the key directory must be set and exist.
        if (options.TrustAnchors.Count > 0)
        {
            if (string.IsNullOrWhiteSpace(options.TrustRootDirectory))
            {
                errors.Add("TrustRootDirectory must be configured when TrustAnchors are present.");
            }
            else if (!Directory.Exists(options.TrustRootDirectory))
            {
                errors.Add($"TrustRootDirectory does not exist: {options.TrustRootDirectory}");
            }
        }

        // Offline Rekor verification needs a local snapshot to verify against.
        if (options.RekorOfflineMode)
        {
            if (string.IsNullOrWhiteSpace(options.RekorSnapshotDirectory))
            {
                errors.Add("RekorSnapshotDirectory must be configured when RekorOfflineMode is enabled.");
            }
            else if (!Directory.Exists(options.RekorSnapshotDirectory))
            {
                errors.Add($"RekorSnapshotDirectory does not exist: {options.RekorSnapshotDirectory}");
            }
        }

        // Per-anchor checks: identifier, pattern, key ids, and signature threshold.
        foreach (var anchor in options.TrustAnchors)
        {
            if (string.IsNullOrWhiteSpace(anchor.AnchorId))
            {
                errors.Add("TrustAnchor has empty AnchorId.");
            }

            if (string.IsNullOrWhiteSpace(anchor.PurlPattern))
            {
                errors.Add($"TrustAnchor '{anchor.AnchorId}' has empty PurlPattern.");
            }

            anchor.AllowedKeyIds ??= new List<string>();

            if (anchor.AllowedKeyIds.Count == 0)
            {
                errors.Add($"TrustAnchor '{anchor.AnchorId}' has no AllowedKeyIds.");
            }

            // MinSignatures must be achievable with the configured key set.
            if (anchor.MinSignatures < 1)
            {
                errors.Add($"TrustAnchor '{anchor.AnchorId}' MinSignatures must be >= 1.");
            }
            else if (anchor.AllowedKeyIds.Count > 0 && anchor.MinSignatures > anchor.AllowedKeyIds.Count)
            {
                errors.Add(
                    $"TrustAnchor '{anchor.AnchorId}' MinSignatures ({anchor.MinSignatures}) exceeds AllowedKeyIds count ({anchor.AllowedKeyIds.Count}).");
            }

            // Key ids become file names under TrustRootDirectory, so they must be
            // non-empty after normalization and safe as file names (no traversal).
            foreach (var keyId in anchor.AllowedKeyIds)
            {
                if (string.IsNullOrWhiteSpace(keyId))
                {
                    errors.Add($"TrustAnchor '{anchor.AnchorId}' contains an empty AllowedKeyId entry.");
                    continue;
                }

                var normalized = TrustAnchorRegistry.NormalizeKeyId(keyId);
                if (normalized.Length == 0)
                {
                    errors.Add($"TrustAnchor '{anchor.AnchorId}' contains an empty AllowedKeyId entry.");
                    continue;
                }

                if (normalized.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0
                    || normalized.Contains(Path.DirectorySeparatorChar)
                    || normalized.Contains(Path.AltDirectorySeparatorChar))
                {
                    errors.Add($"TrustAnchor '{anchor.AnchorId}' contains invalid AllowedKeyId '{keyId}'.");
                }
            }

            // Compile the pattern now so a malformed glob fails at startup, not at match time.
            try
            {
                _ = new PurlPatternMatcher(anchor.PurlPattern);
            }
            catch (Exception ex)
            {
                errors.Add($"TrustAnchor '{anchor.AnchorId}' has invalid PurlPattern: {ex.Message}");
            }
        }

        // Anchor ids must be unique (case-insensitive, trimmed) for unambiguous audit logs.
        var duplicateIds = options.TrustAnchors
            .Where(anchor => !string.IsNullOrWhiteSpace(anchor.AnchorId))
            .GroupBy(anchor => anchor.AnchorId.Trim(), StringComparer.OrdinalIgnoreCase)
            .Where(grouping => grouping.Count() > 1)
            .Select(grouping => grouping.Key)
            .ToList();

        if (duplicateIds.Count > 0)
        {
            errors.Add($"Duplicate TrustAnchor AnchorIds: {string.Join(", ", duplicateIds)}");
        }

        return errors.Count > 0
            ? ValidateOptionsResult.Fail(errors)
            : ValidateOptionsResult.Success;
    }
}

View File

@@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Scanner.Core.Configuration;
/// <summary>
/// Trust anchor configuration for ecosystem-specific signing authorities.
/// Anchors are matched against package URLs via <see cref="PurlPattern"/>;
/// key ids resolve to public key files in the configured trust-root directory.
/// </summary>
public sealed class TrustAnchorConfig
{
    /// <summary>
    /// Unique identifier for this trust anchor.
    /// Used in audit logs and error messages.
    /// </summary>
    public string AnchorId { get; set; } = string.Empty;

    /// <summary>
    /// PURL pattern to match against.
    /// Supports glob patterns: "pkg:npm/*", "pkg:maven/org.apache.*", "*".
    /// Patterns are matched in order; first match wins.
    /// </summary>
    public string PurlPattern { get; set; } = "*";

    /// <summary>
    /// List of allowed key fingerprints (SHA-256 of public key).
    /// Format: "sha256:hexstring" or just "hexstring".
    /// </summary>
    public List<string> AllowedKeyIds { get; set; } = new();

    /// <summary>
    /// Optional description for documentation/UI purposes.
    /// </summary>
    public string? Description { get; set; }

    /// <summary>
    /// When this anchor expires. Null = no expiry.
    /// After expiry, anchor is skipped with a warning.
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; set; }

    /// <summary>
    /// Minimum required signatures from this anchor.
    /// Default: 1 (at least one key must sign)
    /// </summary>
    public int MinSignatures { get; set; } = 1;
}

View File

@@ -0,0 +1,174 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Core.Drift;
/// <summary>
/// Calculates FN-Drift (false-negative drift) rate with stratification by cause.
/// </summary>
public sealed class FnDriftCalculator
{
    private readonly IClassificationHistoryRepository _repository;
    private readonly ILogger<FnDriftCalculator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a new calculator.
    /// </summary>
    /// <param name="repository">Source of historical classification changes.</param>
    /// <param name="logger">Logger for drift diagnostics.</param>
    /// <param name="timeProvider">
    /// Optional clock abstraction; defaults to <see cref="TimeProvider.System"/>.
    /// Injectable so rolling-window math and change timestamps are deterministic
    /// under test (consistent with the registry types that already take a TimeProvider).
    /// </param>
    public FnDriftCalculator(
        IClassificationHistoryRepository repository,
        ILogger<FnDriftCalculator> logger,
        TimeProvider? timeProvider = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Computes FN-Drift for a tenant over a rolling window.
    /// </summary>
    /// <param name="tenantId">Tenant to calculate for</param>
    /// <param name="windowDays">Rolling window in days (default: 30)</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>FN-Drift summary with stratification</returns>
    public async Task<FnDrift30dSummary> CalculateAsync(
        Guid tenantId,
        int windowDays = 30,
        CancellationToken cancellationToken = default)
    {
        var since = _timeProvider.GetUtcNow().AddDays(-windowDays);
        var changes = await _repository.GetChangesAsync(tenantId, since, cancellationToken);

        var fnTransitions = changes.Where(c => c.IsFnTransition).ToList();
        var totalEvaluated = changes.Count;

        var summary = new FnDrift30dSummary
        {
            TenantId = tenantId,
            TotalFnTransitions = fnTransitions.Count,
            TotalEvaluated = totalEvaluated,
            // Guard against divide-by-zero when nothing was evaluated in the window.
            FnDriftPercent = totalEvaluated > 0
                ? Math.Round((decimal)fnTransitions.Count / totalEvaluated * 100, 4)
                : 0,
            FeedCaused = fnTransitions.Count(c => c.Cause == DriftCause.FeedDelta),
            RuleCaused = fnTransitions.Count(c => c.Cause == DriftCause.RuleDelta),
            LatticeCaused = fnTransitions.Count(c => c.Cause == DriftCause.LatticeDelta),
            ReachabilityCaused = fnTransitions.Count(c => c.Cause == DriftCause.ReachabilityDelta),
            EngineCaused = fnTransitions.Count(c => c.Cause == DriftCause.Engine)
        };

        _logger.LogInformation(
            "FN-Drift for tenant {TenantId}: {Percent}% ({FnCount}/{Total}), " +
            "Feed={Feed}, Rule={Rule}, Lattice={Lattice}, Reach={Reach}, Engine={Engine}",
            tenantId, summary.FnDriftPercent, summary.TotalFnTransitions, summary.TotalEvaluated,
            summary.FeedCaused, summary.RuleCaused, summary.LatticeCaused,
            summary.ReachabilityCaused, summary.EngineCaused);

        return summary;
    }

    /// <summary>
    /// Determines the drift cause for a classification change by comparing the inputs
    /// in effect before and after the transition.
    /// Priority order: feed &gt; rule &gt; lattice &gt; reachability &gt; other.
    /// </summary>
    public DriftCause DetermineCause(
        string? previousFeedVersion,
        string? currentFeedVersion,
        string? previousRuleHash,
        string? currentRuleHash,
        string? previousLatticeHash,
        string? currentLatticeHash,
        bool? previousReachable,
        bool? currentReachable)
    {
        // Priority order: feed > rule > lattice > reachability > engine > other

        // Check feed delta
        if (!string.Equals(previousFeedVersion, currentFeedVersion, StringComparison.Ordinal))
        {
            _logger.LogDebug(
                "Drift cause: feed_delta (prev={PrevFeed}, curr={CurrFeed})",
                previousFeedVersion, currentFeedVersion);
            return DriftCause.FeedDelta;
        }

        // Check rule delta
        if (!string.Equals(previousRuleHash, currentRuleHash, StringComparison.Ordinal))
        {
            _logger.LogDebug(
                "Drift cause: rule_delta (prev={PrevRule}, curr={CurrRule})",
                previousRuleHash, currentRuleHash);
            return DriftCause.RuleDelta;
        }

        // Check lattice delta
        if (!string.Equals(previousLatticeHash, currentLatticeHash, StringComparison.Ordinal))
        {
            _logger.LogDebug(
                "Drift cause: lattice_delta (prev={PrevLattice}, curr={CurrLattice})",
                previousLatticeHash, currentLatticeHash);
            return DriftCause.LatticeDelta;
        }

        // Check reachability delta
        if (previousReachable != currentReachable)
        {
            _logger.LogDebug(
                "Drift cause: reachability_delta (prev={PrevReach}, curr={CurrReach})",
                previousReachable, currentReachable);
            return DriftCause.ReachabilityDelta;
        }

        // If nothing external changed, it's an engine change or unknown.
        // NOTE(review): this path returns DriftCause.Other, yet CalculateAsync also
        // stratifies on DriftCause.Engine — confirm Engine is assigned elsewhere.
        _logger.LogDebug("Drift cause: other (no external cause identified)");
        return DriftCause.Other;
    }

    /// <summary>
    /// Creates a ClassificationChange record for a status transition,
    /// stamped with the current UTC time.
    /// </summary>
    public ClassificationChange CreateChange(
        string artifactDigest,
        string vulnId,
        string packagePurl,
        Guid tenantId,
        Guid manifestId,
        Guid executionId,
        ClassificationStatus previousStatus,
        ClassificationStatus newStatus,
        DriftCause cause,
        IReadOnlyDictionary<string, string>? causeDetail = null)
    {
        return new ClassificationChange
        {
            ArtifactDigest = artifactDigest,
            VulnId = vulnId,
            PackagePurl = packagePurl,
            TenantId = tenantId,
            ManifestId = manifestId,
            ExecutionId = executionId,
            PreviousStatus = previousStatus,
            NewStatus = newStatus,
            Cause = cause,
            CauseDetail = causeDetail,
            ChangedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Checks if the FN-Drift rate exceeds the threshold.
    /// </summary>
    /// <param name="summary">The drift summary to check</param>
    /// <param name="thresholdPercent">Maximum acceptable FN-Drift rate (default: 5%)</param>
    /// <returns>True if drift rate exceeds threshold</returns>
    public bool ExceedsThreshold(FnDrift30dSummary summary, decimal thresholdPercent = 5.0m)
    {
        ArgumentNullException.ThrowIfNull(summary);

        var exceeds = summary.FnDriftPercent > thresholdPercent;
        if (exceeds)
        {
            _logger.LogWarning(
                "FN-Drift for tenant {TenantId} exceeds threshold: {Percent}% > {Threshold}%",
                summary.TenantId, summary.FnDriftPercent, thresholdPercent);
        }

        return exceeds;
    }
}

View File

@@ -0,0 +1,106 @@
using System;
using System.IO;
using System.Text;
namespace StellaOps.Scanner.Core.TrustAnchors;
/// <summary>
/// Loads trust-anchor public keys from a directory on disk.
/// Candidates are probed as "{keyId}", "{keyId}.pub", "{keyId}.pem", "{keyId}.der";
/// PEM-wrapped keys are decoded to raw DER bytes, anything else is returned as-is.
/// </summary>
public sealed class FileSystemPublicKeyLoader : IPublicKeyLoader
{
    // Probed in order; the bare key id (no extension) is tried first.
    private static readonly string[] CandidateExtensions =
    {
        string.Empty,
        ".pub",
        ".pem",
        ".der"
    };

    /// <summary>
    /// Loads the key material for <paramref name="keyId"/> from <paramref name="keyDirectory"/>.
    /// Returns null when either argument is blank, the id is unsafe as a file name,
    /// or no candidate file can be read.
    /// </summary>
    public byte[]? LoadKey(string keyId, string? keyDirectory)
    {
        if (string.IsNullOrWhiteSpace(keyId) || string.IsNullOrWhiteSpace(keyDirectory))
        {
            return null;
        }

        // Reject ids that could escape the key directory (path traversal / invalid names).
        if (keyId.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0
            || keyId.Contains(Path.DirectorySeparatorChar)
            || keyId.Contains(Path.AltDirectorySeparatorChar))
        {
            return null;
        }

        foreach (var extension in CandidateExtensions)
        {
            try
            {
                var path = Path.Combine(keyDirectory, keyId + extension);
                if (!File.Exists(path))
                {
                    continue;
                }

                var bytes = File.ReadAllBytes(path);
                return TryParsePemPublicKey(bytes) ?? bytes;
            }
            catch
            {
                // Best effort: an unreadable candidate (permissions, race) just
                // falls through to the next extension.
                continue;
            }
        }

        return null;
    }

    /// <summary>
    /// Extracts and base64-decodes the payload of a "BEGIN/END PUBLIC KEY" PEM envelope.
    /// Returns null when the content is not PEM or the payload is not valid base64,
    /// letting the caller fall back to the raw file bytes.
    /// </summary>
    private static byte[]? TryParsePemPublicKey(byte[] bytes)
    {
        if (bytes.Length == 0)
        {
            return null;
        }

        // Encoding.UTF8 uses replacement-character fallback, so this never throws
        // for arbitrary (e.g. DER) bytes; non-text input simply fails the marker search.
        var text = Encoding.UTF8.GetString(bytes);

        const string Begin = "-----BEGIN PUBLIC KEY-----";
        const string End = "-----END PUBLIC KEY-----";

        var beginIndex = text.IndexOf(Begin, StringComparison.Ordinal);
        if (beginIndex < 0)
        {
            return null;
        }

        // Search for END strictly after BEGIN so stray text containing the END
        // marker earlier in the file cannot break extraction.
        var payloadStart = beginIndex + Begin.Length;
        var endIndex = text.IndexOf(End, payloadStart, StringComparison.Ordinal);
        if (endIndex < 0)
        {
            return null;
        }

        var base64 = text
            .Substring(payloadStart, endIndex - payloadStart)
            .Replace("\r", string.Empty, StringComparison.Ordinal)
            .Replace("\n", string.Empty, StringComparison.Ordinal)
            .Trim();

        if (string.IsNullOrWhiteSpace(base64))
        {
            return null;
        }

        try
        {
            return Convert.FromBase64String(base64);
        }
        catch (FormatException)
        {
            // Malformed payload: treat the file as a raw (non-PEM) key.
            return null;
        }
    }
}

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Scanner.Core.TrustAnchors;
/// <summary>
/// Resolves trust-anchor public key material by key id.
/// </summary>
public interface IPublicKeyLoader
{
    /// <summary>
    /// Loads the public key bytes for <paramref name="keyId"/> from
    /// <paramref name="keyDirectory"/>; null when the key cannot be found or read.
    /// </summary>
    byte[]? LoadKey(string keyId, string? keyDirectory);
}

View File

@@ -0,0 +1,12 @@
using System.Collections.Generic;
using StellaOps.Scanner.Core.Configuration;
namespace StellaOps.Scanner.Core.TrustAnchors;
/// <summary>
/// Registry of configured trust anchors, resolvable by package URL.
/// </summary>
public interface ITrustAnchorRegistry
{
    /// <summary>
    /// Resolves the first trust anchor whose PURL pattern matches <paramref name="purl"/>;
    /// null when nothing matches (or offline-kit resolution is not enabled).
    /// </summary>
    TrustAnchorResolution? ResolveForPurl(string purl);

    /// <summary>
    /// Returns all configured trust anchors.
    /// </summary>
    IReadOnlyList<TrustAnchorConfig> GetAllAnchors();
}

View File

@@ -0,0 +1,54 @@
using System;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Core.TrustAnchors;
/// <summary>
/// Matches Package URLs against glob patterns.
/// Supports:
/// - Exact match: "pkg:npm/@scope/package@1.0.0"
/// - Prefix wildcard: "pkg:npm/*"
/// - Infix wildcard: "pkg:maven/org.apache.*"
/// - Universal: "*"
/// Matching is case-insensitive and fully anchored.
/// </summary>
public sealed class PurlPatternMatcher
{
    private const RegexOptions MatcherOptions =
        RegexOptions.Compiled | RegexOptions.IgnoreCase | RegexOptions.CultureInvariant;

    private readonly Regex _compiled;

    /// <summary>The trimmed glob pattern this matcher was constructed from.</summary>
    public string Pattern { get; }

    /// <summary>
    /// Builds a matcher for <paramref name="pattern"/>.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when the pattern is null, empty, or whitespace.</exception>
    public PurlPatternMatcher(string pattern)
    {
        if (string.IsNullOrWhiteSpace(pattern))
        {
            throw new ArgumentException("Pattern cannot be empty.", nameof(pattern));
        }

        Pattern = pattern.Trim();
        _compiled = BuildRegex(Pattern);
    }

    /// <summary>
    /// True when <paramref name="purl"/> is non-blank and matches the pattern.
    /// </summary>
    public bool IsMatch(string? purl)
        => !string.IsNullOrWhiteSpace(purl) && _compiled.IsMatch(purl);

    // Translates the glob to an anchored regex: literals are escaped, each '*'
    // becomes '.*', and the universal pattern short-circuits to match everything.
    private static Regex BuildRegex(string glob)
    {
        var body = glob == "*"
            ? ".*"
            : Regex.Escape(glob).Replace(@"\*", ".*", StringComparison.Ordinal);

        return new Regex($"^{body}$", MatcherOptions);
    }
}

View File

@@ -0,0 +1,205 @@
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Core.Configuration;
namespace StellaOps.Scanner.Core.TrustAnchors;
/// <summary>
/// Registry for trust anchors with PURL-based resolution.
/// Thread-safe and supports runtime reload.
/// </summary>
public sealed class TrustAnchorRegistry : ITrustAnchorRegistry
{
    private readonly IOptionsMonitor<OfflineKitOptions> _options;
    private readonly IPublicKeyLoader _keyLoader;
    private readonly ILogger<TrustAnchorRegistry> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly object _lock = new();

    // Lazily built cache of compiled anchors, invalidated on options change.
    // volatile: the cache is published via double-checked locking in
    // GetCompiledAnchors; without it, a reader on a weak memory model could
    // observe a partially published reference outside the lock.
    private volatile IReadOnlyList<CompiledTrustAnchor>? _compiledAnchors;

    public TrustAnchorRegistry(
        IOptionsMonitor<OfflineKitOptions> options,
        IPublicKeyLoader keyLoader,
        ILogger<TrustAnchorRegistry> logger,
        TimeProvider timeProvider)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _keyLoader = keyLoader ?? throw new ArgumentNullException(nameof(keyLoader));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        // Drop the compiled cache whenever the options snapshot changes so the
        // next resolution recompiles against the fresh configuration.
        _options.OnChange(_ => InvalidateCache());
    }

    /// <summary>
    /// Resolves the first non-expired anchor whose pattern matches the PURL.
    /// Returns null for blank input, when the feature is disabled, or when nothing matches.
    /// </summary>
    public TrustAnchorResolution? ResolveForPurl(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }
        if (!_options.CurrentValue.Enabled)
        {
            return null;
        }
        var anchors = GetCompiledAnchors();
        var now = _timeProvider.GetUtcNow();
        foreach (var anchor in anchors)
        {
            if (!anchor.Matcher.IsMatch(purl))
            {
                continue;
            }
            // Expired anchors are skipped (with a warning) rather than treated as a match.
            if (anchor.Config.ExpiresAt is { } expiresAt && expiresAt < now)
            {
                _logger.LogWarning("Trust anchor {AnchorId} has expired, skipping.", anchor.Config.AnchorId);
                continue;
            }
            return new TrustAnchorResolution(
                AnchorId: anchor.Config.AnchorId,
                AllowedKeyIds: anchor.AllowedKeyIds,
                MinSignatures: anchor.Config.MinSignatures,
                PublicKeys: anchor.LoadedKeys);
        }
        return null;
    }

    /// <summary>
    /// Returns every configured anchor (expired ones included); empty when none are configured.
    /// </summary>
    public IReadOnlyList<TrustAnchorConfig> GetAllAnchors()
        => _options.CurrentValue.TrustAnchors?.AsReadOnly()
            ?? (IReadOnlyList<TrustAnchorConfig>)Array.Empty<TrustAnchorConfig>();

    private IReadOnlyList<CompiledTrustAnchor> GetCompiledAnchors()
    {
        // Double-checked locking: fast path reads the volatile field once,
        // slow path rebuilds under the lock.
        var cached = _compiledAnchors;
        if (cached is not null)
        {
            return cached;
        }
        lock (_lock)
        {
            if (_compiledAnchors is not null)
            {
                return _compiledAnchors;
            }
            var config = _options.CurrentValue;
            // Never mutate the shared options snapshot; treat a null anchor
            // list as empty locally instead of assigning back into it.
            var configuredAnchors = config.TrustAnchors ?? new List<TrustAnchorConfig>();
            var compiled = new List<CompiledTrustAnchor>(configuredAnchors.Count);
            foreach (var anchor in configuredAnchors)
            {
                try
                {
                    var matcher = new PurlPatternMatcher(anchor.PurlPattern);
                    var (allowedKeyIds, keys) = LoadKeysForAnchor(anchor, config.TrustRootDirectory);
                    compiled.Add(new CompiledTrustAnchor(anchor, matcher, allowedKeyIds, keys));
                }
                catch (Exception ex)
                {
                    // A single bad anchor must not poison the whole registry.
                    _logger.LogError(ex, "Failed to compile trust anchor {AnchorId}.", anchor.AnchorId);
                }
            }
            _compiledAnchors = compiled.AsReadOnly();
            return _compiledAnchors;
        }
    }

    /// <summary>
    /// Normalizes and loads all keys configured for an anchor. Missing keys are
    /// logged and skipped; the key id still appears in the allowed list.
    /// </summary>
    private (IReadOnlyList<string> AllowedKeyIds, IReadOnlyDictionary<string, byte[]> LoadedKeys) LoadKeysForAnchor(
        TrustAnchorConfig anchor,
        string? keyDirectory)
    {
        var normalizedKeyIds = new List<string>(anchor.AllowedKeyIds.Count);
        var keys = new Dictionary<string, byte[]>(StringComparer.OrdinalIgnoreCase);
        foreach (var configuredKeyId in anchor.AllowedKeyIds)
        {
            var normalizedKeyId = NormalizeKeyId(configuredKeyId);
            if (string.IsNullOrWhiteSpace(normalizedKeyId))
            {
                continue;
            }
            normalizedKeyIds.Add(normalizedKeyId);
            var keyBytes = _keyLoader.LoadKey(normalizedKeyId, keyDirectory);
            if (keyBytes is null)
            {
                _logger.LogWarning("Key {KeyId} not found for anchor {AnchorId}.", configuredKeyId, anchor.AnchorId);
                continue;
            }
            // Register both the bare id and the "sha256:"-prefixed form so
            // lookups succeed regardless of which spelling the caller uses.
            keys[normalizedKeyId] = keyBytes;
            keys[$"sha256:{normalizedKeyId}"] = keyBytes;
        }
        return (normalizedKeyIds.AsReadOnly(), keys);
    }

    /// <summary>
    /// Strips an optional "sha256:" prefix and lowercases hex-looking ids so
    /// equivalent spellings compare equal. Returns empty for blank input.
    /// </summary>
    internal static string NormalizeKeyId(string keyId)
    {
        if (string.IsNullOrWhiteSpace(keyId))
        {
            return string.Empty;
        }
        var trimmed = keyId.Trim();
        if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            trimmed = trimmed[7..];
        }
        trimmed = trimmed.Trim();
        if (trimmed.Length == 0)
        {
            return string.Empty;
        }
        return LooksLikeHex(trimmed)
            ? trimmed.ToLowerInvariant()
            : trimmed;
    }

    // True when every character is a hex digit (note: empty input returns true;
    // callers guard against empty strings before calling).
    private static bool LooksLikeHex(string value)
    {
        foreach (var character in value)
        {
            var isHex = (character >= '0' && character <= '9')
                || (character >= 'a' && character <= 'f')
                || (character >= 'A' && character <= 'F');
            if (!isHex)
            {
                return false;
            }
        }
        return true;
    }

    private void InvalidateCache()
    {
        lock (_lock)
        {
            _compiledAnchors = null;
        }
    }

    private sealed record CompiledTrustAnchor(
        TrustAnchorConfig Config,
        PurlPatternMatcher Matcher,
        IReadOnlyList<string> AllowedKeyIds,
        IReadOnlyDictionary<string, byte[]> LoadedKeys);
}
/// <summary>
/// Result of resolving a PURL against the trust anchor registry.
/// </summary>
/// <param name="AnchorId">Identifier of the matching anchor.</param>
/// <param name="AllowedKeyIds">Normalized key ids permitted to sign for this anchor.</param>
/// <param name="MinSignatures">Minimum signature count, taken from the anchor configuration.</param>
/// <param name="PublicKeys">Loaded key material, keyed by both the bare id and its "sha256:"-prefixed form.</param>
public sealed record TrustAnchorResolution(
string AnchorId,
IReadOnlyList<string> AllowedKeyIds,
int MinSignatures,
IReadOnlyDictionary<string, byte[]> PublicKeys);

View File

@@ -0,0 +1,122 @@
namespace StellaOps.Scanner.Storage.Models;
/// <summary>
/// Represents a classification status change for FN-Drift tracking.
/// Persisted to the classification_history table.
/// </summary>
public sealed record ClassificationChange
{
/// <summary>Database identity; NOTE(review): presumably 0 until the row is persisted — confirm against the repository.</summary>
public long Id { get; init; }
// Artifact identification
public required string ArtifactDigest { get; init; }
public required string VulnId { get; init; }
public required string PackagePurl { get; init; }
// Scan context
public required Guid TenantId { get; init; }
public required Guid ManifestId { get; init; }
public required Guid ExecutionId { get; init; }
// Status transition
public required ClassificationStatus PreviousStatus { get; init; }
public required ClassificationStatus NewStatus { get; init; }
/// <summary>
/// True if this was a false-negative transition (unaffected/unknown -> affected)
/// </summary>
public bool IsFnTransition =>
PreviousStatus is ClassificationStatus.Unaffected or ClassificationStatus.Unknown
&& NewStatus == ClassificationStatus.Affected;
// Drift cause
public required DriftCause Cause { get; init; }
/// <summary>Optional extra context (e.g. feed version, rule hash) serialized into the cause_detail JSONB column.</summary>
public IReadOnlyDictionary<string, string>? CauseDetail { get; init; }
// Timestamp
// NOTE(review): defaults to wall-clock UtcNow; consider injecting TimeProvider so tests can control it.
public DateTimeOffset ChangedAt { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Classification status values.
/// Persisted as lowercase strings ('new', 'unaffected', 'unknown', 'affected', 'fixed');
/// see ClassificationHistoryRepository.MapStatusToString.
/// </summary>
public enum ClassificationStatus
{
/// <summary>First scan, no previous status</summary>
New,
/// <summary>Confirmed not affected</summary>
Unaffected,
/// <summary>Status unknown/uncertain</summary>
Unknown,
/// <summary>Confirmed affected</summary>
Affected,
/// <summary>Previously affected, now fixed</summary>
Fixed
}
/// <summary>
/// Stratification causes for FN-Drift analysis.
/// Persisted as snake_case strings ('feed_delta', 'rule_delta', ...);
/// see ClassificationHistoryRepository.MapCauseToString.
/// </summary>
public enum DriftCause
{
/// <summary>Vulnerability feed updated (NVD, GHSA, OVAL)</summary>
FeedDelta,
/// <summary>Policy rules changed</summary>
RuleDelta,
/// <summary>VEX lattice state changed</summary>
LatticeDelta,
/// <summary>Reachability analysis changed</summary>
ReachabilityDelta,
/// <summary>Scanner engine change (should be ~0)</summary>
Engine,
/// <summary>Other/unknown cause</summary>
Other
}
/// <summary>
/// FN-Drift statistics for a time period.
/// Mirrors one row of the fn_drift_stats materialized view (one day/tenant/cause bucket).
/// </summary>
public sealed record FnDriftStats
{
public required DateOnly DayBucket { get; init; }
public required Guid TenantId { get; init; }
public required DriftCause Cause { get; init; }
/// <summary>Total reclassifications in this bucket.</summary>
public required int TotalReclassified { get; init; }
/// <summary>Count of false-negative transitions (unaffected/unknown -> affected).</summary>
public required int FnCount { get; init; }
/// <summary>FnCount / TotalReclassified * 100; the SQL view rounds to 4 decimal places.</summary>
public required decimal FnDriftPercent { get; init; }
// Stratification counts
public required int FeedDeltaCount { get; init; }
public required int RuleDeltaCount { get; init; }
public required int LatticeDeltaCount { get; init; }
public required int ReachabilityDeltaCount { get; init; }
public required int EngineCount { get; init; }
public required int OtherCount { get; init; }
}
/// <summary>
/// 30-day rolling FN-Drift summary. Mirrors one row of the fn_drift_30d view.
/// NOTE(review): there is intentionally no OtherCaused field — the fn_drift_30d view
/// does not aggregate the 'other' cause either, so the per-cause breakdown may not
/// sum to TotalEvaluated.
/// </summary>
public sealed record FnDrift30dSummary
{
public required Guid TenantId { get; init; }
public required int TotalFnTransitions { get; init; }
public required int TotalEvaluated { get; init; }
/// <summary>TotalFnTransitions / TotalEvaluated * 100; rounded to 4 decimals in SQL.</summary>
public required decimal FnDriftPercent { get; init; }
// Stratification breakdown
public required int FeedCaused { get; init; }
public required int RuleCaused { get; init; }
public required int LatticeCaused { get; init; }
public required int ReachabilityCaused { get; init; }
public required int EngineCaused { get; init; }
}

View File

@@ -0,0 +1,107 @@
-- Classification history for FN-Drift tracking
-- Per advisory section 13.2
CREATE TABLE IF NOT EXISTS classification_history (
id BIGSERIAL PRIMARY KEY,
-- Artifact identification
artifact_digest TEXT NOT NULL,
vuln_id TEXT NOT NULL,
package_purl TEXT NOT NULL,
-- Scan context
tenant_id UUID NOT NULL,
manifest_id UUID NOT NULL,
execution_id UUID NOT NULL,
-- Status transition
previous_status TEXT NOT NULL, -- 'new', 'unaffected', 'unknown', 'affected', 'fixed'
new_status TEXT NOT NULL,
-- Generated column: kept in sync by the database, never written by the application.
is_fn_transition BOOLEAN NOT NULL GENERATED ALWAYS AS (
previous_status IN ('unaffected', 'unknown') AND new_status = 'affected'
) STORED,
-- Drift cause classification
cause TEXT NOT NULL, -- 'feed_delta', 'rule_delta', 'lattice_delta', 'reachability_delta', 'engine', 'other'
cause_detail JSONB, -- Additional context (e.g., feed version, rule hash)
-- Timestamps
changed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
-- NOTE: 'new' is a valid previous status but deliberately not a valid new status
-- (a row always records a transition away from the initial state).
CONSTRAINT valid_previous_status CHECK (previous_status IN ('new', 'unaffected', 'unknown', 'affected', 'fixed')),
CONSTRAINT valid_new_status CHECK (new_status IN ('unaffected', 'unknown', 'affected', 'fixed')),
CONSTRAINT valid_cause CHECK (cause IN ('feed_delta', 'rule_delta', 'lattice_delta', 'reachability_delta', 'engine', 'other'))
);
-- Indexes for common query patterns
CREATE INDEX IF NOT EXISTS idx_classification_history_artifact ON classification_history(artifact_digest);
CREATE INDEX IF NOT EXISTS idx_classification_history_tenant ON classification_history(tenant_id);
CREATE INDEX IF NOT EXISTS idx_classification_history_changed_at ON classification_history(changed_at);
CREATE INDEX IF NOT EXISTS idx_classification_history_fn_transition ON classification_history(is_fn_transition) WHERE is_fn_transition = TRUE;
CREATE INDEX IF NOT EXISTS idx_classification_history_cause ON classification_history(cause);
CREATE INDEX IF NOT EXISTS idx_classification_history_vuln ON classification_history(vuln_id);
COMMENT ON TABLE classification_history IS 'Tracks vulnerability classification changes for FN-Drift analysis';
COMMENT ON COLUMN classification_history.is_fn_transition IS 'True if this was a false-negative transition (unaffected/unknown -> affected)';
COMMENT ON COLUMN classification_history.cause IS 'Stratification cause: feed_delta, rule_delta, lattice_delta, reachability_delta, engine, other';
-- Materialized view for FN-Drift statistics
-- Aggregates classification_history for dashboard queries
-- NOTE: the repository refreshes this view with REFRESH MATERIALIZED VIEW CONCURRENTLY,
-- which requires the unique index created below AND that the view has been populated
-- at least once (it is created WITH DATA by default, so that holds here).
CREATE MATERIALIZED VIEW IF NOT EXISTS fn_drift_stats AS
SELECT
date_trunc('day', changed_at)::date AS day_bucket,
tenant_id,
cause,
-- Total reclassifications
COUNT(*) AS total_reclassified,
-- FN transitions (unaffected/unknown -> affected)
COUNT(*) FILTER (WHERE is_fn_transition) AS fn_count,
-- FN-Drift rate
ROUND(
(COUNT(*) FILTER (WHERE is_fn_transition)::numeric /
NULLIF(COUNT(*), 0)) * 100, 4
) AS fn_drift_percent,
-- Stratification counts
COUNT(*) FILTER (WHERE cause = 'feed_delta') AS feed_delta_count,
COUNT(*) FILTER (WHERE cause = 'rule_delta') AS rule_delta_count,
COUNT(*) FILTER (WHERE cause = 'lattice_delta') AS lattice_delta_count,
COUNT(*) FILTER (WHERE cause = 'reachability_delta') AS reachability_delta_count,
COUNT(*) FILTER (WHERE cause = 'engine') AS engine_count,
COUNT(*) FILTER (WHERE cause = 'other') AS other_count
FROM classification_history
GROUP BY date_trunc('day', changed_at)::date, tenant_id, cause;
-- Unique index: required for REFRESH MATERIALIZED VIEW CONCURRENTLY.
CREATE UNIQUE INDEX IF NOT EXISTS idx_fn_drift_stats_pk ON fn_drift_stats(day_bucket, tenant_id, cause);
CREATE INDEX IF NOT EXISTS idx_fn_drift_stats_tenant ON fn_drift_stats(tenant_id);
-- View for 30-day rolling FN-Drift (per advisory definition)
-- NOTE: 'other' is intentionally absent from the breakdown below, so the
-- per-cause sums may not add up to total_evaluated.
CREATE OR REPLACE VIEW fn_drift_30d AS
SELECT
tenant_id,
SUM(fn_count)::int AS total_fn_transitions,
SUM(total_reclassified)::int AS total_evaluated,
ROUND(
(SUM(fn_count)::numeric / NULLIF(SUM(total_reclassified), 0)) * 100, 4
) AS fn_drift_percent,
-- Stratification breakdown
SUM(feed_delta_count)::int AS feed_caused,
SUM(rule_delta_count)::int AS rule_caused,
SUM(lattice_delta_count)::int AS lattice_caused,
SUM(reachability_delta_count)::int AS reachability_caused,
SUM(engine_count)::int AS engine_caused
FROM fn_drift_stats
WHERE day_bucket >= CURRENT_DATE - INTERVAL '30 days'
GROUP BY tenant_id;
COMMENT ON MATERIALIZED VIEW fn_drift_stats IS 'Daily FN-Drift statistics, refresh periodically';
COMMENT ON VIEW fn_drift_30d IS 'Rolling 30-day FN-Drift rate per tenant';

View File

@@ -4,4 +4,5 @@ internal static class MigrationIds
{
public const string CreateTables = "001_create_tables.sql";
public const string ProofSpineTables = "002_proof_spine_tables.sql";
public const string ClassificationHistory = "003_classification_history.sql";
}

View File

@@ -0,0 +1,323 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Postgres;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// PostgreSQL implementation of classification history repository.
/// Reads rely on positional column access, so every mapper below must stay in
/// sync with the column order of its SELECT list.
/// </summary>
public sealed class ClassificationHistoryRepository : RepositoryBase<ScannerDataSource>, IClassificationHistoryRepository
{
// Sentinel passed to the RepositoryBase helpers; tenant scoping happens via
// SQL parameters instead. NOTE(review): confirm an empty tenant is the
// intended RepositoryBase contract here.
private const string Tenant = "";
private string Table => $"{SchemaName}.classification_history";
private string DriftStatsView => $"{SchemaName}.fn_drift_stats";
private string Drift30dView => $"{SchemaName}.fn_drift_30d";
private string SchemaName => DataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
public ClassificationHistoryRepository(
ScannerDataSource dataSource,
ILogger<ClassificationHistoryRepository> logger)
: base(dataSource, logger)
{
}
/// <summary>
/// Inserts a single classification change row. is_fn_transition is a
/// generated column, so it is never written explicitly.
/// </summary>
public async Task InsertAsync(ClassificationChange change, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(change);
var sql = $"""
INSERT INTO {Table}
(artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
previous_status, new_status, cause, cause_detail, changed_at)
VALUES
(@artifact_digest, @vuln_id, @package_purl, @tenant_id, @manifest_id, @execution_id,
@previous_status, @new_status, @cause, @cause_detail::jsonb, @changed_at)
""";
await ExecuteAsync(
Tenant,
sql,
cmd => AddChangeParameters(cmd, change),
cancellationToken);
}
/// <summary>
/// Inserts multiple classification changes.
/// </summary>
public async Task InsertBatchAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(changes);
var changeList = changes.ToList();
if (changeList.Count == 0) return;
// NOTE(review): despite the method name, inserts are issued sequentially
// (one round-trip per row); consider NpgsqlBatch or COPY for true batching.
foreach (var change in changeList)
{
await InsertAsync(change, cancellationToken);
}
}
/// <summary>
/// Gets classification changes for a tenant since a given timestamp, newest first.
/// </summary>
public Task<IReadOnlyList<ClassificationChange>> GetChangesAsync(
Guid tenantId,
DateTimeOffset since,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT id, artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
previous_status, new_status, is_fn_transition, cause, cause_detail, changed_at
FROM {Table}
WHERE tenant_id = @tenant_id AND changed_at >= @since
ORDER BY changed_at DESC
""";
return QueryAsync(
Tenant,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "since", since);
},
MapChange,
cancellationToken);
}
/// <summary>
/// Gets classification changes for a specific artifact digest, newest first.
/// </summary>
public Task<IReadOnlyList<ClassificationChange>> GetByArtifactAsync(
string artifactDigest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(artifactDigest);
var sql = $"""
SELECT id, artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
previous_status, new_status, is_fn_transition, cause, cause_detail, changed_at
FROM {Table}
WHERE artifact_digest = @artifact_digest
ORDER BY changed_at DESC
""";
return QueryAsync(
Tenant,
sql,
cmd => AddParameter(cmd, "artifact_digest", artifactDigest),
MapChange,
cancellationToken);
}
/// <summary>
/// Gets classification changes for a vulnerability; a null tenantId queries across all tenants.
/// </summary>
public Task<IReadOnlyList<ClassificationChange>> GetByVulnIdAsync(
string vulnId,
Guid? tenantId = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(vulnId);
var sql = tenantId.HasValue
? $"""
SELECT id, artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
previous_status, new_status, is_fn_transition, cause, cause_detail, changed_at
FROM {Table}
WHERE vuln_id = @vuln_id AND tenant_id = @tenant_id
ORDER BY changed_at DESC
"""
: $"""
SELECT id, artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
previous_status, new_status, is_fn_transition, cause, cause_detail, changed_at
FROM {Table}
WHERE vuln_id = @vuln_id
ORDER BY changed_at DESC
""";
return QueryAsync(
Tenant,
sql,
cmd =>
{
AddParameter(cmd, "vuln_id", vulnId);
if (tenantId.HasValue)
AddParameter(cmd, "tenant_id", tenantId.Value);
},
MapChange,
cancellationToken);
}
/// <summary>
/// Gets daily drift statistics from the fn_drift_stats materialized view
/// for an inclusive date range, newest day first.
/// </summary>
public Task<IReadOnlyList<FnDriftStats>> GetDriftStatsAsync(
Guid tenantId,
DateOnly fromDate,
DateOnly toDate,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT day_bucket, tenant_id, cause, total_reclassified, fn_count, fn_drift_percent,
feed_delta_count, rule_delta_count, lattice_delta_count, reachability_delta_count,
engine_count, other_count
FROM {DriftStatsView}
WHERE tenant_id = @tenant_id AND day_bucket >= @from_date AND day_bucket <= @to_date
ORDER BY day_bucket DESC
""";
return QueryAsync(
Tenant,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "from_date", fromDate);
AddParameter(cmd, "to_date", toDate);
},
MapDriftStats,
cancellationToken);
}
/// <summary>
/// Gets the 30-day rolling summary for a tenant; null when the tenant has no rows in the view.
/// </summary>
public Task<FnDrift30dSummary?> GetDrift30dSummaryAsync(
Guid tenantId,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT tenant_id, total_fn_transitions, total_evaluated, fn_drift_percent,
feed_caused, rule_caused, lattice_caused, reachability_caused, engine_caused
FROM {Drift30dView}
WHERE tenant_id = @tenant_id
""";
return QuerySingleOrDefaultAsync(
Tenant,
sql,
cmd => AddParameter(cmd, "tenant_id", tenantId),
MapDrift30dSummary,
cancellationToken);
}
/// <summary>
/// Refreshes the fn_drift_stats materialized view. CONCURRENTLY avoids
/// blocking readers but requires the view's unique index and a previously
/// populated view (both guaranteed by the migration).
/// </summary>
public async Task RefreshDriftStatsAsync(CancellationToken cancellationToken = default)
{
var sql = $"REFRESH MATERIALIZED VIEW CONCURRENTLY {DriftStatsView}";
await ExecuteAsync(
Tenant,
sql,
static _ => { },
cancellationToken);
}
// Binds all insert parameters; enum values are stored as their DB string forms.
private void AddChangeParameters(NpgsqlCommand cmd, ClassificationChange change)
{
AddParameter(cmd, "artifact_digest", change.ArtifactDigest);
AddParameter(cmd, "vuln_id", change.VulnId);
AddParameter(cmd, "package_purl", change.PackagePurl);
AddParameter(cmd, "tenant_id", change.TenantId);
AddParameter(cmd, "manifest_id", change.ManifestId);
AddParameter(cmd, "execution_id", change.ExecutionId);
AddParameter(cmd, "previous_status", MapStatusToString(change.PreviousStatus));
AddParameter(cmd, "new_status", MapStatusToString(change.NewStatus));
AddParameter(cmd, "cause", MapCauseToString(change.Cause));
AddParameter(cmd, "cause_detail", change.CauseDetail != null
? JsonSerializer.Serialize(change.CauseDetail, JsonOptions)
: null);
AddParameter(cmd, "changed_at", change.ChangedAt);
}
// Positional reader: indices must match the 13-column SELECT lists above.
private static ClassificationChange MapChange(NpgsqlDataReader reader)
{
var causeDetailJson = reader.IsDBNull(11) ? null : reader.GetString(11);
var causeDetail = causeDetailJson != null
? JsonSerializer.Deserialize<Dictionary<string, string>>(causeDetailJson, JsonOptions)
: null;
return new ClassificationChange
{
Id = reader.GetInt64(0),
ArtifactDigest = reader.GetString(1),
VulnId = reader.GetString(2),
PackagePurl = reader.GetString(3),
TenantId = reader.GetGuid(4),
ManifestId = reader.GetGuid(5),
ExecutionId = reader.GetGuid(6),
PreviousStatus = MapStringToStatus(reader.GetString(7)),
NewStatus = MapStringToStatus(reader.GetString(8)),
// is_fn_transition is at index 9, but we compute it from PreviousStatus/NewStatus
Cause = MapStringToCause(reader.GetString(10)),
CauseDetail = causeDetail,
ChangedAt = reader.GetDateTime(12)
};
}
// Positional reader for fn_drift_stats rows; must match GetDriftStatsAsync's SELECT order.
private static FnDriftStats MapDriftStats(NpgsqlDataReader reader)
{
return new FnDriftStats
{
DayBucket = DateOnly.FromDateTime(reader.GetDateTime(0)),
TenantId = reader.GetGuid(1),
Cause = MapStringToCause(reader.GetString(2)),
TotalReclassified = reader.GetInt32(3),
FnCount = reader.GetInt32(4),
FnDriftPercent = reader.GetDecimal(5),
FeedDeltaCount = reader.GetInt32(6),
RuleDeltaCount = reader.GetInt32(7),
LatticeDeltaCount = reader.GetInt32(8),
ReachabilityDeltaCount = reader.GetInt32(9),
EngineCount = reader.GetInt32(10),
OtherCount = reader.GetInt32(11)
};
}
// Positional reader for fn_drift_30d rows; fn_drift_percent can be NULL when
// total_evaluated is 0 (NULLIF in the view), hence the IsDBNull guard.
private static FnDrift30dSummary MapDrift30dSummary(NpgsqlDataReader reader)
{
return new FnDrift30dSummary
{
TenantId = reader.GetGuid(0),
TotalFnTransitions = reader.GetInt32(1),
TotalEvaluated = reader.GetInt32(2),
FnDriftPercent = reader.IsDBNull(3) ? 0 : reader.GetDecimal(3),
FeedCaused = reader.GetInt32(4),
RuleCaused = reader.GetInt32(5),
LatticeCaused = reader.GetInt32(6),
ReachabilityCaused = reader.GetInt32(7),
EngineCaused = reader.GetInt32(8)
};
}
// Enum <-> DB string mappings; keep in sync with the CHECK constraints in the migration.
private static string MapStatusToString(ClassificationStatus status) => status switch
{
ClassificationStatus.New => "new",
ClassificationStatus.Unaffected => "unaffected",
ClassificationStatus.Unknown => "unknown",
ClassificationStatus.Affected => "affected",
ClassificationStatus.Fixed => "fixed",
_ => throw new ArgumentOutOfRangeException(nameof(status))
};
private static ClassificationStatus MapStringToStatus(string status) => status switch
{
"new" => ClassificationStatus.New,
"unaffected" => ClassificationStatus.Unaffected,
"unknown" => ClassificationStatus.Unknown,
"affected" => ClassificationStatus.Affected,
"fixed" => ClassificationStatus.Fixed,
_ => throw new ArgumentOutOfRangeException(nameof(status))
};
private static string MapCauseToString(DriftCause cause) => cause switch
{
DriftCause.FeedDelta => "feed_delta",
DriftCause.RuleDelta => "rule_delta",
DriftCause.LatticeDelta => "lattice_delta",
DriftCause.ReachabilityDelta => "reachability_delta",
DriftCause.Engine => "engine",
DriftCause.Other => "other",
_ => throw new ArgumentOutOfRangeException(nameof(cause))
};
private static DriftCause MapStringToCause(string cause) => cause switch
{
"feed_delta" => DriftCause.FeedDelta,
"rule_delta" => DriftCause.RuleDelta,
"lattice_delta" => DriftCause.LatticeDelta,
"reachability_delta" => DriftCause.ReachabilityDelta,
"engine" => DriftCause.Engine,
"other" => DriftCause.Other,
_ => throw new ArgumentOutOfRangeException(nameof(cause))
};
}

View File

@@ -0,0 +1,63 @@
using StellaOps.Scanner.Storage.Models;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Repository interface for classification history operations.
/// </summary>
public interface IClassificationHistoryRepository
{
/// <summary>
/// Records a classification status change.
/// </summary>
Task InsertAsync(ClassificationChange change, CancellationToken cancellationToken = default);
/// <summary>
/// Records multiple classification changes in a batch.
/// </summary>
Task InsertBatchAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default);
/// <summary>
/// Gets classification changes for a tenant since a given date, newest first.
/// </summary>
Task<IReadOnlyList<ClassificationChange>> GetChangesAsync(
Guid tenantId,
DateTimeOffset since,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets classification changes for a specific artifact.
/// </summary>
Task<IReadOnlyList<ClassificationChange>> GetByArtifactAsync(
string artifactDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets classification changes for a specific vulnerability.
/// A null <paramref name="tenantId"/> queries across all tenants.
/// </summary>
Task<IReadOnlyList<ClassificationChange>> GetByVulnIdAsync(
string vulnId,
Guid? tenantId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets FN-Drift statistics from the materialized view for an inclusive date range.
/// </summary>
Task<IReadOnlyList<FnDriftStats>> GetDriftStatsAsync(
Guid tenantId,
DateOnly fromDate,
DateOnly toDate,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets 30-day rolling FN-Drift summary for a tenant; null when no data exists.
/// </summary>
Task<FnDrift30dSummary?> GetDrift30dSummaryAsync(
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Refreshes the FN-Drift statistics materialized view.
/// </summary>
Task RefreshDriftStatsAsync(CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,27 @@
[
{
"analyzerId": "node",
"componentKey": "observation::node-phase22",
"name": "Node Observation (Phase 22)",
"type": "node-observation",
"usedByEntrypoint": false,
"capabilities": [],
"threatVectors": [],
"metadata": {
"node.observation.components": "2",
"node.observation.edges": "2",
"node.observation.entrypoints": "0",
"node.observation.native": "1",
"node.observation.wasm": "1"
},
"evidence": [
{
"kind": "derived",
"source": "node.observation",
"locator": "phase22.ndjson",
"value": "{\u0022type\u0022:\u0022component\u0022,\u0022componentType\u0022:\u0022native\u0022,\u0022path\u0022:\u0022/native/addon.node\u0022,\u0022reason\u0022:\u0022native-addon-file\u0022,\u0022confidence\u0022:0.82,\u0022resolverTrace\u0022:[\u0022file:/native/addon.node\u0022],\u0022arch\u0022:\u0022x86_64\u0022,\u0022platform\u0022:\u0022linux\u0022}\r\n{\u0022type\u0022:\u0022component\u0022,\u0022componentType\u0022:\u0022wasm\u0022,\u0022path\u0022:\u0022/pkg/pkg.wasm\u0022,\u0022reason\u0022:\u0022wasm-file\u0022,\u0022confidence\u0022:0.8,\u0022resolverTrace\u0022:[\u0022file:/pkg/pkg.wasm\u0022]}\r\n{\u0022type\u0022:\u0022edge\u0022,\u0022edgeType\u0022:\u0022wasm\u0022,\u0022from\u0022:\u0022/src/app.js\u0022,\u0022to\u0022:\u0022/src/pkg/pkg.wasm\u0022,\u0022reason\u0022:\u0022wasm-import\u0022,\u0022confidence\u0022:0.74,\u0022resolverTrace\u0022:[\u0022source:/src/app.js\u0022,\u0022call:WebAssembly.instantiate(\\u0027./pkg/pkg.wasm\\u0027)\u0022]}\r\n{\u0022type\u0022:\u0022edge\u0022,\u0022edgeType\u0022:\u0022capability\u0022,\u0022from\u0022:\u0022/src/app.js\u0022,\u0022to\u0022:\u0022child_process.execFile\u0022,\u0022reason\u0022:\u0022capability-child-process\u0022,\u0022confidence\u0022:0.7,\u0022resolverTrace\u0022:[\u0022source:/src/app.js\u0022,\u0022call:child_process.execFile\u0022]}",
"sha256": "1329f1c41716d8430b5bdb6d02d1d5f2be1be80877ac15a7e72d3a079fffa4fb"
}
]
}
]

View File

@@ -0,0 +1,165 @@
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Core.Configuration;
using Xunit;
namespace StellaOps.Scanner.Core.Tests;
/// <summary>
/// Unit tests for OfflineKitOptionsValidator covering the disabled short-circuit,
/// required-directory rules, a minimal valid configuration, and duplicate anchor detection.
/// </summary>
public sealed class OfflineKitOptionsValidatorTests
{
// Disabled options must validate successfully even with all defaults unset.
[Fact]
public void Validate_WhenDisabled_SucceedsEvenWithDefaults()
{
var validator = new OfflineKitOptionsValidator();
var result = validator.Validate(null, new OfflineKitOptions());
Assert.Equal(ValidateOptionsResult.Success, result);
}
// Enabling the kit without a Rekor snapshot directory must fail with a message naming it.
[Fact]
public void Validate_WhenEnabled_RequiresRekorSnapshotDirectory()
{
var validator = new OfflineKitOptionsValidator();
var options = new OfflineKitOptions
{
Enabled = true,
TrustAnchors = new List<TrustAnchorConfig>()
};
var result = validator.Validate(null, options);
Assert.False(result.Succeeded);
Assert.NotNull(result.Failures);
Assert.Contains(result.Failures!, message => message.Contains("RekorSnapshotDirectory", StringComparison.OrdinalIgnoreCase));
}
// When anchors are configured, a trust-root directory must also be configured.
[Fact]
public void Validate_WhenEnabled_RequiresTrustRootDirectoryWhenAnchorsPresent()
{
var validator = new OfflineKitOptionsValidator();
var options = new OfflineKitOptions
{
Enabled = true,
RekorOfflineMode = false,
TrustAnchors = new List<TrustAnchorConfig>
{
new()
{
AnchorId = "default",
PurlPattern = "*",
AllowedKeyIds = new List<string> { "sha256:abcdef" }
}
}
};
var result = validator.Validate(null, options);
Assert.False(result.Succeeded);
Assert.NotNull(result.Failures);
Assert.Contains(result.Failures!, message => message.Contains("TrustRootDirectory", StringComparison.OrdinalIgnoreCase));
}
// Happy path: both directories exist on disk and one well-formed anchor is configured.
[Fact]
public void Validate_WhenEnabled_WithMinimalValidConfig_Succeeds()
{
var validator = new OfflineKitOptionsValidator();
var trustRootDirectory = CreateTempDirectory("offline-kit-trust-roots");
var rekorSnapshotDirectory = CreateTempDirectory("offline-kit-rekor");
try
{
var options = new OfflineKitOptions
{
Enabled = true,
RequireDsse = true,
RekorOfflineMode = true,
TrustRootDirectory = trustRootDirectory,
RekorSnapshotDirectory = rekorSnapshotDirectory,
TrustAnchors = new List<TrustAnchorConfig>
{
new()
{
AnchorId = "default",
PurlPattern = "*",
AllowedKeyIds = new List<string> { "sha256:abcdef" },
MinSignatures = 1
}
}
};
var result = validator.Validate(null, options);
Assert.True(result.Succeeded);
}
finally
{
TryDeleteDirectory(trustRootDirectory);
TryDeleteDirectory(rekorSnapshotDirectory);
}
}
// Anchor ids differing only in case ("duplicate" vs "DUPLICATE") must be rejected,
// i.e. duplicate detection is case-insensitive.
[Fact]
public void Validate_WhenEnabled_DetectsDuplicateAnchorIds()
{
var validator = new OfflineKitOptionsValidator();
var trustRootDirectory = CreateTempDirectory("offline-kit-trust-roots");
var rekorSnapshotDirectory = CreateTempDirectory("offline-kit-rekor");
try
{
var options = new OfflineKitOptions
{
Enabled = true,
RekorOfflineMode = true,
TrustRootDirectory = trustRootDirectory,
RekorSnapshotDirectory = rekorSnapshotDirectory,
TrustAnchors = new List<TrustAnchorConfig>
{
new()
{
AnchorId = "duplicate",
PurlPattern = "*",
AllowedKeyIds = new List<string> { "sha256:aaaa" },
},
new()
{
AnchorId = "DUPLICATE",
PurlPattern = "*",
AllowedKeyIds = new List<string> { "sha256:bbbb" },
}
}
};
var result = validator.Validate(null, options);
Assert.False(result.Succeeded);
Assert.NotNull(result.Failures);
Assert.Contains(result.Failures!, message => message.Contains("Duplicate", StringComparison.OrdinalIgnoreCase));
}
finally
{
TryDeleteDirectory(trustRootDirectory);
TryDeleteDirectory(rekorSnapshotDirectory);
}
}
// Creates a unique temp directory for directory-existence checks in the validator.
private static string CreateTempDirectory(string prefix)
{
var path = Path.Combine(Path.GetTempPath(), $"{prefix}-{Guid.NewGuid():N}");
Directory.CreateDirectory(path);
return path;
}
// Best-effort cleanup; failures (e.g. file-system races) are deliberately ignored.
private static void TryDeleteDirectory(string path)
{
try
{
if (Directory.Exists(path))
{
Directory.Delete(path, recursive: true);
}
}
catch
{
// Cleanup only — never fail a test because the temp dir could not be removed.
}
}
}

Some files were not shown because too many files have changed in this diff Show More