Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-28 00:45:16 +02:00
parent 3b96b2e3ea
commit 1c6730a1d2
95 changed files with 14504 additions and 463 deletions

View File

@@ -0,0 +1,143 @@
using System.Collections.Immutable;
using StellaOps.Policy.Engine.Evaluation;
namespace StellaOps.Policy.Engine.Caching;
/// <summary>
/// Interface for policy evaluation result caching.
/// Supports deterministic caching with Redis and in-memory fallback.
/// </summary>
public interface IPolicyEvaluationCache
{
/// <summary>
/// Gets a cached evaluation result.
/// </summary>
Task<PolicyEvaluationCacheResult> GetAsync(
PolicyEvaluationCacheKey key,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets multiple cached evaluation results.
/// </summary>
Task<PolicyEvaluationCacheBatch> GetBatchAsync(
IReadOnlyList<PolicyEvaluationCacheKey> keys,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets a cached evaluation result.
/// </summary>
Task SetAsync(
PolicyEvaluationCacheKey key,
PolicyEvaluationCacheEntry entry,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets multiple cached evaluation results.
/// </summary>
Task SetBatchAsync(
IReadOnlyDictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry> entries,
CancellationToken cancellationToken = default);
/// <summary>
/// Invalidates a cached result.
/// </summary>
Task InvalidateAsync(
PolicyEvaluationCacheKey key,
CancellationToken cancellationToken = default);
/// <summary>
/// Invalidates all cached results for a policy digest.
/// </summary>
Task InvalidateByPolicyDigestAsync(
string policyDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets cache statistics.
/// </summary>
PolicyEvaluationCacheStats GetStats();
}
/// <summary>
/// Key for policy evaluation cache lookups.
/// </summary>
public sealed record PolicyEvaluationCacheKey(
string PolicyDigest,
string SubjectDigest,
string ContextDigest)
{
public string ToCacheKey() => $"pe:{PolicyDigest}:{SubjectDigest}:{ContextDigest}";
public static PolicyEvaluationCacheKey Create(
string policyDigest,
string subjectDigest,
string contextDigest)
{
return new PolicyEvaluationCacheKey(
policyDigest ?? throw new ArgumentNullException(nameof(policyDigest)),
subjectDigest ?? throw new ArgumentNullException(nameof(subjectDigest)),
contextDigest ?? throw new ArgumentNullException(nameof(contextDigest)));
}
}
/// <summary>
/// Cached evaluation entry.
/// </summary>
public sealed record PolicyEvaluationCacheEntry(
string Status,
string? Severity,
string? RuleName,
int? Priority,
ImmutableDictionary<string, string> Annotations,
ImmutableArray<string> Warnings,
string? ExceptionId,
string CorrelationId,
DateTimeOffset EvaluatedAt,
DateTimeOffset ExpiresAt);
/// <summary>
/// Result of a cache lookup.
/// </summary>
public sealed record PolicyEvaluationCacheResult(
PolicyEvaluationCacheEntry? Entry,
bool CacheHit,
CacheSource Source);
/// <summary>
/// Source of cached data.
/// </summary>
public enum CacheSource
{
None,
InMemory,
Redis
}
/// <summary>
/// Batch result of cache lookups.
/// </summary>
public sealed record PolicyEvaluationCacheBatch
{
public required IReadOnlyDictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry> Found { get; init; }
public required IReadOnlyList<PolicyEvaluationCacheKey> NotFound { get; init; }
public int CacheHits { get; init; }
public int CacheMisses { get; init; }
public int RedisHits { get; init; }
public int InMemoryHits { get; init; }
}
/// <summary>
/// Cache statistics.
/// </summary>
public sealed record PolicyEvaluationCacheStats
{
public long TotalRequests { get; init; }
public long CacheHits { get; init; }
public long CacheMisses { get; init; }
public long RedisHits { get; init; }
public long InMemoryHits { get; init; }
public long RedisFallbacks { get; init; }
public double HitRatio => TotalRequests > 0 ? (double)CacheHits / TotalRequests : 0;
public long ItemCount { get; init; }
public long EvictionCount { get; init; }
}
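A minimal consumer sketch for the contracts above, assuming a caller that already has the three digests and an `evaluate` delegate (the helper name and delegate are illustrative, not part of this commit):

using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Policy.Engine.Caching;

// Sketch only: shows the intended get-or-evaluate flow against IPolicyEvaluationCache.
public static class PolicyEvaluationCacheUsageSketch
{
    public static async Task<PolicyEvaluationCacheEntry> GetOrEvaluateAsync(
        IPolicyEvaluationCache cache,
        string policyDigest,
        string subjectDigest,
        string contextDigest,
        Func<CancellationToken, Task<PolicyEvaluationCacheEntry>> evaluate,
        CancellationToken cancellationToken)
    {
        // The key is built from the three digests that make an evaluation result deterministic.
        var key = PolicyEvaluationCacheKey.Create(policyDigest, subjectDigest, contextDigest);

        var cached = await cache.GetAsync(key, cancellationToken);
        if (cached.CacheHit && cached.Entry is not null)
        {
            return cached.Entry; // served from in-memory or Redis, per cached.Source
        }

        // Miss: evaluate once, then store so identical lookups hit the cache until ExpiresAt.
        var entry = await evaluate(cancellationToken);
        await cache.SetAsync(key, entry, cancellationToken);
        return entry;
    }
}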

View File

@@ -0,0 +1,271 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Options;
namespace StellaOps.Policy.Engine.Caching;
/// <summary>
/// In-memory implementation of policy evaluation cache.
/// Uses time-based eviction with configurable TTL.
/// </summary>
public sealed class InMemoryPolicyEvaluationCache : IPolicyEvaluationCache
{
private readonly ConcurrentDictionary<string, CacheItem> _cache;
private readonly TimeProvider _timeProvider;
private readonly ILogger<InMemoryPolicyEvaluationCache> _logger;
private readonly TimeSpan _defaultTtl;
private readonly int _maxItems;
private long _totalRequests;
private long _cacheHits;
private long _cacheMisses;
private long _evictionCount;
public InMemoryPolicyEvaluationCache(
ILogger<InMemoryPolicyEvaluationCache> logger,
TimeProvider timeProvider,
IOptions<PolicyEngineOptions> options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_cache = new ConcurrentDictionary<string, CacheItem>(StringComparer.Ordinal);
var cacheOptions = options?.Value.EvaluationCache ?? new PolicyEvaluationCacheOptions();
_defaultTtl = TimeSpan.FromMinutes(cacheOptions.DefaultTtlMinutes);
_maxItems = cacheOptions.MaxItems;
}
public Task<PolicyEvaluationCacheResult> GetAsync(
PolicyEvaluationCacheKey key,
CancellationToken cancellationToken = default)
{
Interlocked.Increment(ref _totalRequests);
var cacheKey = key.ToCacheKey();
var now = _timeProvider.GetUtcNow();
if (_cache.TryGetValue(cacheKey, out var item) && item.ExpiresAt > now)
{
Interlocked.Increment(ref _cacheHits);
return Task.FromResult(new PolicyEvaluationCacheResult(item.Entry, true, CacheSource.InMemory));
}
Interlocked.Increment(ref _cacheMisses);
// Remove expired entry if present
if (item != null)
{
_cache.TryRemove(cacheKey, out _);
}
return Task.FromResult(new PolicyEvaluationCacheResult(null, false, CacheSource.None));
}
public async Task<PolicyEvaluationCacheBatch> GetBatchAsync(
IReadOnlyList<PolicyEvaluationCacheKey> keys,
CancellationToken cancellationToken = default)
{
var found = new Dictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry>();
var notFound = new List<PolicyEvaluationCacheKey>();
var hits = 0;
var misses = 0;
foreach (var key in keys)
{
var result = await GetAsync(key, cancellationToken).ConfigureAwait(false);
if (result.Entry != null)
{
found[key] = result.Entry;
hits++;
}
else
{
notFound.Add(key);
misses++;
}
}
return new PolicyEvaluationCacheBatch
{
Found = found,
NotFound = notFound,
CacheHits = hits,
CacheMisses = misses,
InMemoryHits = hits,
RedisHits = 0,
};
}
public Task SetAsync(
PolicyEvaluationCacheKey key,
PolicyEvaluationCacheEntry entry,
CancellationToken cancellationToken = default)
{
EnsureCapacity();
var cacheKey = key.ToCacheKey();
var now = _timeProvider.GetUtcNow();
var expiresAt = entry.ExpiresAt > now ? entry.ExpiresAt : now.Add(_defaultTtl);
var item = new CacheItem(entry, expiresAt);
_cache[cacheKey] = item;
return Task.CompletedTask;
}
public Task SetBatchAsync(
IReadOnlyDictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry> entries,
CancellationToken cancellationToken = default)
{
EnsureCapacity(entries.Count);
var now = _timeProvider.GetUtcNow();
foreach (var (key, entry) in entries)
{
var cacheKey = key.ToCacheKey();
var expiresAt = entry.ExpiresAt > now ? entry.ExpiresAt : now.Add(_defaultTtl);
var item = new CacheItem(entry, expiresAt);
_cache[cacheKey] = item;
}
return Task.CompletedTask;
}
public Task InvalidateAsync(
PolicyEvaluationCacheKey key,
CancellationToken cancellationToken = default)
{
var cacheKey = key.ToCacheKey();
_cache.TryRemove(cacheKey, out _);
return Task.CompletedTask;
}
public Task InvalidateByPolicyDigestAsync(
string policyDigest,
CancellationToken cancellationToken = default)
{
var prefix = $"pe:{policyDigest}:";
var keysToRemove = _cache.Keys.Where(k => k.StartsWith(prefix, StringComparison.Ordinal)).ToList();
foreach (var key in keysToRemove)
{
_cache.TryRemove(key, out _);
}
_logger.LogDebug("Invalidated {Count} cache entries for policy digest {Digest}", keysToRemove.Count, policyDigest);
return Task.CompletedTask;
}
public PolicyEvaluationCacheStats GetStats()
{
return new PolicyEvaluationCacheStats
{
TotalRequests = Interlocked.Read(ref _totalRequests),
CacheHits = Interlocked.Read(ref _cacheHits),
CacheMisses = Interlocked.Read(ref _cacheMisses),
InMemoryHits = Interlocked.Read(ref _cacheHits),
RedisHits = 0,
RedisFallbacks = 0,
ItemCount = _cache.Count,
EvictionCount = Interlocked.Read(ref _evictionCount),
};
}
private void EnsureCapacity(int additionalItems = 1)
{
if (_cache.Count + additionalItems <= _maxItems)
{
return;
}
var now = _timeProvider.GetUtcNow();
var itemsToRemove = _cache.Count + additionalItems - _maxItems + (_maxItems / 10);
// First, remove expired items
var expiredKeys = _cache
.Where(kvp => kvp.Value.ExpiresAt <= now)
.Select(kvp => kvp.Key)
.ToList();
foreach (var key in expiredKeys)
{
if (_cache.TryRemove(key, out _))
{
Interlocked.Increment(ref _evictionCount);
itemsToRemove--;
}
}
if (itemsToRemove <= 0)
{
return;
}
// Then, remove oldest items by expiration time
var oldestKeys = _cache
.OrderBy(kvp => kvp.Value.ExpiresAt)
.Take(itemsToRemove)
.Select(kvp => kvp.Key)
.ToList();
foreach (var key in oldestKeys)
{
if (_cache.TryRemove(key, out _))
{
Interlocked.Increment(ref _evictionCount);
}
}
_logger.LogDebug(
"Evicted {EvictedCount} evaluation cache entries (expired: {ExpiredCount}, oldest: {OldestCount})",
expiredKeys.Count + oldestKeys.Count,
expiredKeys.Count,
oldestKeys.Count);
}
private sealed record CacheItem(PolicyEvaluationCacheEntry Entry, DateTimeOffset ExpiresAt);
}
/// <summary>
/// Configuration options for policy evaluation cache.
/// </summary>
public sealed class PolicyEvaluationCacheOptions
{
/// <summary>
/// Default TTL for cache entries in minutes.
/// </summary>
public int DefaultTtlMinutes { get; set; } = 30;
/// <summary>
/// Maximum number of items in the in-memory cache.
/// </summary>
public int MaxItems { get; set; } = 50000;
/// <summary>
/// Whether to enable Redis as a distributed cache layer.
/// </summary>
public bool EnableRedis { get; set; }
/// <summary>
/// Redis connection string.
/// </summary>
public string? RedisConnectionString { get; set; }
/// <summary>
/// Redis key prefix for policy evaluations.
/// </summary>
public string RedisKeyPrefix { get; set; } = "stellaops:pe:";
/// <summary>
/// Whether to use hybrid mode (in-memory + Redis).
/// </summary>
public bool HybridMode { get; set; } = true;
/// <summary>
/// Timeout for Redis operations in milliseconds.
/// </summary>
public int RedisTimeoutMs { get; set; } = 100;
}
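A possible host wiring for the in-memory cache, assuming the standard Microsoft.Extensions.DependencyInjection and configuration packages; the extension-method name and the "PolicyEngine" section name are assumptions, since the real composition root is not part of this diff:

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.Options;

// Illustrative registration helper only.
public static class PolicyEvaluationCacheServiceCollectionExtensions
{
    public static IServiceCollection AddPolicyEvaluationCache(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        // Bind PolicyEngineOptions (which exposes EvaluationCache) from an assumed "PolicyEngine" section.
        services.Configure<PolicyEngineOptions>(configuration.GetSection("PolicyEngine"));
        services.AddSingleton(TimeProvider.System);
        services.AddSingleton<IPolicyEvaluationCache, InMemoryPolicyEvaluationCache>();
        return services;
    }
}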

View File

@@ -6,7 +6,18 @@ namespace StellaOps.Policy.Engine.Domain;
public sealed record PolicyBundleRequest(
[property: JsonPropertyName("dsl")] PolicyDslPayload Dsl,
[property: JsonPropertyName("signingKeyId")] string? SigningKeyId);
[property: JsonPropertyName("signingKeyId")] string? SigningKeyId,
[property: JsonPropertyName("provenance")] PolicyProvenanceInput? Provenance = null);
/// <summary>
/// Input provenance information for policy compilation.
/// </summary>
public sealed record PolicyProvenanceInput(
[property: JsonPropertyName("sourceType")] string SourceType,
[property: JsonPropertyName("sourceUrl")] string? SourceUrl = null,
[property: JsonPropertyName("submitter")] string? Submitter = null,
[property: JsonPropertyName("commitSha")] string? CommitSha = null,
[property: JsonPropertyName("branch")] string? Branch = null);
public sealed record PolicyBundleResponse(
[property: JsonPropertyName("success")] bool Success,
@@ -14,4 +25,18 @@ public sealed record PolicyBundleResponse(
[property: JsonPropertyName("signature")] string? Signature,
[property: JsonPropertyName("sizeBytes")] int SizeBytes,
[property: JsonPropertyName("createdAt")] DateTimeOffset? CreatedAt,
[property: JsonPropertyName("diagnostics")] ImmutableArray<PolicyIssue> Diagnostics);
[property: JsonPropertyName("diagnostics")] ImmutableArray<PolicyIssue> Diagnostics,
[property: JsonPropertyName("aocMetadata")] PolicyAocMetadataResponse? AocMetadata = null);
/// <summary>
/// AOC metadata returned from policy compilation.
/// </summary>
public sealed record PolicyAocMetadataResponse(
[property: JsonPropertyName("compilationId")] string CompilationId,
[property: JsonPropertyName("compilerVersion")] string CompilerVersion,
[property: JsonPropertyName("compiledAt")] DateTimeOffset CompiledAt,
[property: JsonPropertyName("sourceDigest")] string SourceDigest,
[property: JsonPropertyName("artifactDigest")] string ArtifactDigest,
[property: JsonPropertyName("complexityScore")] double ComplexityScore,
[property: JsonPropertyName("ruleCount")] int RuleCount,
[property: JsonPropertyName("durationMilliseconds")] long DurationMilliseconds);

View File

@@ -35,17 +35,17 @@ internal sealed class PolicyPackRecord
=> revisions.IsEmpty ? 1 : revisions.Keys.Max() + 1;
}
internal sealed class PolicyRevisionRecord
{
private readonly ConcurrentDictionary<string, PolicyActivationApproval> approvals = new(StringComparer.OrdinalIgnoreCase);
public PolicyBundleRecord? Bundle { get; private set; }
public PolicyRevisionRecord(int version, bool requiresTwoPerson, PolicyRevisionStatus status, DateTimeOffset createdAt)
{
Version = version;
RequiresTwoPersonApproval = requiresTwoPerson;
Status = status;
CreatedAt = createdAt;
}
@@ -73,43 +73,102 @@ internal sealed class PolicyRevisionRecord
}
}
public PolicyActivationApprovalStatus AddApproval(PolicyActivationApproval approval)
{
if (!approvals.TryAdd(approval.ActorId, approval))
{
return PolicyActivationApprovalStatus.Duplicate;
}
return approvals.Count >= 2
? PolicyActivationApprovalStatus.ThresholdReached
: PolicyActivationApprovalStatus.Pending;
}
public void SetBundle(PolicyBundleRecord bundle)
{
Bundle = bundle ?? throw new ArgumentNullException(nameof(bundle));
}
}
internal enum PolicyRevisionStatus
{
Draft,
Approved,
Active
}
internal sealed record PolicyActivationApproval(string ActorId, DateTimeOffset ApprovedAt, string? Comment);
internal enum PolicyActivationApprovalStatus
{
Pending,
ThresholdReached,
Duplicate
}
internal sealed record PolicyBundleRecord(
string Digest,
string Signature,
int Size,
DateTimeOffset CreatedAt,
ImmutableArray<byte> Payload,
PolicyAocMetadata? AocMetadata = null);
/// <summary>
/// Attestation of Compliance metadata for a policy revision.
/// Links policy decisions to explanation trees and AOC chain.
/// </summary>
internal sealed record PolicyAocMetadata(
/// <summary>Unique identifier for this compilation run.</summary>
string CompilationId,
/// <summary>Version of the compiler used (e.g., "stella-dsl@1").</summary>
string CompilerVersion,
/// <summary>Timestamp when compilation started.</summary>
DateTimeOffset CompiledAt,
/// <summary>SHA256 digest of the source policy document.</summary>
string SourceDigest,
/// <summary>SHA256 digest of the compiled artifact.</summary>
string ArtifactDigest,
/// <summary>Complexity score from compilation analysis.</summary>
double ComplexityScore,
/// <summary>Number of rules in the compiled policy.</summary>
int RuleCount,
/// <summary>Compilation duration in milliseconds.</summary>
long DurationMilliseconds,
/// <summary>Provenance information about the source.</summary>
PolicyProvenance? Provenance = null,
/// <summary>Reference to the signed attestation envelope.</summary>
PolicyAttestationRef? AttestationRef = null);
/// <summary>
/// Provenance information for policy source tracking.
/// </summary>
internal sealed record PolicyProvenance(
/// <summary>Type of source (git, upload, api).</summary>
string SourceType,
/// <summary>URL or path to the source.</summary>
string? SourceUrl,
/// <summary>User or service that submitted the policy.</summary>
string? Submitter,
/// <summary>Git commit SHA if applicable.</summary>
string? CommitSha,
/// <summary>Git branch if applicable.</summary>
string? Branch,
/// <summary>Timestamp when source was ingested.</summary>
DateTimeOffset IngestedAt);
/// <summary>
/// Reference to a signed DSSE attestation for the policy compilation.
/// </summary>
internal sealed record PolicyAttestationRef(
/// <summary>Unique identifier for the attestation.</summary>
string AttestationId,
/// <summary>SHA256 digest of the attestation envelope.</summary>
string EnvelopeDigest,
/// <summary>URI where the attestation can be retrieved.</summary>
string? Uri,
/// <summary>Key identifier used for signing.</summary>
string? SigningKeyId,
/// <summary>Timestamp when attestation was created.</summary>
DateTimeOffset CreatedAt);

View File

@@ -0,0 +1,495 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.IncrementalOrchestrator;
/// <summary>
/// Background service that continuously processes policy change events
/// and schedules incremental re-evaluations.
/// </summary>
public sealed class IncrementalOrchestratorBackgroundService : IDisposable
{
private readonly IncrementalPolicyOrchestrator _orchestrator;
private readonly IncrementalOrchestratorOptions _options;
private readonly TimeProvider _timeProvider;
private readonly IncrementalOrchestratorMetrics _metrics;
private CancellationTokenSource? _cts;
private Task? _executingTask;
private bool _disposed;
public IncrementalOrchestratorBackgroundService(
IncrementalPolicyOrchestrator orchestrator,
IncrementalOrchestratorOptions? options = null,
TimeProvider? timeProvider = null,
IncrementalOrchestratorMetrics? metrics = null)
{
_orchestrator = orchestrator ?? throw new ArgumentNullException(nameof(orchestrator));
_options = options ?? IncrementalOrchestratorOptions.Default;
_timeProvider = timeProvider ?? TimeProvider.System;
_metrics = metrics ?? new IncrementalOrchestratorMetrics();
}
/// <summary>
/// Starts the background processing.
/// </summary>
public Task StartAsync(CancellationToken cancellationToken)
{
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_executingTask = ExecuteAsync(_cts.Token);
return Task.CompletedTask;
}
/// <summary>
/// Stops the background processing.
/// </summary>
public async Task StopAsync(CancellationToken cancellationToken)
{
if (_cts is null || _executingTask is null)
{
return;
}
await _cts.CancelAsync().ConfigureAwait(false);
try
{
await _executingTask.WaitAsync(cancellationToken).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
// Expected during shutdown
}
}
private async Task ExecuteAsync(CancellationToken stoppingToken)
{
using var pollTimer = new PeriodicTimer(_options.PollInterval);
while (!stoppingToken.IsCancellationRequested)
{
try
{
await pollTimer.WaitForNextTickAsync(stoppingToken).ConfigureAwait(false);
var stopwatch = Stopwatch.StartNew();
var result = await _orchestrator.ProcessAsync(stoppingToken).ConfigureAwait(false);
stopwatch.Stop();
// Record metrics
_metrics.RecordProcessingCycle(result, stopwatch.Elapsed);
if (result.HasWork)
{
_metrics.RecordEventsProcessed(
result.TotalEventsRead,
result.EventsSkippedOld,
result.EventsSkippedDuplicate);
_metrics.RecordBatches(
result.BatchesProcessed,
result.BatchesFailed);
foreach (var jobId in result.JobsCreated)
{
_metrics.RecordJobCreated(jobId);
}
}
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
break;
}
catch (Exception ex)
{
_metrics.RecordError(ex);
// Wait before retrying after error
try
{
await Task.Delay(_options.RetryBackoff, stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
break;
}
}
}
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
_cts?.Cancel();
_cts?.Dispose();
}
}
/// <summary>
/// Metrics collector for the incremental orchestrator.
/// </summary>
public class IncrementalOrchestratorMetrics
{
private long _totalCycles;
private long _totalEventsRead;
private long _totalEventsSkippedOld;
private long _totalEventsSkippedDuplicate;
private long _totalBatchesProcessed;
private long _totalBatchesFailed;
private long _totalJobsCreated;
private long _totalErrors;
private TimeSpan _totalProcessingTime;
private readonly object _lock = new();
/// <summary>
/// Records a processing cycle.
/// </summary>
public virtual void RecordProcessingCycle(OrchestratorProcessResult result, TimeSpan duration)
{
lock (_lock)
{
_totalCycles++;
_totalProcessingTime += duration;
}
}
/// <summary>
/// Records events processed.
/// </summary>
public virtual void RecordEventsProcessed(int total, int skippedOld, int skippedDuplicate)
{
lock (_lock)
{
_totalEventsRead += total;
_totalEventsSkippedOld += skippedOld;
_totalEventsSkippedDuplicate += skippedDuplicate;
}
}
/// <summary>
/// Records batches processed.
/// </summary>
public virtual void RecordBatches(int processed, int failed)
{
lock (_lock)
{
_totalBatchesProcessed += processed;
_totalBatchesFailed += failed;
}
}
/// <summary>
/// Records a job created.
/// </summary>
public virtual void RecordJobCreated(string jobId)
{
Interlocked.Increment(ref _totalJobsCreated);
}
/// <summary>
/// Records an error.
/// </summary>
public virtual void RecordError(Exception ex)
{
Interlocked.Increment(ref _totalErrors);
}
/// <summary>
/// Gets current metrics snapshot.
/// </summary>
public IncrementalOrchestratorMetricsSnapshot GetSnapshot()
{
lock (_lock)
{
return new IncrementalOrchestratorMetricsSnapshot
{
TotalCycles = _totalCycles,
TotalEventsRead = _totalEventsRead,
TotalEventsSkippedOld = _totalEventsSkippedOld,
TotalEventsSkippedDuplicate = _totalEventsSkippedDuplicate,
TotalBatchesProcessed = _totalBatchesProcessed,
TotalBatchesFailed = _totalBatchesFailed,
TotalJobsCreated = _totalJobsCreated,
TotalErrors = _totalErrors,
TotalProcessingTime = _totalProcessingTime
};
}
}
}
/// <summary>
/// Snapshot of orchestrator metrics.
/// </summary>
public sealed record IncrementalOrchestratorMetricsSnapshot
{
public long TotalCycles { get; init; }
public long TotalEventsRead { get; init; }
public long TotalEventsSkippedOld { get; init; }
public long TotalEventsSkippedDuplicate { get; init; }
public long TotalBatchesProcessed { get; init; }
public long TotalBatchesFailed { get; init; }
public long TotalJobsCreated { get; init; }
public long TotalErrors { get; init; }
public TimeSpan TotalProcessingTime { get; init; }
public double AverageProcessingTimeMs =>
TotalCycles > 0 ? TotalProcessingTime.TotalMilliseconds / TotalCycles : 0;
public double BatchSuccessRate =>
TotalBatchesProcessed + TotalBatchesFailed > 0
? (double)TotalBatchesProcessed / (TotalBatchesProcessed + TotalBatchesFailed)
: 1.0;
}
/// <summary>
/// Builder for creating a configured IncrementalOrchestratorBackgroundService.
/// </summary>
public sealed class IncrementalOrchestratorBuilder
{
private IPolicyChangeEventSource? _eventSource;
private IPolicyReEvaluationSubmitter? _submitter;
private IPolicyChangeIdempotencyStore? _idempotencyStore;
private IncrementalOrchestratorOptions _options = IncrementalOrchestratorOptions.Default;
private TimeProvider _timeProvider = TimeProvider.System;
private IncrementalOrchestratorMetrics? _metrics;
public IncrementalOrchestratorBuilder WithEventSource(IPolicyChangeEventSource source)
{
_eventSource = source;
return this;
}
public IncrementalOrchestratorBuilder WithSubmitter(IPolicyReEvaluationSubmitter submitter)
{
_submitter = submitter;
return this;
}
public IncrementalOrchestratorBuilder WithIdempotencyStore(IPolicyChangeIdempotencyStore store)
{
_idempotencyStore = store;
return this;
}
public IncrementalOrchestratorBuilder WithOptions(IncrementalOrchestratorOptions options)
{
_options = options;
return this;
}
public IncrementalOrchestratorBuilder WithOptions(Func<IncrementalOrchestratorOptions, IncrementalOrchestratorOptions> configure)
{
// The options record only exposes init-only properties, so callers customise via a `with` expression over the defaults.
_options = configure(IncrementalOrchestratorOptions.Default);
return this;
}
public IncrementalOrchestratorBuilder WithTimeProvider(TimeProvider timeProvider)
{
_timeProvider = timeProvider;
return this;
}
public IncrementalOrchestratorBuilder WithMetrics(IncrementalOrchestratorMetrics metrics)
{
_metrics = metrics;
return this;
}
public IncrementalOrchestratorBackgroundService Build()
{
if (_eventSource is null)
{
throw new InvalidOperationException("Event source is required");
}
if (_submitter is null)
{
throw new InvalidOperationException("Submitter is required");
}
_idempotencyStore ??= new InMemoryPolicyChangeIdempotencyStore();
_metrics ??= new IncrementalOrchestratorMetrics();
var orchestrator = new IncrementalPolicyOrchestrator(
_eventSource,
_submitter,
_idempotencyStore,
_options,
_timeProvider);
return new IncrementalOrchestratorBackgroundService(
orchestrator,
_options,
_timeProvider,
_metrics);
}
}
/// <summary>
/// Default implementation that creates policy run jobs from change batches.
/// </summary>
public sealed class DefaultPolicyReEvaluationSubmitter : IPolicyReEvaluationSubmitter
{
private readonly TimeProvider _timeProvider;
public DefaultPolicyReEvaluationSubmitter(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Delegate for creating policy run jobs.
/// </summary>
public Func<PolicyRunJobRequest, CancellationToken, Task<string>>? OnSubmitJob { get; set; }
public async Task<PolicyReEvaluationResult> SubmitAsync(
PolicyChangeBatch batch,
CancellationToken cancellationToken)
{
var stopwatch = Stopwatch.StartNew();
var jobIds = new List<string>();
try
{
// Build metadata for incremental targeting
var metadata = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
if (!batch.VulnerabilityIds.IsDefaultOrEmpty)
{
metadata["delta.vulnerabilities"] = string.Join(";", batch.VulnerabilityIds);
}
if (!batch.AffectedPurls.IsDefaultOrEmpty)
{
metadata["delta.purls"] = string.Join(";", batch.AffectedPurls.Take(100)); // Limit size
}
if (!batch.AffectedProductKeys.IsDefaultOrEmpty)
{
metadata["delta.productkeys"] = string.Join(";", batch.AffectedProductKeys.Take(100));
}
if (!batch.AffectedSbomIds.IsDefaultOrEmpty)
{
metadata["delta.sboms"] = string.Join(";", batch.AffectedSbomIds.Take(100));
}
metadata["orchestrator.batchId"] = batch.BatchId;
metadata["orchestrator.eventCount"] = batch.Events.Length.ToString();
metadata["orchestrator.priority"] = batch.Priority.ToString().ToLowerInvariant();
var request = new PolicyRunJobRequest
{
TenantId = batch.TenantId,
Mode = PolicyRunJobMode.Incremental,
Priority = MapPriority(batch.Priority),
Metadata = metadata.ToImmutable(),
QueuedAt = _timeProvider.GetUtcNow(),
CorrelationId = batch.BatchId
};
if (OnSubmitJob is not null)
{
var jobId = await OnSubmitJob(request, cancellationToken).ConfigureAwait(false);
jobIds.Add(jobId);
}
else
{
// Simulate job creation for testing
jobIds.Add($"prj-{batch.BatchId[4..]}");
}
stopwatch.Stop();
return new PolicyReEvaluationResult
{
Succeeded = true,
JobIds = jobIds.ToImmutableArray(),
EstimatedFindingsCount = EstimateFindings(batch),
ProcessingTimeMs = stopwatch.ElapsedMilliseconds
};
}
catch (Exception ex)
{
stopwatch.Stop();
return new PolicyReEvaluationResult
{
Succeeded = false,
JobIds = ImmutableArray<string>.Empty,
Error = ex.Message,
ProcessingTimeMs = stopwatch.ElapsedMilliseconds
};
}
}
private static PolicyRunJobPriority MapPriority(PolicyChangePriority priority)
{
return priority switch
{
PolicyChangePriority.Emergency => PolicyRunJobPriority.Emergency,
PolicyChangePriority.High => PolicyRunJobPriority.High,
_ => PolicyRunJobPriority.Normal
};
}
private static int EstimateFindings(PolicyChangeBatch batch)
{
// Rough estimate based on batch contents
var vulnCount = batch.VulnerabilityIds.Length;
var purlCount = batch.AffectedPurls.Length;
var sbomCount = batch.AffectedSbomIds.Length;
// Assume average of 5 findings per vulnerability per SBOM
if (vulnCount > 0 && sbomCount > 0)
{
return vulnCount * sbomCount * 5;
}
// Assume average of 10 findings per PURL
if (purlCount > 0)
{
return purlCount * 10;
}
return batch.Events.Length * 5;
}
}
/// <summary>
/// Request to create a policy run job.
/// </summary>
public sealed record PolicyRunJobRequest
{
public required string TenantId { get; init; }
public required PolicyRunJobMode Mode { get; init; }
public required PolicyRunJobPriority Priority { get; init; }
public ImmutableSortedDictionary<string, string>? Metadata { get; init; }
public DateTimeOffset QueuedAt { get; init; }
public string? CorrelationId { get; init; }
}
/// <summary>
/// Policy run job mode.
/// </summary>
public enum PolicyRunJobMode
{
Full,
Incremental
}
/// <summary>
/// Policy run job priority.
/// </summary>
public enum PolicyRunJobPriority
{
Normal,
High,
Emergency
}
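A hedged wiring sketch showing how the builder, the in-memory test source, and the default submitter from this file are intended to compose; the job-submission delegate body is a placeholder rather than the real scheduler integration:

using StellaOps.Policy.Engine.IncrementalOrchestrator;

var eventSource = new InMemoryPolicyChangeEventSource();
var submitter = new DefaultPolicyReEvaluationSubmitter
{
    // Placeholder delegate; a production host would forward the request to the policy run scheduler here.
    OnSubmitJob = (request, ct) => Task.FromResult($"prj-{Guid.NewGuid():N}")
};

var service = new IncrementalOrchestratorBuilder()
    .WithEventSource(eventSource)
    .WithSubmitter(submitter)
    .WithOptions(IncrementalOrchestratorOptions.Default with { PollInterval = TimeSpan.FromSeconds(1) })
    .Build();

await service.StartAsync(CancellationToken.None);
// ... enqueue change events and let the poll loop drain them ...
await service.StopAsync(CancellationToken.None);
service.Dispose();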

View File

@@ -0,0 +1,536 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Policy.Engine.IncrementalOrchestrator;
/// <summary>
/// Configuration options for the incremental policy orchestrator.
/// </summary>
public sealed record IncrementalOrchestratorOptions
{
/// <summary>
/// How often to poll for new change events.
/// </summary>
public TimeSpan PollInterval { get; init; } = TimeSpan.FromSeconds(5);
/// <summary>
/// How long to wait before batching events together.
/// </summary>
public TimeSpan BatchWindow { get; init; } = TimeSpan.FromSeconds(10);
/// <summary>
/// Maximum events per batch.
/// </summary>
public int MaxBatchSize { get; init; } = 100;
/// <summary>
/// Maximum retry attempts for failed processing.
/// </summary>
public int MaxRetryAttempts { get; init; } = 3;
/// <summary>
/// Delay between retry attempts.
/// </summary>
public TimeSpan RetryBackoff { get; init; } = TimeSpan.FromSeconds(5);
/// <summary>
/// Whether to enable deduplication within batch window.
/// </summary>
public bool EnableDeduplication { get; init; } = true;
/// <summary>
/// Maximum age of events to process (older events are skipped).
/// </summary>
public TimeSpan MaxEventAge { get; init; } = TimeSpan.FromHours(24);
/// <summary>
/// Default options.
/// </summary>
public static IncrementalOrchestratorOptions Default { get; } = new();
}
/// <summary>
/// Interface for reading change events from a source.
/// </summary>
public interface IPolicyChangeEventSource
{
/// <summary>
/// Reads pending change events.
/// </summary>
IAsyncEnumerable<PolicyChangeEvent> ReadAsync(CancellationToken cancellationToken);
/// <summary>
/// Acknowledges that an event has been processed.
/// </summary>
Task AcknowledgeAsync(string eventId, CancellationToken cancellationToken);
/// <summary>
/// Marks an event as failed for retry.
/// </summary>
Task MarkFailedAsync(string eventId, string error, CancellationToken cancellationToken);
}
/// <summary>
/// Interface for submitting policy re-evaluation jobs.
/// </summary>
public interface IPolicyReEvaluationSubmitter
{
/// <summary>
/// Submits a batch for re-evaluation.
/// </summary>
Task<PolicyReEvaluationResult> SubmitAsync(
PolicyChangeBatch batch,
CancellationToken cancellationToken);
}
/// <summary>
/// Interface for idempotency tracking.
/// </summary>
public interface IPolicyChangeIdempotencyStore
{
/// <summary>
/// Checks if an event has already been processed.
/// </summary>
Task<bool> HasSeenAsync(string eventId, CancellationToken cancellationToken);
/// <summary>
/// Marks an event as processed.
/// </summary>
Task MarkSeenAsync(string eventId, DateTimeOffset processedAt, CancellationToken cancellationToken);
}
/// <summary>
/// Result of a policy re-evaluation submission.
/// </summary>
public sealed record PolicyReEvaluationResult
{
/// <summary>
/// Whether the submission succeeded.
/// </summary>
public required bool Succeeded { get; init; }
/// <summary>
/// Job ID(s) created for the re-evaluation.
/// </summary>
public required ImmutableArray<string> JobIds { get; init; }
/// <summary>
/// Number of findings that will be re-evaluated.
/// </summary>
public int EstimatedFindingsCount { get; init; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Processing duration.
/// </summary>
public long ProcessingTimeMs { get; init; }
}
/// <summary>
/// Orchestrates incremental policy re-evaluations in response to
/// advisory, VEX, and SBOM change streams.
/// </summary>
public sealed class IncrementalPolicyOrchestrator
{
private readonly IPolicyChangeEventSource _eventSource;
private readonly IPolicyReEvaluationSubmitter _submitter;
private readonly IPolicyChangeIdempotencyStore _idempotencyStore;
private readonly IncrementalOrchestratorOptions _options;
private readonly TimeProvider _timeProvider;
public IncrementalPolicyOrchestrator(
IPolicyChangeEventSource eventSource,
IPolicyReEvaluationSubmitter submitter,
IPolicyChangeIdempotencyStore idempotencyStore,
IncrementalOrchestratorOptions? options = null,
TimeProvider? timeProvider = null)
{
_eventSource = eventSource ?? throw new ArgumentNullException(nameof(eventSource));
_submitter = submitter ?? throw new ArgumentNullException(nameof(submitter));
_idempotencyStore = idempotencyStore ?? throw new ArgumentNullException(nameof(idempotencyStore));
_options = options ?? IncrementalOrchestratorOptions.Default;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Processes a single batch of pending events.
/// </summary>
public async Task<OrchestratorProcessResult> ProcessAsync(CancellationToken cancellationToken)
{
var stopwatch = Stopwatch.StartNew();
var now = _timeProvider.GetUtcNow();
var cutoffTime = now - _options.MaxEventAge;
var eventsByTenant = new Dictionary<string, List<PolicyChangeEvent>>(StringComparer.OrdinalIgnoreCase);
var skippedOld = 0;
var skippedDuplicate = 0;
var totalRead = 0;
// Read and group events by tenant
await foreach (var evt in _eventSource.ReadAsync(cancellationToken))
{
totalRead++;
// Skip events older than max age
if (evt.OccurredAt < cutoffTime)
{
skippedOld++;
await _eventSource.AcknowledgeAsync(evt.EventId, cancellationToken).ConfigureAwait(false);
continue;
}
// Check idempotency
if (_options.EnableDeduplication &&
await _idempotencyStore.HasSeenAsync(evt.EventId, cancellationToken).ConfigureAwait(false))
{
skippedDuplicate++;
await _eventSource.AcknowledgeAsync(evt.EventId, cancellationToken).ConfigureAwait(false);
continue;
}
if (!eventsByTenant.TryGetValue(evt.TenantId, out var tenantEvents))
{
tenantEvents = new List<PolicyChangeEvent>();
eventsByTenant[evt.TenantId] = tenantEvents;
}
tenantEvents.Add(evt);
// Limit total events per processing cycle
if (totalRead >= _options.MaxBatchSize * 10)
{
break;
}
}
var batchesProcessed = 0;
var batchesFailed = 0;
var jobsCreated = new List<string>();
// Process each tenant's events
foreach (var (tenantId, events) in eventsByTenant.OrderBy(kvp => kvp.Key, StringComparer.Ordinal))
{
var batches = CreateBatches(tenantId, events, now);
foreach (var batch in batches)
{
var attempts = 0;
var success = false;
while (attempts < _options.MaxRetryAttempts && !success)
{
try
{
cancellationToken.ThrowIfCancellationRequested();
var result = await _submitter.SubmitAsync(batch, cancellationToken).ConfigureAwait(false);
if (result.Succeeded)
{
success = true;
batchesProcessed++;
jobsCreated.AddRange(result.JobIds);
// Mark all events in batch as seen
foreach (var evt in batch.Events)
{
await _idempotencyStore.MarkSeenAsync(evt.EventId, now, cancellationToken)
.ConfigureAwait(false);
await _eventSource.AcknowledgeAsync(evt.EventId, cancellationToken)
.ConfigureAwait(false);
}
}
else
{
attempts++;
if (attempts < _options.MaxRetryAttempts)
{
await Task.Delay(_options.RetryBackoff, cancellationToken).ConfigureAwait(false);
}
else
{
// Count batches that exhaust their retries without throwing and surface the submitter error.
batchesFailed++;
foreach (var evt in batch.Events)
{
await _eventSource.MarkFailedAsync(evt.EventId, result.Error ?? "re-evaluation submission failed", cancellationToken)
.ConfigureAwait(false);
}
}
}
}
catch (OperationCanceledException)
{
throw;
}
catch (Exception ex)
{
attempts++;
if (attempts >= _options.MaxRetryAttempts)
{
batchesFailed++;
foreach (var evt in batch.Events)
{
await _eventSource.MarkFailedAsync(evt.EventId, ex.Message, cancellationToken)
.ConfigureAwait(false);
}
}
else
{
await Task.Delay(_options.RetryBackoff, cancellationToken).ConfigureAwait(false);
}
}
}
}
}
stopwatch.Stop();
return new OrchestratorProcessResult
{
TotalEventsRead = totalRead,
EventsSkippedOld = skippedOld,
EventsSkippedDuplicate = skippedDuplicate,
BatchesProcessed = batchesProcessed,
BatchesFailed = batchesFailed,
JobsCreated = jobsCreated.ToImmutableArray(),
ProcessingTimeMs = stopwatch.ElapsedMilliseconds
};
}
/// <summary>
/// Creates deterministically ordered batches from events.
/// </summary>
private IReadOnlyList<PolicyChangeBatch> CreateBatches(
string tenantId,
IReadOnlyList<PolicyChangeEvent> events,
DateTimeOffset now)
{
// Sort by priority (highest first), then by occurred time
var ordered = events
.OrderByDescending(e => (int)e.Priority)
.ThenBy(e => e.OccurredAt)
.ThenBy(e => e.EventId, StringComparer.Ordinal)
.ToList();
var batches = new List<PolicyChangeBatch>();
var currentBatch = new List<PolicyChangeEvent>();
var currentPriority = PolicyChangePriority.Normal;
foreach (var evt in ordered)
{
// Start new batch if priority changes or batch is full
if (currentBatch.Count > 0 &&
(evt.Priority != currentPriority || currentBatch.Count >= _options.MaxBatchSize))
{
batches.Add(CreateBatchFromEvents(tenantId, currentBatch, currentPriority, now));
currentBatch = new List<PolicyChangeEvent>();
}
currentBatch.Add(evt);
currentPriority = evt.Priority;
}
// Add final batch
if (currentBatch.Count > 0)
{
batches.Add(CreateBatchFromEvents(tenantId, currentBatch, currentPriority, now));
}
return batches;
}
private static PolicyChangeBatch CreateBatchFromEvents(
string tenantId,
IReadOnlyList<PolicyChangeEvent> events,
PolicyChangePriority priority,
DateTimeOffset createdAt)
{
var batchId = CreateBatchId(tenantId, events, createdAt);
// Aggregate all affected items
var allPurls = events
.SelectMany(e => e.AffectedPurls)
.Where(p => !string.IsNullOrWhiteSpace(p))
.Distinct(StringComparer.Ordinal)
.OrderBy(p => p, StringComparer.Ordinal)
.ToImmutableArray();
var allProductKeys = events
.SelectMany(e => e.AffectedProductKeys)
.Where(k => !string.IsNullOrWhiteSpace(k))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(k => k, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
var allSbomIds = events
.SelectMany(e => e.AffectedSbomIds)
.Where(id => !string.IsNullOrWhiteSpace(id))
.Distinct(StringComparer.Ordinal)
.OrderBy(id => id, StringComparer.Ordinal)
.ToImmutableArray();
var allVulnIds = events
.Select(e => e.VulnerabilityId)
.Where(v => !string.IsNullOrWhiteSpace(v))
.Cast<string>()
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(v => v, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
return new PolicyChangeBatch
{
BatchId = batchId,
TenantId = tenantId,
Events = events.ToImmutableArray(),
Priority = priority,
CreatedAt = createdAt,
AffectedPurls = allPurls,
AffectedProductKeys = allProductKeys,
AffectedSbomIds = allSbomIds,
VulnerabilityIds = allVulnIds
};
}
private static string CreateBatchId(
string tenantId,
IReadOnlyList<PolicyChangeEvent> events,
DateTimeOffset createdAt)
{
var builder = new StringBuilder();
builder.Append(tenantId).Append('|');
builder.Append(createdAt.ToString("O")).Append('|');
foreach (var evt in events.OrderBy(e => e.EventId, StringComparer.Ordinal))
{
builder.Append(evt.EventId).Append('|');
}
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
return $"pcb-{Convert.ToHexStringLower(bytes)[..16]}";
}
}
/// <summary>
/// Result of an orchestrator processing cycle.
/// </summary>
public sealed record OrchestratorProcessResult
{
/// <summary>
/// Total events read from source.
/// </summary>
public required int TotalEventsRead { get; init; }
/// <summary>
/// Events skipped due to age.
/// </summary>
public required int EventsSkippedOld { get; init; }
/// <summary>
/// Events skipped due to deduplication.
/// </summary>
public required int EventsSkippedDuplicate { get; init; }
/// <summary>
/// Batches successfully processed.
/// </summary>
public required int BatchesProcessed { get; init; }
/// <summary>
/// Batches that failed after retries.
/// </summary>
public required int BatchesFailed { get; init; }
/// <summary>
/// Job IDs created during processing.
/// </summary>
public required ImmutableArray<string> JobsCreated { get; init; }
/// <summary>
/// Total processing time in milliseconds.
/// </summary>
public required long ProcessingTimeMs { get; init; }
/// <summary>
/// Whether any work was done.
/// </summary>
public bool HasWork => TotalEventsRead > 0;
/// <summary>
/// Whether all batches succeeded.
/// </summary>
public bool AllSucceeded => BatchesFailed == 0;
}
/// <summary>
/// In-memory implementation of policy change event source for testing.
/// </summary>
public sealed class InMemoryPolicyChangeEventSource : IPolicyChangeEventSource
{
private readonly ConcurrentQueue<PolicyChangeEvent> _pending = new();
private readonly ConcurrentDictionary<string, PolicyChangeEvent> _dequeued = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, PolicyChangeEvent> _failed = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, PolicyChangeEvent> _acknowledged = new(StringComparer.Ordinal);
public void Enqueue(PolicyChangeEvent evt)
{
_pending.Enqueue(evt);
}
public void EnqueueRange(IEnumerable<PolicyChangeEvent> events)
{
foreach (var evt in events)
{
_pending.Enqueue(evt);
}
}
public async IAsyncEnumerable<PolicyChangeEvent> ReadAsync(
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
{
while (_pending.TryDequeue(out var evt))
{
cancellationToken.ThrowIfCancellationRequested();
// Track dequeued events so acknowledgements and failures can be recorded against them.
_dequeued[evt.EventId] = evt;
yield return evt;
}
await Task.CompletedTask;
}
public Task AcknowledgeAsync(string eventId, CancellationToken cancellationToken)
{
// Remove from failed if retrying, then record the acknowledgement.
_failed.TryRemove(eventId, out _);
if (_dequeued.TryRemove(eventId, out var evt))
{
_acknowledged[eventId] = evt;
}
return Task.CompletedTask;
}
public Task MarkFailedAsync(string eventId, string error, CancellationToken cancellationToken)
{
// Track failed events so tests can assert on retry behaviour.
if (_dequeued.TryRemove(eventId, out var evt))
{
_failed[eventId] = evt;
}
return Task.CompletedTask;
}
public int PendingCount => _pending.Count;
public IReadOnlyCollection<PolicyChangeEvent> GetAcknowledged() =>
_acknowledged.Values.ToList();
}
/// <summary>
/// In-memory implementation of idempotency store for testing.
/// </summary>
public sealed class InMemoryPolicyChangeIdempotencyStore : IPolicyChangeIdempotencyStore
{
private readonly ConcurrentDictionary<string, DateTimeOffset> _seen = new(StringComparer.Ordinal);
public Task<bool> HasSeenAsync(string eventId, CancellationToken cancellationToken)
{
return Task.FromResult(_seen.ContainsKey(eventId));
}
public Task MarkSeenAsync(string eventId, DateTimeOffset processedAt, CancellationToken cancellationToken)
{
_seen[eventId] = processedAt;
return Task.CompletedTask;
}
public int SeenCount => _seen.Count;
public void Clear() => _seen.Clear();
}
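A test-style sketch driving a single ProcessAsync cycle entirely in memory, using the in-memory source and idempotency store above, the DefaultPolicyReEvaluationSubmitter from the background-service file, and the PolicyChangeEventFactory defined later in this commit; the tenant, advisory, and package identifiers are placeholders:

using StellaOps.Policy.Engine.IncrementalOrchestrator;

var source = new InMemoryPolicyChangeEventSource();
var store = new InMemoryPolicyChangeIdempotencyStore();
var submitter = new DefaultPolicyReEvaluationSubmitter(); // no OnSubmitJob set, so it simulates job creation

var now = DateTimeOffset.UtcNow;
source.Enqueue(PolicyChangeEventFactory.CreateAdvisoryUpdated(
    tenantId: "tenant-a",
    advisoryId: "ADV-0001",                       // placeholder identifiers
    vulnerabilityId: "CVE-2025-0001",
    affectedPurls: new[] { "pkg:npm/example@1.0.0" },
    source: "concelier",
    occurredAt: now,
    createdAt: now,
    priority: PolicyChangePriority.High));

var orchestrator = new IncrementalPolicyOrchestrator(source, submitter, store);
var result = await orchestrator.ProcessAsync(CancellationToken.None);

// Expect one batch, one simulated "prj-…" job, and the event recorded as seen in the idempotency store.
Console.WriteLine($"{result.BatchesProcessed} batch(es), jobs: {string.Join(",", result.JobsCreated)}");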

View File

@@ -0,0 +1,535 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Policy.Engine.IncrementalOrchestrator;
/// <summary>
/// Types of policy-relevant changes that trigger re-evaluation.
/// </summary>
public enum PolicyChangeType
{
/// <summary>Advisory was created or updated.</summary>
AdvisoryUpdated,
/// <summary>Advisory was retracted/withdrawn.</summary>
AdvisoryRetracted,
/// <summary>VEX statement was added or modified.</summary>
VexStatementUpdated,
/// <summary>VEX conflict detected.</summary>
VexConflictDetected,
/// <summary>SBOM was ingested or updated.</summary>
SbomUpdated,
/// <summary>SBOM component changed.</summary>
SbomComponentChanged,
/// <summary>Policy version was published.</summary>
PolicyVersionPublished,
/// <summary>Manual re-evaluation triggered.</summary>
ManualTrigger
}
/// <summary>
/// Priority levels for change processing.
/// </summary>
public enum PolicyChangePriority
{
/// <summary>Normal priority - standard processing.</summary>
Normal = 0,
/// <summary>High priority - process sooner.</summary>
High = 1,
/// <summary>Emergency - immediate processing (e.g., KEV addition).</summary>
Emergency = 2
}
/// <summary>
/// Represents a change event that may trigger policy re-evaluation.
/// </summary>
public sealed record PolicyChangeEvent
{
/// <summary>
/// Unique event identifier (deterministic based on content).
/// </summary>
public required string EventId { get; init; }
/// <summary>
/// Type of change.
/// </summary>
public required PolicyChangeType ChangeType { get; init; }
/// <summary>
/// Tenant context for the change.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Timestamp when the change occurred (from source system).
/// </summary>
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Timestamp when the event was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Processing priority.
/// </summary>
public required PolicyChangePriority Priority { get; init; }
/// <summary>
/// Source system that produced the change.
/// </summary>
public required string Source { get; init; }
/// <summary>
/// Correlation ID for tracing.
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// Advisory ID (for advisory/VEX changes).
/// </summary>
public string? AdvisoryId { get; init; }
/// <summary>
/// Vulnerability ID (CVE, GHSA, etc.).
/// </summary>
public string? VulnerabilityId { get; init; }
/// <summary>
/// Affected PURLs (package URLs).
/// </summary>
public ImmutableArray<string> AffectedPurls { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Affected product keys (for SBOM targeting).
/// </summary>
public ImmutableArray<string> AffectedProductKeys { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Affected SBOM IDs (for direct targeting).
/// </summary>
public ImmutableArray<string> AffectedSbomIds { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Policy IDs to re-evaluate (empty = all applicable).
/// </summary>
public ImmutableArray<string> PolicyIds { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Additional metadata for the change.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// Content hash for deduplication.
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// Computes a deterministic content hash for deduplication.
/// </summary>
public static string ComputeContentHash(
PolicyChangeType changeType,
string tenantId,
string? advisoryId,
string? vulnerabilityId,
IEnumerable<string>? affectedPurls,
IEnumerable<string>? affectedProductKeys,
IEnumerable<string>? affectedSbomIds)
{
var builder = new StringBuilder();
builder.Append(changeType.ToString()).Append('|');
builder.Append(tenantId.ToLowerInvariant()).Append('|');
builder.Append(advisoryId ?? string.Empty).Append('|');
builder.Append(vulnerabilityId ?? string.Empty).Append('|');
// Deterministic ordering
var purls = (affectedPurls ?? Enumerable.Empty<string>())
.Where(p => !string.IsNullOrWhiteSpace(p))
.Select(p => p.Trim())
.OrderBy(p => p, StringComparer.Ordinal);
var productKeys = (affectedProductKeys ?? Enumerable.Empty<string>())
.Where(k => !string.IsNullOrWhiteSpace(k))
.Select(k => k.Trim())
.OrderBy(k => k, StringComparer.Ordinal);
var sbomIds = (affectedSbomIds ?? Enumerable.Empty<string>())
.Where(s => !string.IsNullOrWhiteSpace(s))
.Select(s => s.Trim())
.OrderBy(s => s, StringComparer.Ordinal);
foreach (var purl in purls)
{
builder.Append("purl:").Append(purl).Append('|');
}
foreach (var key in productKeys)
{
builder.Append("pk:").Append(key).Append('|');
}
foreach (var id in sbomIds)
{
builder.Append("sbom:").Append(id).Append('|');
}
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
return Convert.ToHexStringLower(bytes);
}
/// <summary>
/// Creates a deterministic event ID.
/// </summary>
public static string CreateEventId(
string tenantId,
PolicyChangeType changeType,
string source,
DateTimeOffset occurredAt,
string contentHash)
{
var seed = $"{tenantId}|{changeType}|{source}|{occurredAt:O}|{contentHash}";
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
return $"pce-{Convert.ToHexStringLower(bytes)[..16]}";
}
}
/// <summary>
/// Factory for creating normalized policy change events.
/// </summary>
public static class PolicyChangeEventFactory
{
/// <summary>
/// Creates an advisory update event.
/// </summary>
public static PolicyChangeEvent CreateAdvisoryUpdated(
string tenantId,
string advisoryId,
string? vulnerabilityId,
IEnumerable<string> affectedPurls,
string source,
DateTimeOffset occurredAt,
DateTimeOffset createdAt,
PolicyChangePriority priority = PolicyChangePriority.Normal,
string? correlationId = null,
ImmutableDictionary<string, string>? metadata = null)
{
var normalizedTenant = NormalizeTenant(tenantId);
var normalizedAdvisoryId = Normalize(advisoryId, nameof(advisoryId));
var normalizedVulnId = vulnerabilityId?.Trim();
var normalizedPurls = NormalizePurls(affectedPurls);
var contentHash = PolicyChangeEvent.ComputeContentHash(
PolicyChangeType.AdvisoryUpdated,
normalizedTenant,
normalizedAdvisoryId,
normalizedVulnId,
normalizedPurls,
null,
null);
var eventId = PolicyChangeEvent.CreateEventId(
normalizedTenant,
PolicyChangeType.AdvisoryUpdated,
source,
occurredAt,
contentHash);
return new PolicyChangeEvent
{
EventId = eventId,
ChangeType = PolicyChangeType.AdvisoryUpdated,
TenantId = normalizedTenant,
OccurredAt = occurredAt,
CreatedAt = createdAt,
Priority = priority,
Source = source,
CorrelationId = correlationId,
AdvisoryId = normalizedAdvisoryId,
VulnerabilityId = normalizedVulnId,
AffectedPurls = normalizedPurls,
ContentHash = contentHash,
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty
};
}
/// <summary>
/// Creates a VEX statement update event.
/// </summary>
public static PolicyChangeEvent CreateVexUpdated(
string tenantId,
string vulnerabilityId,
IEnumerable<string> affectedProductKeys,
string source,
DateTimeOffset occurredAt,
DateTimeOffset createdAt,
PolicyChangePriority priority = PolicyChangePriority.Normal,
string? correlationId = null,
ImmutableDictionary<string, string>? metadata = null)
{
var normalizedTenant = NormalizeTenant(tenantId);
var normalizedVulnId = Normalize(vulnerabilityId, nameof(vulnerabilityId));
var normalizedKeys = NormalizeProductKeys(affectedProductKeys);
var contentHash = PolicyChangeEvent.ComputeContentHash(
PolicyChangeType.VexStatementUpdated,
normalizedTenant,
null,
normalizedVulnId,
null,
normalizedKeys,
null);
var eventId = PolicyChangeEvent.CreateEventId(
normalizedTenant,
PolicyChangeType.VexStatementUpdated,
source,
occurredAt,
contentHash);
return new PolicyChangeEvent
{
EventId = eventId,
ChangeType = PolicyChangeType.VexStatementUpdated,
TenantId = normalizedTenant,
OccurredAt = occurredAt,
CreatedAt = createdAt,
Priority = priority,
Source = source,
CorrelationId = correlationId,
VulnerabilityId = normalizedVulnId,
AffectedProductKeys = normalizedKeys,
ContentHash = contentHash,
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty
};
}
/// <summary>
/// Creates an SBOM update event.
/// </summary>
public static PolicyChangeEvent CreateSbomUpdated(
string tenantId,
string sbomId,
string productKey,
IEnumerable<string> componentPurls,
string source,
DateTimeOffset occurredAt,
DateTimeOffset createdAt,
PolicyChangePriority priority = PolicyChangePriority.Normal,
string? correlationId = null,
ImmutableDictionary<string, string>? metadata = null)
{
var normalizedTenant = NormalizeTenant(tenantId);
var normalizedSbomId = Normalize(sbomId, nameof(sbomId));
var normalizedProductKey = Normalize(productKey, nameof(productKey));
var normalizedPurls = NormalizePurls(componentPurls);
var contentHash = PolicyChangeEvent.ComputeContentHash(
PolicyChangeType.SbomUpdated,
normalizedTenant,
null,
null,
normalizedPurls,
ImmutableArray.Create(normalizedProductKey),
ImmutableArray.Create(normalizedSbomId));
var eventId = PolicyChangeEvent.CreateEventId(
normalizedTenant,
PolicyChangeType.SbomUpdated,
source,
occurredAt,
contentHash);
return new PolicyChangeEvent
{
EventId = eventId,
ChangeType = PolicyChangeType.SbomUpdated,
TenantId = normalizedTenant,
OccurredAt = occurredAt,
CreatedAt = createdAt,
Priority = priority,
Source = source,
CorrelationId = correlationId,
AffectedPurls = normalizedPurls,
AffectedProductKeys = ImmutableArray.Create(normalizedProductKey),
AffectedSbomIds = ImmutableArray.Create(normalizedSbomId),
ContentHash = contentHash,
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty
};
}
/// <summary>
/// Creates a manual trigger event.
/// </summary>
public static PolicyChangeEvent CreateManualTrigger(
string tenantId,
IEnumerable<string>? policyIds,
IEnumerable<string>? sbomIds,
IEnumerable<string>? productKeys,
string requestedBy,
DateTimeOffset createdAt,
PolicyChangePriority priority = PolicyChangePriority.Normal,
string? correlationId = null,
ImmutableDictionary<string, string>? metadata = null)
{
var normalizedTenant = NormalizeTenant(tenantId);
var normalizedPolicyIds = NormalizePolicyIds(policyIds);
var normalizedSbomIds = NormalizeSbomIds(sbomIds);
var normalizedProductKeys = NormalizeProductKeys(productKeys);
var contentHash = PolicyChangeEvent.ComputeContentHash(
PolicyChangeType.ManualTrigger,
normalizedTenant,
null,
null,
null,
normalizedProductKeys,
normalizedSbomIds);
var eventId = PolicyChangeEvent.CreateEventId(
normalizedTenant,
PolicyChangeType.ManualTrigger,
"manual",
createdAt,
contentHash);
return new PolicyChangeEvent
{
EventId = eventId,
ChangeType = PolicyChangeType.ManualTrigger,
TenantId = normalizedTenant,
OccurredAt = createdAt,
CreatedAt = createdAt,
Priority = priority,
Source = "manual",
CorrelationId = correlationId,
PolicyIds = normalizedPolicyIds,
AffectedProductKeys = normalizedProductKeys,
AffectedSbomIds = normalizedSbomIds,
ContentHash = contentHash,
Metadata = (metadata ?? ImmutableDictionary<string, string>.Empty)
.SetItem("requestedBy", requestedBy)
};
}
private static string NormalizeTenant(string tenantId)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant ID cannot be null or whitespace", nameof(tenantId));
}
return tenantId.Trim().ToLowerInvariant();
}
private static string Normalize(string value, string name)
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentException($"{name} cannot be null or whitespace", name);
}
return value.Trim();
}
private static ImmutableArray<string> NormalizePurls(IEnumerable<string>? purls)
{
return (purls ?? Enumerable.Empty<string>())
.Where(p => !string.IsNullOrWhiteSpace(p))
.Select(p => p.Trim())
.Distinct(StringComparer.Ordinal)
.OrderBy(p => p, StringComparer.Ordinal)
.ToImmutableArray();
}
private static ImmutableArray<string> NormalizeProductKeys(IEnumerable<string>? keys)
{
return (keys ?? Enumerable.Empty<string>())
.Where(k => !string.IsNullOrWhiteSpace(k))
.Select(k => k.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(k => k, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private static ImmutableArray<string> NormalizeSbomIds(IEnumerable<string>? ids)
{
return (ids ?? Enumerable.Empty<string>())
.Where(id => !string.IsNullOrWhiteSpace(id))
.Select(id => id.Trim())
.Distinct(StringComparer.Ordinal)
.OrderBy(id => id, StringComparer.Ordinal)
.ToImmutableArray();
}
private static ImmutableArray<string> NormalizePolicyIds(IEnumerable<string>? ids)
{
return (ids ?? Enumerable.Empty<string>())
.Where(id => !string.IsNullOrWhiteSpace(id))
.Select(id => id.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(id => id, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
}
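// Example (illustrative sketch; tenant, policy, and SBOM identifiers are hypothetical):
// shows how the factory methods above might be invoked for a manually requested re-evaluation.
internal static class PolicyChangeEventUsageExample
{
    public static PolicyChangeEvent BuildManualTrigger(DateTimeOffset now) =>
        PolicyChangeEvent.CreateManualTrigger(
            tenantId: "Tenant-A",                     // normalized to "tenant-a"
            policyIds: new[] { "policy-baseline" },
            sbomIds: new[] { "sbom-001" },
            productKeys: null,
            requestedBy: "ops@example.test",
            createdAt: now);
}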
/// <summary>
/// A batch of change events to be processed together.
/// </summary>
public sealed record PolicyChangeBatch
{
/// <summary>
/// Unique batch identifier.
/// </summary>
public required string BatchId { get; init; }
/// <summary>
/// Tenant context.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Events in this batch (deterministically ordered).
/// </summary>
public required ImmutableArray<PolicyChangeEvent> Events { get; init; }
/// <summary>
/// Highest priority in the batch.
/// </summary>
public required PolicyChangePriority Priority { get; init; }
/// <summary>
/// When the batch was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Combined affected PURLs from all events.
/// </summary>
public required ImmutableArray<string> AffectedPurls { get; init; }
/// <summary>
/// Combined affected product keys from all events.
/// </summary>
public required ImmutableArray<string> AffectedProductKeys { get; init; }
/// <summary>
/// Combined affected SBOM IDs from all events.
/// </summary>
public required ImmutableArray<string> AffectedSbomIds { get; init; }
/// <summary>
/// Combined vulnerability IDs from all events.
/// </summary>
public required ImmutableArray<string> VulnerabilityIds { get; init; }
}
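// Example (illustrative sketch): one way a single change event could be wrapped into a batch,
// assuming the event's affected sets are already populated. The batch identifier scheme is hypothetical.
internal static class PolicyChangeBatchExample
{
    public static PolicyChangeBatch FromSingleEvent(PolicyChangeEvent changeEvent) => new()
    {
        BatchId = $"batch-{changeEvent.EventId}",
        TenantId = changeEvent.TenantId,
        Events = ImmutableArray.Create(changeEvent),
        Priority = changeEvent.Priority,
        CreatedAt = changeEvent.CreatedAt,
        AffectedPurls = changeEvent.AffectedPurls,
        AffectedProductKeys = changeEvent.AffectedProductKeys,
        AffectedSbomIds = changeEvent.AffectedSbomIds,
        VulnerabilityIds = ImmutableArray<string>.Empty
    };
}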

View File

@@ -1,5 +1,6 @@
using System.Collections.ObjectModel;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Telemetry;
@@ -30,6 +31,8 @@ public sealed class PolicyEngineOptions
public ReachabilityFactsCacheOptions ReachabilityCache { get; } = new();
public PolicyEvaluationCacheOptions EvaluationCache { get; } = new();
public void Validate()
{
Authority.Validate();

View File

@@ -7,6 +7,7 @@ namespace StellaOps.Policy.Engine.Services;
/// <summary>
/// Compiles policy DSL to canonical representation, signs it deterministically, and stores per revision.
/// Captures AOC (Attestation of Compliance) metadata for policy revisions.
/// </summary>
internal sealed class PolicyBundleService
{
@@ -40,7 +41,9 @@ internal sealed class PolicyBundleService
throw new ArgumentNullException(nameof(request));
}
var compiledAt = _timeProvider.GetUtcNow();
var compileResult = _compilationService.Compile(new PolicyCompileRequest(request.Dsl));
if (!compileResult.Success || compileResult.CanonicalRepresentation.IsDefaultOrEmpty)
{
return new PolicyBundleResponse(
@@ -49,30 +52,55 @@ internal sealed class PolicyBundleService
Signature: null,
SizeBytes: 0,
CreatedAt: null,
Diagnostics: compileResult.Diagnostics);
Diagnostics: compileResult.Diagnostics,
AocMetadata: null);
}
var payload = compileResult.CanonicalRepresentation.ToArray();
var digest = compileResult.Digest ?? $"sha256:{ComputeSha256Hex(payload)}";
var signature = Sign(digest, request.SigningKeyId);
var artifactDigest = compileResult.Digest ?? $"sha256:{ComputeSha256Hex(payload)}";
var sourceDigest = ComputeSourceDigest(request.Dsl.Source);
var signature = Sign(artifactDigest, request.SigningKeyId);
var createdAt = _timeProvider.GetUtcNow();
// Generate AOC metadata
var compilationId = GenerateCompilationId(packId, version, compiledAt);
var aocMetadata = CreateAocMetadata(
compilationId,
request.Dsl.Syntax,
compiledAt,
sourceDigest,
artifactDigest,
compileResult,
request.Provenance);
var record = new PolicyBundleRecord(
Digest: digest,
Digest: artifactDigest,
Signature: signature,
Size: payload.Length,
CreatedAt: createdAt,
Payload: payload.ToImmutableArray());
Payload: payload.ToImmutableArray(),
AocMetadata: aocMetadata);
await _repository.StoreBundleAsync(packId, version, record, cancellationToken).ConfigureAwait(false);
var aocResponse = new PolicyAocMetadataResponse(
CompilationId: aocMetadata.CompilationId,
CompilerVersion: aocMetadata.CompilerVersion,
CompiledAt: aocMetadata.CompiledAt,
SourceDigest: aocMetadata.SourceDigest,
ArtifactDigest: aocMetadata.ArtifactDigest,
ComplexityScore: aocMetadata.ComplexityScore,
RuleCount: aocMetadata.RuleCount,
DurationMilliseconds: aocMetadata.DurationMilliseconds);
return new PolicyBundleResponse(
Success: true,
Digest: digest,
Digest: artifactDigest,
Signature: signature,
SizeBytes: payload.Length,
CreatedAt: createdAt,
Diagnostics: compileResult.Diagnostics);
Diagnostics: compileResult.Diagnostics,
AocMetadata: aocResponse);
}
private static string ComputeSha256Hex(byte[] payload)
@@ -82,6 +110,14 @@ internal sealed class PolicyBundleService
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static string ComputeSourceDigest(string source)
{
var bytes = Encoding.UTF8.GetBytes(source);
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(bytes, hash);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static string Sign(string digest, string? signingKeyId)
{
// Deterministic signature stub suitable for offline testing.
@@ -89,4 +125,51 @@ internal sealed class PolicyBundleService
var mac = HMACSHA256.HashData(Encoding.UTF8.GetBytes(key), Encoding.UTF8.GetBytes(digest));
return $"sig:sha256:{Convert.ToHexString(mac).ToLowerInvariant()}";
}
private static string GenerateCompilationId(string packId, int version, DateTimeOffset timestamp)
{
// Deterministic compilation ID based on pack, version, and timestamp
var input = $"{packId}:{version}:{timestamp:O}";
var bytes = Encoding.UTF8.GetBytes(input);
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(bytes, hash);
return $"comp-{Convert.ToHexString(hash).ToLowerInvariant()[..16]}";
}
private static PolicyAocMetadata CreateAocMetadata(
string compilationId,
string compilerVersion,
DateTimeOffset compiledAt,
string sourceDigest,
string artifactDigest,
PolicyCompilationResultDto compileResult,
PolicyProvenanceInput? provenanceInput)
{
var complexity = compileResult.Complexity;
var statistics = compileResult.Statistics;
PolicyProvenance? provenance = null;
if (provenanceInput is not null)
{
provenance = new PolicyProvenance(
SourceType: provenanceInput.SourceType,
SourceUrl: provenanceInput.SourceUrl,
Submitter: provenanceInput.Submitter,
CommitSha: provenanceInput.CommitSha,
Branch: provenanceInput.Branch,
IngestedAt: compiledAt);
}
return new PolicyAocMetadata(
CompilationId: compilationId,
CompilerVersion: compilerVersion,
CompiledAt: compiledAt,
SourceDigest: sourceDigest,
ArtifactDigest: artifactDigest,
ComplexityScore: complexity?.Score ?? 0,
RuleCount: statistics?.RuleCount ?? complexity?.RuleCount ?? 0,
DurationMilliseconds: compileResult.DurationMilliseconds,
Provenance: provenance,
AttestationRef: null);
}
}

View File

@@ -0,0 +1,425 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.Services;
/// <summary>
/// Request for runtime policy evaluation over linkset/SBOM data.
/// </summary>
internal sealed record RuntimeEvaluationRequest(
string PackId,
int Version,
string TenantId,
string SubjectPurl,
string AdvisoryId,
PolicyEvaluationSeverity Severity,
PolicyEvaluationAdvisory Advisory,
PolicyEvaluationVexEvidence Vex,
PolicyEvaluationSbom Sbom,
PolicyEvaluationExceptions Exceptions,
PolicyEvaluationReachability Reachability,
DateTimeOffset? EvaluationTimestamp = null,
bool BypassCache = false);
/// <summary>
/// Response from runtime policy evaluation.
/// </summary>
internal sealed record RuntimeEvaluationResponse(
string PackId,
int Version,
string PolicyDigest,
string Status,
string? Severity,
string? RuleName,
int? Priority,
ImmutableDictionary<string, string> Annotations,
ImmutableArray<string> Warnings,
PolicyExceptionApplication? AppliedException,
string CorrelationId,
bool Cached,
CacheSource CacheSource,
long EvaluationDurationMs);
/// <summary>
/// Runtime evaluator executing compiled policy plans over advisory/VEX linksets and SBOM asset metadata
/// with deterministic caching (Redis, with an in-memory fallback).
/// </summary>
internal sealed class PolicyRuntimeEvaluationService
{
private readonly IPolicyPackRepository _repository;
private readonly IPolicyEvaluationCache _cache;
private readonly PolicyEvaluator _evaluator;
private readonly TimeProvider _timeProvider;
private readonly ILogger<PolicyRuntimeEvaluationService> _logger;
private static readonly JsonSerializerOptions ContextSerializerOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
};
public PolicyRuntimeEvaluationService(
IPolicyPackRepository repository,
IPolicyEvaluationCache cache,
PolicyEvaluator evaluator,
TimeProvider timeProvider,
ILogger<PolicyRuntimeEvaluationService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_evaluator = evaluator ?? throw new ArgumentNullException(nameof(evaluator));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Evaluates a policy against the provided context with deterministic caching.
/// </summary>
public async Task<RuntimeEvaluationResponse> EvaluateAsync(
RuntimeEvaluationRequest request,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
var startTimestamp = _timeProvider.GetTimestamp();
var evaluationTimestamp = request.EvaluationTimestamp ?? _timeProvider.GetUtcNow();
// Load the compiled policy bundle
var bundle = await _repository.GetBundleAsync(request.PackId, request.Version, cancellationToken)
.ConfigureAwait(false);
if (bundle is null)
{
throw new InvalidOperationException(
$"Policy bundle not found for pack '{request.PackId}' version {request.Version}.");
}
// Compute deterministic cache key
var subjectDigest = ComputeSubjectDigest(request.TenantId, request.SubjectPurl, request.AdvisoryId);
var contextDigest = ComputeContextDigest(request);
var cacheKey = PolicyEvaluationCacheKey.Create(bundle.Digest, subjectDigest, contextDigest);
// Try cache lookup unless bypassed
if (!request.BypassCache)
{
var cacheResult = await _cache.GetAsync(cacheKey, cancellationToken).ConfigureAwait(false);
if (cacheResult.CacheHit && cacheResult.Entry is not null)
{
var duration = GetElapsedMilliseconds(startTimestamp);
_logger.LogDebug(
"Cache hit for evaluation {PackId}@{Version} subject {Subject} from {Source}",
request.PackId, request.Version, request.SubjectPurl, cacheResult.Source);
return CreateResponseFromCache(
request, bundle.Digest, cacheResult.Entry, cacheResult.Source, duration);
}
}
// Cache miss - perform evaluation
var document = DeserializeCompiledPolicy(bundle.Payload);
if (document is null)
{
throw new InvalidOperationException(
$"Failed to deserialize compiled policy for pack '{request.PackId}' version {request.Version}.");
}
var context = new PolicyEvaluationContext(
request.Severity,
new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty),
request.Advisory,
request.Vex,
request.Sbom,
request.Exceptions,
request.Reachability,
evaluationTimestamp);
var evalRequest = new Evaluation.PolicyEvaluationRequest(document, context);
var result = _evaluator.Evaluate(evalRequest);
var correlationId = ComputeCorrelationId(bundle.Digest, subjectDigest, contextDigest);
var expiresAt = evaluationTimestamp.AddMinutes(30);
// Store in cache
var cacheEntry = new PolicyEvaluationCacheEntry(
result.Status,
result.Severity,
result.RuleName,
result.Priority,
result.Annotations,
result.Warnings,
result.AppliedException?.ExceptionId,
correlationId,
evaluationTimestamp,
expiresAt);
await _cache.SetAsync(cacheKey, cacheEntry, cancellationToken).ConfigureAwait(false);
var evalDuration = GetElapsedMilliseconds(startTimestamp);
_logger.LogDebug(
"Evaluated {PackId}@{Version} subject {Subject} in {Duration}ms - {Status}",
request.PackId, request.Version, request.SubjectPurl, evalDuration, result.Status);
return new RuntimeEvaluationResponse(
request.PackId,
request.Version,
bundle.Digest,
result.Status,
result.Severity,
result.RuleName,
result.Priority,
result.Annotations,
result.Warnings,
result.AppliedException,
correlationId,
Cached: false,
CacheSource: CacheSource.None,
EvaluationDurationMs: evalDuration);
}
/// <summary>
/// Evaluates multiple subjects in batch with caching.
/// </summary>
public async Task<IReadOnlyList<RuntimeEvaluationResponse>> EvaluateBatchAsync(
IReadOnlyList<RuntimeEvaluationRequest> requests,
CancellationToken cancellationToken)
{
if (requests.Count == 0)
{
return Array.Empty<RuntimeEvaluationResponse>();
}
var results = new List<RuntimeEvaluationResponse>(requests.Count);
// Group by pack/version for bundle loading efficiency
var groups = requests.GroupBy(r => (r.PackId, r.Version));
foreach (var group in groups)
{
var (packId, version) = group.Key;
var bundle = await _repository.GetBundleAsync(packId, version, cancellationToken)
.ConfigureAwait(false);
if (bundle is null)
{
foreach (var request in group)
{
_logger.LogWarning(
"Policy bundle not found for pack '{PackId}' version {Version}, skipping evaluation",
packId, version);
}
continue;
}
var document = DeserializeCompiledPolicy(bundle.Payload);
if (document is null)
{
_logger.LogWarning(
"Failed to deserialize policy bundle for pack '{PackId}' version {Version}",
packId, version);
continue;
}
// Build cache keys for batch lookup
var cacheKeys = new List<(RuntimeEvaluationRequest Request, PolicyEvaluationCacheKey Key)>();
foreach (var request in group)
{
var subjectDigest = ComputeSubjectDigest(request.TenantId, request.SubjectPurl, request.AdvisoryId);
var contextDigest = ComputeContextDigest(request);
var key = PolicyEvaluationCacheKey.Create(bundle.Digest, subjectDigest, contextDigest);
cacheKeys.Add((request, key));
}
// Batch cache lookup
var keyList = cacheKeys.Select(k => k.Key).ToList();
var cacheResults = await _cache.GetBatchAsync(keyList, cancellationToken).ConfigureAwait(false);
var toEvaluate = new List<(RuntimeEvaluationRequest Request, PolicyEvaluationCacheKey Key)>();
// Process cache hits
foreach (var (request, key) in cacheKeys)
{
if (!request.BypassCache && cacheResults.Found.TryGetValue(key, out var entry))
{
// Batch lookups do not expose a per-entry cache tier or latency, so cached
// responses are reported as in-memory hits with zero duration.
var response = CreateResponseFromCache(request, bundle.Digest, entry, CacheSource.InMemory, 0);
results.Add(response);
}
else
{
toEvaluate.Add((request, key));
}
}
// Evaluate cache misses
var entriesToCache = new Dictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry>();
foreach (var (request, key) in toEvaluate)
{
var startTimestamp = _timeProvider.GetTimestamp();
var evaluationTimestamp = request.EvaluationTimestamp ?? _timeProvider.GetUtcNow();
var context = new PolicyEvaluationContext(
request.Severity,
new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty),
request.Advisory,
request.Vex,
request.Sbom,
request.Exceptions,
request.Reachability,
evaluationTimestamp);
var evalRequest = new Evaluation.PolicyEvaluationRequest(document, context);
var result = _evaluator.Evaluate(evalRequest);
var correlationId = ComputeCorrelationId(bundle.Digest, key.SubjectDigest, key.ContextDigest);
var expiresAt = evaluationTimestamp.AddMinutes(30);
var duration = GetElapsedMilliseconds(startTimestamp);
var cacheEntry = new PolicyEvaluationCacheEntry(
result.Status,
result.Severity,
result.RuleName,
result.Priority,
result.Annotations,
result.Warnings,
result.AppliedException?.ExceptionId,
correlationId,
evaluationTimestamp,
expiresAt);
entriesToCache[key] = cacheEntry;
results.Add(new RuntimeEvaluationResponse(
request.PackId,
request.Version,
bundle.Digest,
result.Status,
result.Severity,
result.RuleName,
result.Priority,
result.Annotations,
result.Warnings,
result.AppliedException,
correlationId,
Cached: false,
CacheSource: CacheSource.None,
EvaluationDurationMs: duration));
}
// Batch store cache entries
if (entriesToCache.Count > 0)
{
await _cache.SetBatchAsync(entriesToCache, cancellationToken).ConfigureAwait(false);
}
}
return results;
}
private static RuntimeEvaluationResponse CreateResponseFromCache(
RuntimeEvaluationRequest request,
string policyDigest,
PolicyEvaluationCacheEntry entry,
CacheSource source,
long durationMs)
{
PolicyExceptionApplication? appliedException = null;
if (entry.ExceptionId is not null)
{
// Reconstruct a minimal exception application from the cache entry; the effect type
// and metadata are not cached, so placeholder values are used.
appliedException = new PolicyExceptionApplication(
entry.ExceptionId,
EffectId: "cached",
EffectType: PolicyExceptionEffectType.Suppress,
OriginalStatus: entry.Status,
OriginalSeverity: entry.Severity,
AppliedStatus: entry.Status,
AppliedSeverity: entry.Severity,
Metadata: ImmutableDictionary<string, string>.Empty);
}
return new RuntimeEvaluationResponse(
request.PackId,
request.Version,
policyDigest,
entry.Status,
entry.Severity,
entry.RuleName,
entry.Priority,
entry.Annotations,
entry.Warnings,
appliedException,
entry.CorrelationId,
Cached: true,
CacheSource: source,
EvaluationDurationMs: durationMs);
}
private static string ComputeSubjectDigest(string tenantId, string subjectPurl, string advisoryId)
{
var input = $"{tenantId}|{subjectPurl}|{advisoryId}";
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(Encoding.UTF8.GetBytes(input), hash);
return Convert.ToHexStringLower(hash);
}
private static string ComputeContextDigest(RuntimeEvaluationRequest request)
{
// Create deterministic context representation
var contextData = new
{
severity = request.Severity.Normalized,
severityScore = request.Severity.Score,
advisorySource = request.Advisory.Source,
vexCount = request.Vex.Statements.Length,
vexStatements = request.Vex.Statements.Select(s => $"{s.Status}:{s.Justification}").OrderBy(s => s, StringComparer.Ordinal).ToArray(),
sbomTags = request.Sbom.Tags.OrderBy(t => t, StringComparer.Ordinal).ToArray(),
exceptionCount = request.Exceptions.Instances.Length,
reachability = request.Reachability.State,
};
var json = JsonSerializer.Serialize(contextData, ContextSerializerOptions);
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(Encoding.UTF8.GetBytes(json), hash);
return Convert.ToHexStringLower(hash);
}
private static string ComputeCorrelationId(string policyDigest, string subjectDigest, string contextDigest)
{
var input = $"{policyDigest}|{subjectDigest}|{contextDigest}";
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(Encoding.UTF8.GetBytes(input), hash);
return Convert.ToHexString(hash);
}
private static PolicyIrDocument? DeserializeCompiledPolicy(ImmutableArray<byte> payload)
{
if (payload.IsDefaultOrEmpty)
{
return null;
}
try
{
var json = Encoding.UTF8.GetString(payload.AsSpan());
return JsonSerializer.Deserialize<PolicyIrDocument>(json);
}
catch
{
// Treat a malformed payload as missing; callers surface the failure explicitly.
return null;
}
}
private long GetElapsedMilliseconds(long startTimestamp)
{
var elapsed = _timeProvider.GetElapsedTime(startTimestamp);
return (long)elapsed.TotalMilliseconds;
}
}
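// Example (illustrative sketch): cache behaviour as seen by a caller, assuming a fully populated
// RuntimeEvaluationRequest is supplied elsewhere and the entry is not already cached. The first
// call misses and stores the result; an identical second call is served from cache unless
// BypassCache is set on the request.
internal static class PolicyRuntimeEvaluationUsageExample
{
    public static async Task<bool> SecondCallIsCachedAsync(
        PolicyRuntimeEvaluationService service,
        RuntimeEvaluationRequest request,
        CancellationToken cancellationToken)
    {
        var first = await service.EvaluateAsync(request, cancellationToken).ConfigureAwait(false);
        var second = await service.EvaluateAsync(request, cancellationToken).ConfigureAwait(false);

        // Both responses carry the same deterministic correlation identifier.
        return !first.Cached && second.Cached && first.CorrelationId == second.CorrelationId;
    }
}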

View File

@@ -0,0 +1,701 @@
using System.Collections.Immutable;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Telemetry;
/// <summary>
/// Export format for explain traces.
/// </summary>
public enum ExplainTraceFormat
{
/// <summary>JSON format.</summary>
Json,
/// <summary>NDJSON format (newline-delimited JSON).</summary>
Ndjson,
/// <summary>Human-readable text format.</summary>
Text,
/// <summary>Markdown format for documentation.</summary>
Markdown
}
/// <summary>
/// Complete explain trace for a policy evaluation.
/// </summary>
public sealed record ExplainTrace
{
/// <summary>
/// Run identifier.
/// </summary>
public required string RunId { get; init; }
/// <summary>
/// Tenant context.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Policy identifier.
/// </summary>
public required string PolicyId { get; init; }
/// <summary>
/// Policy version.
/// </summary>
public int? PolicyVersion { get; init; }
/// <summary>
/// Evaluation timestamp (deterministic).
/// </summary>
public required DateTimeOffset EvaluationTimestamp { get; init; }
/// <summary>
/// Total evaluation duration in milliseconds.
/// </summary>
public required long EvaluationDurationMs { get; init; }
/// <summary>
/// Final outcome of the evaluation.
/// </summary>
public required string FinalOutcome { get; init; }
/// <summary>
/// Input context summary.
/// </summary>
public required ExplainTraceInputContext InputContext { get; init; }
/// <summary>
/// Rule evaluation steps in order.
/// </summary>
public required ImmutableArray<ExplainTraceRuleStep> RuleSteps { get; init; }
/// <summary>
/// VEX evidence applied.
/// </summary>
public required ImmutableArray<ExplainTraceVexEvidence> VexEvidence { get; init; }
/// <summary>
/// Statistics summary.
/// </summary>
public required RuleHitStatistics Statistics { get; init; }
/// <summary>
/// Determinism hash for reproducibility verification.
/// </summary>
public string? DeterminismHash { get; init; }
/// <summary>
/// Trace metadata.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; init; } =
ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Input context for explain trace.
/// </summary>
public sealed record ExplainTraceInputContext
{
/// <summary>
/// Component PURL.
/// </summary>
public string? ComponentPurl { get; init; }
/// <summary>
/// Component name.
/// </summary>
public string? ComponentName { get; init; }
/// <summary>
/// Component version.
/// </summary>
public string? ComponentVersion { get; init; }
/// <summary>
/// Advisory ID.
/// </summary>
public string? AdvisoryId { get; init; }
/// <summary>
/// Vulnerability ID.
/// </summary>
public string? VulnerabilityId { get; init; }
/// <summary>
/// Input severity.
/// </summary>
public string? InputSeverity { get; init; }
/// <summary>
/// Input CVSS score.
/// </summary>
public decimal? InputCvssScore { get; init; }
/// <summary>
/// Environment variables available.
/// </summary>
public ImmutableDictionary<string, string> Environment { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// SBOM tags.
/// </summary>
public ImmutableArray<string> SbomTags { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Reachability state.
/// </summary>
public string? ReachabilityState { get; init; }
/// <summary>
/// Reachability confidence.
/// </summary>
public double? ReachabilityConfidence { get; init; }
}
/// <summary>
/// A single rule evaluation step in the explain trace.
/// </summary>
public sealed record ExplainTraceRuleStep
{
/// <summary>
/// Step number (1-based).
/// </summary>
public required int StepNumber { get; init; }
/// <summary>
/// Rule name.
/// </summary>
public required string RuleName { get; init; }
/// <summary>
/// Rule priority.
/// </summary>
public int RulePriority { get; init; }
/// <summary>
/// Rule category.
/// </summary>
public string? RuleCategory { get; init; }
/// <summary>
/// Expression that was evaluated.
/// </summary>
public string? Expression { get; init; }
/// <summary>
/// Whether the expression matched.
/// </summary>
public required bool Matched { get; init; }
/// <summary>
/// Outcome if the rule matched.
/// </summary>
public string? Outcome { get; init; }
/// <summary>
/// Assigned severity if the rule matched.
/// </summary>
public string? AssignedSeverity { get; init; }
/// <summary>
/// Whether this was the final matching rule.
/// </summary>
public bool IsFinalMatch { get; init; }
/// <summary>
/// Why the rule did or did not match.
/// </summary>
public string? Explanation { get; init; }
/// <summary>
/// Evaluation time in microseconds.
/// </summary>
public long EvaluationMicroseconds { get; init; }
/// <summary>
/// Intermediate values during evaluation.
/// </summary>
public ImmutableDictionary<string, string> IntermediateValues { get; init; } =
ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// VEX evidence in the explain trace.
/// </summary>
public sealed record ExplainTraceVexEvidence
{
/// <summary>
/// VEX provider/vendor.
/// </summary>
public required string Vendor { get; init; }
/// <summary>
/// VEX status.
/// </summary>
public required string Status { get; init; }
/// <summary>
/// VEX justification.
/// </summary>
public string? Justification { get; init; }
/// <summary>
/// Confidence score.
/// </summary>
public double? Confidence { get; init; }
/// <summary>
/// Whether this VEX was applied.
/// </summary>
public required bool WasApplied { get; init; }
/// <summary>
/// Why the VEX was or was not applied.
/// </summary>
public string? Explanation { get; init; }
}
/// <summary>
/// Service for building and exporting explain traces.
/// </summary>
public sealed class ExplainTraceExportService
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true
};
private static readonly JsonSerializerOptions NdjsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false
};
/// <summary>
/// Exports an explain trace to the specified format.
/// </summary>
public string Export(ExplainTrace trace, ExplainTraceFormat format)
{
return format switch
{
ExplainTraceFormat.Json => ExportJson(trace),
ExplainTraceFormat.Ndjson => ExportNdjson(trace),
ExplainTraceFormat.Text => ExportText(trace),
ExplainTraceFormat.Markdown => ExportMarkdown(trace),
_ => throw new ArgumentOutOfRangeException(nameof(format))
};
}
/// <summary>
/// Exports to JSON format.
/// </summary>
public string ExportJson(ExplainTrace trace)
{
return JsonSerializer.Serialize(trace, JsonOptions);
}
/// <summary>
/// Exports to NDJSON format (one JSON record per line: header, input context, rule steps, VEX evidence, statistics).
/// </summary>
public string ExportNdjson(ExplainTrace trace)
{
var builder = new StringBuilder();
// Header line
var header = new
{
type = "header",
run_id = trace.RunId,
tenant_id = trace.TenantId,
policy_id = trace.PolicyId,
policy_version = trace.PolicyVersion,
evaluation_timestamp = trace.EvaluationTimestamp,
final_outcome = trace.FinalOutcome
};
builder.AppendLine(JsonSerializer.Serialize(header, NdjsonOptions));
// Input context line
var context = new { type = "context", context = trace.InputContext };
builder.AppendLine(JsonSerializer.Serialize(context, NdjsonOptions));
// Rule steps
foreach (var step in trace.RuleSteps)
{
var stepRecord = new { type = "rule_step", step };
builder.AppendLine(JsonSerializer.Serialize(stepRecord, NdjsonOptions));
}
// VEX evidence
foreach (var vex in trace.VexEvidence)
{
var vexRecord = new { type = "vex_evidence", evidence = vex };
builder.AppendLine(JsonSerializer.Serialize(vexRecord, NdjsonOptions));
}
// Statistics line
var stats = new { type = "statistics", statistics = trace.Statistics };
builder.AppendLine(JsonSerializer.Serialize(stats, NdjsonOptions));
return builder.ToString();
}
/// <summary>
/// Exports to human-readable text format.
/// </summary>
public string ExportText(ExplainTrace trace)
{
var builder = new StringBuilder();
builder.AppendLine("================================================================================");
builder.AppendLine("POLICY EVALUATION EXPLAIN TRACE");
builder.AppendLine("================================================================================");
builder.AppendLine();
builder.AppendLine("RUN INFORMATION:");
builder.AppendLine($" Run ID: {trace.RunId}");
builder.AppendLine($" Tenant: {trace.TenantId}");
builder.AppendLine($" Policy: {trace.PolicyId}");
if (trace.PolicyVersion.HasValue)
{
builder.AppendLine($" Policy Version: {trace.PolicyVersion}");
}
builder.AppendLine($" Evaluation Time: {trace.EvaluationTimestamp:O}");
builder.AppendLine($" Duration: {trace.EvaluationDurationMs}ms");
builder.AppendLine($" Final Outcome: {trace.FinalOutcome}");
builder.AppendLine();
builder.AppendLine("INPUT CONTEXT:");
if (!string.IsNullOrWhiteSpace(trace.InputContext.ComponentPurl))
{
builder.AppendLine($" Component PURL: {trace.InputContext.ComponentPurl}");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.VulnerabilityId))
{
builder.AppendLine($" Vulnerability: {trace.InputContext.VulnerabilityId}");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.InputSeverity))
{
builder.AppendLine($" Input Severity: {trace.InputContext.InputSeverity}");
}
if (trace.InputContext.InputCvssScore.HasValue)
{
builder.AppendLine($" CVSS Score: {trace.InputContext.InputCvssScore:F1}");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.ReachabilityState))
{
builder.AppendLine($" Reachability: {trace.InputContext.ReachabilityState} ({trace.InputContext.ReachabilityConfidence:P0})");
}
builder.AppendLine();
builder.AppendLine("RULE EVALUATION STEPS:");
builder.AppendLine("--------------------------------------------------------------------------------");
foreach (var step in trace.RuleSteps)
{
var matchIndicator = step.Matched ? "[MATCH]" : "[ ]";
var finalIndicator = step.IsFinalMatch ? " *FINAL*" : "";
builder.AppendLine($" {step.StepNumber,3}. {matchIndicator} {step.RuleName}{finalIndicator}");
builder.AppendLine($" Priority: {step.RulePriority}");
if (!string.IsNullOrWhiteSpace(step.Expression))
{
var expr = step.Expression.Length > 60
? step.Expression[..57] + "..."
: step.Expression;
builder.AppendLine($" Expression: {expr}");
}
if (step.Matched)
{
builder.AppendLine($" Outcome: {step.Outcome}");
if (!string.IsNullOrWhiteSpace(step.AssignedSeverity))
{
builder.AppendLine($" Severity: {step.AssignedSeverity}");
}
}
if (!string.IsNullOrWhiteSpace(step.Explanation))
{
builder.AppendLine($" Reason: {step.Explanation}");
}
builder.AppendLine();
}
if (!trace.VexEvidence.IsDefaultOrEmpty)
{
builder.AppendLine("VEX EVIDENCE:");
builder.AppendLine("--------------------------------------------------------------------------------");
foreach (var vex in trace.VexEvidence)
{
var appliedIndicator = vex.WasApplied ? "[APPLIED]" : "[IGNORED]";
builder.AppendLine($" {appliedIndicator} {vex.Vendor}: {vex.Status}");
if (!string.IsNullOrWhiteSpace(vex.Justification))
{
builder.AppendLine($" Justification: {vex.Justification}");
}
if (!string.IsNullOrWhiteSpace(vex.Explanation))
{
builder.AppendLine($" Reason: {vex.Explanation}");
}
}
builder.AppendLine();
}
builder.AppendLine("STATISTICS:");
builder.AppendLine("--------------------------------------------------------------------------------");
builder.AppendLine($" Rules Evaluated: {trace.Statistics.TotalRulesEvaluated}");
builder.AppendLine($" Rules Fired: {trace.Statistics.TotalRulesFired}");
builder.AppendLine($" VEX Overrides: {trace.Statistics.TotalVexOverrides}");
builder.AppendLine($" Total Duration: {trace.Statistics.TotalEvaluationMs}ms");
builder.AppendLine($" Avg Rule Time: {trace.Statistics.AverageRuleEvaluationMicroseconds:F1}us");
builder.AppendLine();
if (!string.IsNullOrWhiteSpace(trace.DeterminismHash))
{
builder.AppendLine($"Determinism Hash: {trace.DeterminismHash}");
}
builder.AppendLine("================================================================================");
return builder.ToString();
}
/// <summary>
/// Exports to Markdown format.
/// </summary>
public string ExportMarkdown(ExplainTrace trace)
{
var builder = new StringBuilder();
builder.AppendLine("# Policy Evaluation Explain Trace");
builder.AppendLine();
builder.AppendLine("## Run Information");
builder.AppendLine();
builder.AppendLine("| Property | Value |");
builder.AppendLine("|----------|-------|");
builder.AppendLine($"| Run ID | `{trace.RunId}` |");
builder.AppendLine($"| Tenant | `{trace.TenantId}` |");
builder.AppendLine($"| Policy | `{trace.PolicyId}` |");
if (trace.PolicyVersion.HasValue)
{
builder.AppendLine($"| Version | `{trace.PolicyVersion}` |");
}
builder.AppendLine($"| Evaluation Time | `{trace.EvaluationTimestamp:O}` |");
builder.AppendLine($"| Duration | {trace.EvaluationDurationMs}ms |");
builder.AppendLine($"| **Final Outcome** | **{trace.FinalOutcome}** |");
builder.AppendLine();
builder.AppendLine("## Input Context");
builder.AppendLine();
if (!string.IsNullOrWhiteSpace(trace.InputContext.ComponentPurl))
{
builder.AppendLine($"- **Component**: `{trace.InputContext.ComponentPurl}`");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.VulnerabilityId))
{
builder.AppendLine($"- **Vulnerability**: `{trace.InputContext.VulnerabilityId}`");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.InputSeverity))
{
builder.AppendLine($"- **Severity**: {trace.InputContext.InputSeverity}");
}
if (trace.InputContext.InputCvssScore.HasValue)
{
builder.AppendLine($"- **CVSS Score**: {trace.InputContext.InputCvssScore:F1}");
}
if (!string.IsNullOrWhiteSpace(trace.InputContext.ReachabilityState))
{
builder.AppendLine($"- **Reachability**: {trace.InputContext.ReachabilityState} ({trace.InputContext.ReachabilityConfidence:P0} confidence)");
}
builder.AppendLine();
builder.AppendLine("## Rule Evaluation Steps");
builder.AppendLine();
builder.AppendLine("| # | Rule | Priority | Matched | Outcome | Severity |");
builder.AppendLine("|---|------|----------|---------|---------|----------|");
foreach (var step in trace.RuleSteps)
{
var matched = step.Matched ? (step.IsFinalMatch ? "**YES** (final)" : "YES") : "no";
var outcome = step.Matched ? step.Outcome ?? "-" : "-";
var severity = step.AssignedSeverity ?? "-";
builder.AppendLine($"| {step.StepNumber} | `{step.RuleName}` | {step.RulePriority} | {matched} | {outcome} | {severity} |");
}
builder.AppendLine();
if (!trace.VexEvidence.IsDefaultOrEmpty)
{
builder.AppendLine("## VEX Evidence");
builder.AppendLine();
builder.AppendLine("| Vendor | Status | Applied | Justification |");
builder.AppendLine("|--------|--------|---------|---------------|");
foreach (var vex in trace.VexEvidence)
{
var applied = vex.WasApplied ? "**YES**" : "no";
var justification = vex.Justification ?? "-";
builder.AppendLine($"| {vex.Vendor} | {vex.Status} | {applied} | {justification} |");
}
builder.AppendLine();
}
builder.AppendLine("## Statistics");
builder.AppendLine();
builder.AppendLine($"- **Rules Evaluated**: {trace.Statistics.TotalRulesEvaluated}");
builder.AppendLine($"- **Rules Fired**: {trace.Statistics.TotalRulesFired}");
builder.AppendLine($"- **VEX Overrides**: {trace.Statistics.TotalVexOverrides}");
builder.AppendLine($"- **Total Duration**: {trace.Statistics.TotalEvaluationMs}ms");
builder.AppendLine($"- **Avg Rule Time**: {trace.Statistics.AverageRuleEvaluationMicroseconds:F1}μs");
builder.AppendLine();
if (!string.IsNullOrWhiteSpace(trace.DeterminismHash))
{
builder.AppendLine("---");
builder.AppendLine($"*Determinism Hash: `{trace.DeterminismHash}`*");
}
return builder.ToString();
}
}
/// <summary>
/// Builder for constructing explain traces from evaluation results.
/// </summary>
public sealed class ExplainTraceBuilder
{
private string? _runId;
private string? _tenantId;
private string? _policyId;
private int? _policyVersion;
private DateTimeOffset _evaluationTimestamp;
private long _evaluationDurationMs;
private string? _finalOutcome;
private ExplainTraceInputContext? _inputContext;
private readonly List<ExplainTraceRuleStep> _ruleSteps = new();
private readonly List<ExplainTraceVexEvidence> _vexEvidence = new();
private RuleHitStatistics? _statistics;
private string? _determinismHash;
private readonly Dictionary<string, string> _metadata = new();
public ExplainTraceBuilder WithRunId(string runId)
{
_runId = runId;
return this;
}
public ExplainTraceBuilder WithTenant(string tenantId)
{
_tenantId = tenantId;
return this;
}
public ExplainTraceBuilder WithPolicy(string policyId, int? version = null)
{
_policyId = policyId;
_policyVersion = version;
return this;
}
public ExplainTraceBuilder WithEvaluationTimestamp(DateTimeOffset timestamp)
{
_evaluationTimestamp = timestamp;
return this;
}
public ExplainTraceBuilder WithDuration(long milliseconds)
{
_evaluationDurationMs = milliseconds;
return this;
}
public ExplainTraceBuilder WithFinalOutcome(string outcome)
{
_finalOutcome = outcome;
return this;
}
public ExplainTraceBuilder WithInputContext(ExplainTraceInputContext context)
{
_inputContext = context;
return this;
}
public ExplainTraceBuilder AddRuleStep(ExplainTraceRuleStep step)
{
_ruleSteps.Add(step);
return this;
}
public ExplainTraceBuilder AddVexEvidence(ExplainTraceVexEvidence evidence)
{
_vexEvidence.Add(evidence);
return this;
}
public ExplainTraceBuilder WithStatistics(RuleHitStatistics statistics)
{
_statistics = statistics;
return this;
}
public ExplainTraceBuilder WithDeterminismHash(string hash)
{
_determinismHash = hash;
return this;
}
public ExplainTraceBuilder AddMetadata(string key, string value)
{
_metadata[key] = value;
return this;
}
public ExplainTrace Build()
{
if (string.IsNullOrWhiteSpace(_runId))
throw new InvalidOperationException("Run ID is required");
if (string.IsNullOrWhiteSpace(_tenantId))
throw new InvalidOperationException("Tenant ID is required");
if (string.IsNullOrWhiteSpace(_policyId))
throw new InvalidOperationException("Policy ID is required");
if (string.IsNullOrWhiteSpace(_finalOutcome))
throw new InvalidOperationException("Final outcome is required");
_inputContext ??= new ExplainTraceInputContext();
_statistics ??= new RuleHitStatistics
{
RunId = _runId,
PolicyId = _policyId,
TotalRulesEvaluated = _ruleSteps.Count,
TotalRulesFired = _ruleSteps.Count(s => s.Matched),
TotalVexOverrides = _vexEvidence.Count(v => v.WasApplied),
RulesFiredByCategory = ImmutableDictionary<string, int>.Empty,
RulesFiredByOutcome = ImmutableDictionary<string, int>.Empty,
VexOverridesByVendor = ImmutableDictionary<string, int>.Empty,
VexOverridesByStatus = ImmutableDictionary<string, int>.Empty,
TopRulesByHitCount = ImmutableArray<RuleHitCount>.Empty,
TotalEvaluationMs = _evaluationDurationMs
};
return new ExplainTrace
{
RunId = _runId,
TenantId = _tenantId,
PolicyId = _policyId,
PolicyVersion = _policyVersion,
EvaluationTimestamp = _evaluationTimestamp,
EvaluationDurationMs = _evaluationDurationMs,
FinalOutcome = _finalOutcome,
InputContext = _inputContext,
RuleSteps = _ruleSteps.ToImmutableArray(),
VexEvidence = _vexEvidence.ToImmutableArray(),
Statistics = _statistics,
DeterminismHash = _determinismHash,
Metadata = _metadata.ToImmutableDictionary()
};
}
}
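// Example (illustrative sketch; identifiers and values are hypothetical): builds a minimal
// single-rule trace with the builder above and renders it with the export service.
public static class ExplainTraceExportExample
{
    public static string BuildAndExportMarkdown()
    {
        var trace = new ExplainTraceBuilder()
            .WithRunId("run-001")
            .WithTenant("tenant-a")
            .WithPolicy("policy-baseline", version: 3)
            .WithEvaluationTimestamp(DateTimeOffset.UnixEpoch)
            .WithDuration(12)
            .WithFinalOutcome("deny")
            .WithInputContext(new ExplainTraceInputContext
            {
                ComponentPurl = "pkg:npm/example@1.0.0",
                VulnerabilityId = "CVE-2024-0001",
                InputSeverity = "high"
            })
            .AddRuleStep(new ExplainTraceRuleStep
            {
                StepNumber = 1,
                RuleName = "block-high-severity",
                Matched = true,
                Outcome = "deny",
                IsFinalMatch = true
            })
            .Build();

        return new ExplainTraceExportService().Export(trace, ExplainTraceFormat.Markdown);
    }
}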

View File

@@ -0,0 +1,424 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Telemetry;
/// <summary>
/// Represents a structured trace record for a policy rule hit.
/// </summary>
public sealed record RuleHitTrace
{
/// <summary>
/// Unique trace identifier.
/// </summary>
public required string TraceId { get; init; }
/// <summary>
/// Span identifier within the trace.
/// </summary>
public required string SpanId { get; init; }
/// <summary>
/// Parent span identifier (if any).
/// </summary>
public string? ParentSpanId { get; init; }
/// <summary>
/// Tenant context.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Policy identifier.
/// </summary>
public required string PolicyId { get; init; }
/// <summary>
/// Policy version.
/// </summary>
public int? PolicyVersion { get; init; }
/// <summary>
/// Run identifier.
/// </summary>
public required string RunId { get; init; }
/// <summary>
/// Rule that fired.
/// </summary>
public required string RuleName { get; init; }
/// <summary>
/// Rule priority (lower = higher priority).
/// </summary>
public int RulePriority { get; init; }
/// <summary>
/// Rule category/type.
/// </summary>
public string? RuleCategory { get; init; }
/// <summary>
/// Outcome of the rule (allow, deny, suppress, etc.).
/// </summary>
public required string Outcome { get; init; }
/// <summary>
/// Severity assigned by the rule.
/// </summary>
public string? AssignedSeverity { get; init; }
/// <summary>
/// Component PURL that triggered the rule.
/// </summary>
public string? ComponentPurl { get; init; }
/// <summary>
/// Advisory ID that triggered the rule.
/// </summary>
public string? AdvisoryId { get; init; }
/// <summary>
/// Vulnerability ID (CVE, GHSA, etc.).
/// </summary>
public string? VulnerabilityId { get; init; }
/// <summary>
/// VEX status that influenced the rule (if any).
/// </summary>
public string? VexStatus { get; init; }
/// <summary>
/// VEX justification (if VEX was applied).
/// </summary>
public string? VexJustification { get; init; }
/// <summary>
/// VEX vendor that provided the status.
/// </summary>
public string? VexVendor { get; init; }
/// <summary>
/// Whether this was a VEX override.
/// </summary>
public bool IsVexOverride { get; init; }
/// <summary>
/// Input CVSS score (if applicable).
/// </summary>
public decimal? InputCvssScore { get; init; }
/// <summary>
/// Reachability state (if applicable).
/// </summary>
public string? ReachabilityState { get; init; }
/// <summary>
/// Reachability confidence (0.0-1.0).
/// </summary>
public double? ReachabilityConfidence { get; init; }
/// <summary>
/// Expression that was evaluated.
/// </summary>
public string? Expression { get; init; }
/// <summary>
/// Expression evaluation result.
/// </summary>
public bool ExpressionResult { get; init; }
/// <summary>
/// Evaluation timestamp (deterministic).
/// </summary>
public required DateTimeOffset EvaluationTimestamp { get; init; }
/// <summary>
/// Wall-clock timestamp when trace was recorded.
/// </summary>
public required DateTimeOffset RecordedAt { get; init; }
/// <summary>
/// Evaluation duration in microseconds.
/// </summary>
public long EvaluationMicroseconds { get; init; }
/// <summary>
/// Whether this trace was retained by trace sampling.
/// </summary>
public bool IsSampled { get; init; }
/// <summary>
/// Additional context attributes.
/// </summary>
public ImmutableDictionary<string, string> Attributes { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// Creates a trace ID from the current activity or generates a new one.
/// </summary>
public static string GetOrCreateTraceId()
{
var activity = Activity.Current;
if (activity is not null)
{
return activity.TraceId.ToString();
}
Span<byte> bytes = stackalloc byte[16];
RandomNumberGenerator.Fill(bytes);
return Convert.ToHexStringLower(bytes);
}
/// <summary>
/// Creates a span ID from the current activity or generates a new one.
/// </summary>
public static string GetOrCreateSpanId()
{
var activity = Activity.Current;
if (activity is not null)
{
return activity.SpanId.ToString();
}
Span<byte> bytes = stackalloc byte[8];
RandomNumberGenerator.Fill(bytes);
return Convert.ToHexStringLower(bytes);
}
}
/// <summary>
/// Aggregated rule hit statistics for a policy run.
/// </summary>
public sealed record RuleHitStatistics
{
/// <summary>
/// Run identifier.
/// </summary>
public required string RunId { get; init; }
/// <summary>
/// Policy identifier.
/// </summary>
public required string PolicyId { get; init; }
/// <summary>
/// Total rules evaluated.
/// </summary>
public required int TotalRulesEvaluated { get; init; }
/// <summary>
/// Total rules that fired (matched).
/// </summary>
public required int TotalRulesFired { get; init; }
/// <summary>
/// Total VEX overrides applied.
/// </summary>
public required int TotalVexOverrides { get; init; }
/// <summary>
/// Rules fired by category.
/// </summary>
public required ImmutableDictionary<string, int> RulesFiredByCategory { get; init; }
/// <summary>
/// Rules fired by outcome.
/// </summary>
public required ImmutableDictionary<string, int> RulesFiredByOutcome { get; init; }
/// <summary>
/// VEX overrides by vendor.
/// </summary>
public required ImmutableDictionary<string, int> VexOverridesByVendor { get; init; }
/// <summary>
/// VEX overrides by status.
/// </summary>
public required ImmutableDictionary<string, int> VexOverridesByStatus { get; init; }
/// <summary>
/// Top rules by hit count.
/// </summary>
public required ImmutableArray<RuleHitCount> TopRulesByHitCount { get; init; }
/// <summary>
/// Total evaluation duration in milliseconds.
/// </summary>
public required long TotalEvaluationMs { get; init; }
/// <summary>
/// Average rule evaluation time in microseconds.
/// </summary>
public double AverageRuleEvaluationMicroseconds =>
TotalRulesEvaluated > 0 ? (double)TotalEvaluationMs * 1000 / TotalRulesEvaluated : 0;
}
/// <summary>
/// Rule hit count entry.
/// </summary>
public sealed record RuleHitCount(string RuleName, int HitCount, string Outcome);
/// <summary>
/// Factory for creating rule hit traces.
/// </summary>
public static class RuleHitTraceFactory
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false
};
/// <summary>
/// Creates a rule hit trace from evaluation context.
/// </summary>
public static RuleHitTrace Create(
string tenantId,
string policyId,
int? policyVersion,
string runId,
string ruleName,
int rulePriority,
string outcome,
DateTimeOffset evaluationTimestamp,
TimeProvider? timeProvider = null,
string? ruleCategory = null,
string? assignedSeverity = null,
string? componentPurl = null,
string? advisoryId = null,
string? vulnerabilityId = null,
string? vexStatus = null,
string? vexJustification = null,
string? vexVendor = null,
bool isVexOverride = false,
decimal? inputCvssScore = null,
string? reachabilityState = null,
double? reachabilityConfidence = null,
string? expression = null,
bool expressionResult = false,
long evaluationMicroseconds = 0,
bool isSampled = false,
ImmutableDictionary<string, string>? attributes = null)
{
var time = timeProvider ?? TimeProvider.System;
var traceId = RuleHitTrace.GetOrCreateTraceId();
var spanId = RuleHitTrace.GetOrCreateSpanId();
var parentSpanId = Activity.Current?.ParentSpanId.ToString();
return new RuleHitTrace
{
TraceId = traceId,
SpanId = spanId,
ParentSpanId = parentSpanId,
TenantId = tenantId.ToLowerInvariant(),
PolicyId = policyId,
PolicyVersion = policyVersion,
RunId = runId,
RuleName = ruleName,
RulePriority = rulePriority,
RuleCategory = ruleCategory,
Outcome = outcome,
AssignedSeverity = assignedSeverity,
ComponentPurl = componentPurl,
AdvisoryId = advisoryId,
VulnerabilityId = vulnerabilityId,
VexStatus = vexStatus,
VexJustification = vexJustification,
VexVendor = vexVendor,
IsVexOverride = isVexOverride,
InputCvssScore = inputCvssScore,
ReachabilityState = reachabilityState,
ReachabilityConfidence = reachabilityConfidence,
Expression = expression,
ExpressionResult = expressionResult,
EvaluationTimestamp = evaluationTimestamp,
RecordedAt = time.GetUtcNow(),
EvaluationMicroseconds = evaluationMicroseconds,
IsSampled = isSampled,
Attributes = attributes ?? ImmutableDictionary<string, string>.Empty
};
}
/// <summary>
/// Serializes a rule hit trace to JSON.
/// </summary>
public static string ToJson(RuleHitTrace trace)
{
return JsonSerializer.Serialize(trace, JsonOptions);
}
/// <summary>
/// Serializes multiple rule hit traces to NDJSON.
/// </summary>
public static string ToNdjson(IEnumerable<RuleHitTrace> traces)
{
var builder = new StringBuilder();
foreach (var trace in traces)
{
builder.AppendLine(JsonSerializer.Serialize(trace, JsonOptions));
}
return builder.ToString();
}
/// <summary>
/// Creates aggregated statistics from a collection of traces.
/// </summary>
public static RuleHitStatistics CreateStatistics(
string runId,
string policyId,
IEnumerable<RuleHitTrace> traces,
int totalRulesEvaluated,
long totalEvaluationMs)
{
var traceList = traces.ToList();
var rulesFiredByCategory = traceList
.Where(t => !string.IsNullOrWhiteSpace(t.RuleCategory))
.GroupBy(t => t.RuleCategory!)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var rulesFiredByOutcome = traceList
.GroupBy(t => t.Outcome)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var vexOverrides = traceList.Where(t => t.IsVexOverride).ToList();
var vexOverridesByVendor = vexOverrides
.Where(t => !string.IsNullOrWhiteSpace(t.VexVendor))
.GroupBy(t => t.VexVendor!)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var vexOverridesByStatus = vexOverrides
.Where(t => !string.IsNullOrWhiteSpace(t.VexStatus))
.GroupBy(t => t.VexStatus!)
.ToImmutableDictionary(g => g.Key, g => g.Count());
var topRules = traceList
.GroupBy(t => (t.RuleName, t.Outcome))
.Select(g => new RuleHitCount(g.Key.RuleName, g.Count(), g.Key.Outcome))
.OrderByDescending(r => r.HitCount)
.Take(10)
.ToImmutableArray();
return new RuleHitStatistics
{
RunId = runId,
PolicyId = policyId,
TotalRulesEvaluated = totalRulesEvaluated,
TotalRulesFired = traceList.Count,
TotalVexOverrides = vexOverrides.Count,
RulesFiredByCategory = rulesFiredByCategory,
RulesFiredByOutcome = rulesFiredByOutcome,
VexOverridesByVendor = vexOverridesByVendor,
VexOverridesByStatus = vexOverridesByStatus,
TopRulesByHitCount = topRules,
TotalEvaluationMs = totalEvaluationMs
};
}
}
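// Example (illustrative sketch; identifiers are hypothetical): records one rule hit via the
// factory above and serializes it as NDJSON for log shipping.
public static class RuleHitTraceUsageExample
{
    public static string RecordAndSerialize(TimeProvider timeProvider)
    {
        var trace = RuleHitTraceFactory.Create(
            tenantId: "Tenant-A",                    // stored lower-cased as "tenant-a"
            policyId: "policy-baseline",
            policyVersion: 3,
            runId: "run-001",
            ruleName: "block-high-severity",
            rulePriority: 10,
            outcome: "deny",
            evaluationTimestamp: DateTimeOffset.UnixEpoch,
            timeProvider: timeProvider,
            vulnerabilityId: "CVE-2024-0001");

        return RuleHitTraceFactory.ToNdjson(new[] { trace });
    }
}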

View File

@@ -0,0 +1,553 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.Telemetry;
/// <summary>
/// Configuration for rule hit trace sampling.
/// </summary>
public sealed record RuleHitSamplingOptions
{
/// <summary>
/// Base sampling rate (0.0 to 1.0). Default is 0.1 (10%).
/// </summary>
public double BaseSamplingRate { get; init; } = 0.1;
/// <summary>
/// Sampling rate for VEX overrides (usually higher). Default is 1.0 (100%).
/// </summary>
public double VexOverrideSamplingRate { get; init; } = 1.0;
/// <summary>
/// Sampling rate for high-severity outcomes. Default is 0.5 (50%).
/// </summary>
public double HighSeveritySamplingRate { get; init; } = 0.5;
/// <summary>
/// Sampling rate during incident mode. Default is 1.0 (100%).
/// </summary>
public double IncidentModeSamplingRate { get; init; } = 1.0;
/// <summary>
/// Maximum traces to buffer per run before flushing.
/// </summary>
public int MaxBufferSizePerRun { get; init; } = 1000;
/// <summary>
/// Maximum total traces to buffer before forced flush.
/// </summary>
public int MaxTotalBufferSize { get; init; } = 10000;
/// <summary>
/// Whether to include full expression text in traces.
/// </summary>
public bool IncludeExpressions { get; init; } = true;
/// <summary>
/// Maximum expression length to include (truncated if longer).
/// </summary>
public int MaxExpressionLength { get; init; } = 500;
/// <summary>
/// High-severity outcomes that trigger elevated sampling.
/// </summary>
public ImmutableHashSet<string> HighSeverityOutcomes { get; init; } =
ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, "deny", "block", "critical", "high");
/// <summary>
/// Rules to always sample (by name pattern).
/// </summary>
public ImmutableArray<string> AlwaysSampleRules { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Default options.
/// </summary>
public static RuleHitSamplingOptions Default { get; } = new();
/// <summary>
/// Full sampling (for debugging/testing).
/// </summary>
public static RuleHitSamplingOptions FullSampling { get; } = new()
{
BaseSamplingRate = 1.0,
VexOverrideSamplingRate = 1.0,
HighSeveritySamplingRate = 1.0
};
}
/// <summary>
/// Interface for rule hit trace collection.
/// </summary>
public interface IRuleHitTraceCollector
{
/// <summary>
/// Records a rule hit trace.
/// </summary>
void Record(RuleHitTrace trace);
/// <summary>
/// Gets all traces for a run.
/// </summary>
IReadOnlyList<RuleHitTrace> GetTraces(string runId);
/// <summary>
/// Gets statistics for a run.
/// </summary>
RuleHitStatistics? GetStatistics(string runId);
/// <summary>
/// Flushes traces for a run.
/// </summary>
Task FlushAsync(string runId, CancellationToken cancellationToken = default);
/// <summary>
/// Completes a run and returns final statistics.
/// </summary>
RuleHitStatistics CompleteRun(string runId, int totalRulesEvaluated, long totalEvaluationMs);
}
/// <summary>
/// Interface for exporting rule hit traces.
/// </summary>
public interface IRuleHitTraceExporter
{
/// <summary>
/// Exports traces for a run.
/// </summary>
Task ExportAsync(
string runId,
IReadOnlyList<RuleHitTrace> traces,
RuleHitStatistics statistics,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Collects and manages rule hit traces with sampling controls.
/// </summary>
public sealed class RuleHitTraceCollector : IRuleHitTraceCollector, IDisposable
{
private readonly RuleHitSamplingOptions _options;
private readonly TimeProvider _timeProvider;
private readonly IReadOnlyList<IRuleHitTraceExporter> _exporters;
private readonly ConcurrentDictionary<string, RunTraceBuffer> _runBuffers = new();
private readonly Random _sampler;
private readonly object _samplerLock = new();
private volatile bool _incidentMode;
private bool _disposed;
public RuleHitTraceCollector(
RuleHitSamplingOptions? options = null,
TimeProvider? timeProvider = null,
IEnumerable<IRuleHitTraceExporter>? exporters = null)
{
_options = options ?? RuleHitSamplingOptions.Default;
_timeProvider = timeProvider ?? TimeProvider.System;
_exporters = exporters?.ToList() ?? new List<IRuleHitTraceExporter>();
_sampler = new Random();
}
/// <summary>
/// Enables or disables incident mode (100% sampling).
/// </summary>
public bool IncidentMode
{
get => _incidentMode;
set => _incidentMode = value;
}
/// <summary>
/// Records a rule hit trace with sampling.
/// </summary>
public void Record(RuleHitTrace trace)
{
ArgumentNullException.ThrowIfNull(trace);
// Determine if this trace should be sampled
if (!ShouldSample(trace))
{
// Still record metrics even if not sampled
RecordMetrics(trace);
return;
}
var buffer = _runBuffers.GetOrAdd(trace.RunId, _ => new RunTraceBuffer());
buffer.Add(trace with { IsSampled = true });
// Record metrics
RecordMetrics(trace);
// Check if we need to force flush
if (buffer.Count >= _options.MaxBufferSizePerRun)
{
// Async flush without blocking
_ = FlushAsync(trace.RunId, CancellationToken.None);
}
}
/// <summary>
/// Gets all sampled traces for a run.
/// </summary>
public IReadOnlyList<RuleHitTrace> GetTraces(string runId)
{
if (_runBuffers.TryGetValue(runId, out var buffer))
{
return buffer.GetTraces();
}
return Array.Empty<RuleHitTrace>();
}
/// <summary>
/// Gets current statistics for a run.
/// </summary>
public RuleHitStatistics? GetStatistics(string runId)
{
if (!_runBuffers.TryGetValue(runId, out var buffer))
{
return null;
}
var traces = buffer.GetTraces();
return RuleHitTraceFactory.CreateStatistics(
runId,
traces.FirstOrDefault()?.PolicyId ?? "unknown",
traces,
buffer.TotalRulesEvaluated,
buffer.TotalEvaluationMs);
}
/// <summary>
/// Flushes traces for a run to exporters.
/// </summary>
public async Task FlushAsync(string runId, CancellationToken cancellationToken = default)
{
if (!_runBuffers.TryGetValue(runId, out var buffer))
{
return;
}
var traces = buffer.FlushAndGet();
if (traces.Count == 0)
{
return;
}
var statistics = RuleHitTraceFactory.CreateStatistics(
runId,
traces.FirstOrDefault()?.PolicyId ?? "unknown",
traces,
buffer.TotalRulesEvaluated,
buffer.TotalEvaluationMs);
foreach (var exporter in _exporters)
{
try
{
await exporter.ExportAsync(runId, traces, statistics, cancellationToken)
.ConfigureAwait(false);
}
catch (OperationCanceledException)
{
throw;
}
catch
{
// Swallow exporter failures so trace export never breaks policy evaluation.
}
}
}
/// <summary>
/// Completes a run and returns final statistics.
/// </summary>
public RuleHitStatistics CompleteRun(string runId, int totalRulesEvaluated, long totalEvaluationMs)
{
if (!_runBuffers.TryRemove(runId, out var buffer))
{
return new RuleHitStatistics
{
RunId = runId,
PolicyId = "unknown",
TotalRulesEvaluated = totalRulesEvaluated,
TotalRulesFired = 0,
TotalVexOverrides = 0,
RulesFiredByCategory = ImmutableDictionary<string, int>.Empty,
RulesFiredByOutcome = ImmutableDictionary<string, int>.Empty,
VexOverridesByVendor = ImmutableDictionary<string, int>.Empty,
VexOverridesByStatus = ImmutableDictionary<string, int>.Empty,
TopRulesByHitCount = ImmutableArray<RuleHitCount>.Empty,
TotalEvaluationMs = totalEvaluationMs
};
}
buffer.TotalRulesEvaluated = totalRulesEvaluated;
buffer.TotalEvaluationMs = totalEvaluationMs;
var traces = buffer.GetTraces();
return RuleHitTraceFactory.CreateStatistics(
runId,
traces.FirstOrDefault()?.PolicyId ?? "unknown",
traces,
totalRulesEvaluated,
totalEvaluationMs);
}
private bool ShouldSample(RuleHitTrace trace)
{
// Incident mode = 100% sampling
if (_incidentMode)
{
return true;
}
// Check always-sample rules
if (!_options.AlwaysSampleRules.IsDefaultOrEmpty)
{
foreach (var pattern in _options.AlwaysSampleRules)
{
if (trace.RuleName.Contains(pattern, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
}
// VEX overrides get elevated sampling
if (trace.IsVexOverride)
{
return Sample(_options.VexOverrideSamplingRate);
}
// High-severity outcomes get elevated sampling
if (_options.HighSeverityOutcomes.Contains(trace.Outcome))
{
return Sample(_options.HighSeveritySamplingRate);
}
if (!string.IsNullOrWhiteSpace(trace.AssignedSeverity) &&
_options.HighSeverityOutcomes.Contains(trace.AssignedSeverity))
{
return Sample(_options.HighSeveritySamplingRate);
}
// Base sampling rate
return Sample(_options.BaseSamplingRate);
}
private bool Sample(double rate)
{
if (rate >= 1.0)
{
return true;
}
if (rate <= 0.0)
{
return false;
}
lock (_samplerLock)
{
return _sampler.NextDouble() < rate;
}
}
private static void RecordMetrics(RuleHitTrace trace)
{
// Record to existing telemetry counters
PolicyEngineTelemetry.RecordRuleFired(trace.PolicyId, trace.RuleName);
if (trace.IsVexOverride && !string.IsNullOrWhiteSpace(trace.VexVendor))
{
PolicyEngineTelemetry.RecordVexOverride(trace.PolicyId, trace.VexVendor);
}
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
_runBuffers.Clear();
}
private sealed class RunTraceBuffer
{
private readonly List<RuleHitTrace> _traces = new();
private readonly object _lock = new();
public int TotalRulesEvaluated { get; set; }
public long TotalEvaluationMs { get; set; }
public int Count
{
get
{
lock (_lock)
{
return _traces.Count;
}
}
}
public void Add(RuleHitTrace trace)
{
lock (_lock)
{
_traces.Add(trace);
}
}
public IReadOnlyList<RuleHitTrace> GetTraces()
{
lock (_lock)
{
return _traces.ToList();
}
}
public IReadOnlyList<RuleHitTrace> FlushAndGet()
{
lock (_lock)
{
var result = _traces.ToList();
_traces.Clear();
return result;
}
}
}
}
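// Editor's sketch (illustrative only, not part of the original commit): one way the collector
// above can be wired and driven for a single run. The run id and totals are assumptions, and
// RuleHitTrace construction is left to the caller because the record's full shape is not
// visible in this diff.
internal static class RuleHitTraceCollectorUsageSketch
{
    public static async Task<RuleHitStatistics?> CollectAsync(
        string runId,
        IEnumerable<RuleHitTrace> traces,
        CancellationToken cancellationToken = default)
    {
        var exporter = new InMemoryRuleHitTraceExporter();
        var collector = new RuleHitTraceCollector(
            RuleHitSamplingOptions.Default,
            TimeProvider.System,
            new IRuleHitTraceExporter[] { exporter });

        foreach (var trace in traces)
        {
            // Sampling happens inside Record; metrics are emitted whether or not the trace is kept.
            collector.Record(trace);
        }

        // Snapshot statistics before flushing: FlushAsync drains the per-run buffer.
        var statistics = collector.GetStatistics(runId);

        // Push the sampled traces to the exporter, then drop the run's buffer.
        await collector.FlushAsync(runId, cancellationToken);
        collector.CompleteRun(runId, totalRulesEvaluated: 100, totalEvaluationMs: 25);

        collector.Dispose();
        return statistics;
    }
}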
/// <summary>
/// Exports rule hit traces to structured logs.
/// </summary>
public sealed class LoggingRuleHitTraceExporter : IRuleHitTraceExporter
{
private readonly Action<string, RuleHitTrace>? _logTrace;
private readonly Action<string, RuleHitStatistics>? _logStatistics;
public LoggingRuleHitTraceExporter(
Action<string, RuleHitTrace>? logTrace = null,
Action<string, RuleHitStatistics>? logStatistics = null)
{
_logTrace = logTrace;
_logStatistics = logStatistics;
}
public Task ExportAsync(
string runId,
IReadOnlyList<RuleHitTrace> traces,
RuleHitStatistics statistics,
CancellationToken cancellationToken = default)
{
if (_logTrace is not null)
{
foreach (var trace in traces)
{
_logTrace(runId, trace);
}
}
_logStatistics?.Invoke(runId, statistics);
return Task.CompletedTask;
}
}
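// Editor's sketch (illustrative only, not part of the original commit): the exporter above only
// takes delegates, so any sink can be plugged in. Console output is used here purely for
// illustration; the message format is an assumption.
internal static class LoggingRuleHitTraceExporterSketch
{
    public static LoggingRuleHitTraceExporter CreateConsoleExporter()
    {
        return new LoggingRuleHitTraceExporter(
            logTrace: static (runId, trace) =>
                Console.WriteLine($"run={runId} rule={trace.RuleName} outcome={trace.Outcome} sampled={trace.IsSampled}"),
            logStatistics: static (runId, stats) =>
                Console.WriteLine($"run={runId} fired={stats.TotalRulesFired}/{stats.TotalRulesEvaluated} vexOverrides={stats.TotalVexOverrides}"));
    }
}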
/// <summary>
/// Exports rule hit traces to the Activity/span for distributed tracing.
/// </summary>
public sealed class ActivityRuleHitTraceExporter : IRuleHitTraceExporter
{
public Task ExportAsync(
string runId,
IReadOnlyList<RuleHitTrace> traces,
RuleHitStatistics statistics,
CancellationToken cancellationToken = default)
{
var activity = Activity.Current;
if (activity is null)
{
return Task.CompletedTask;
}
// Add statistics as activity tags
activity.SetTag("policy.rules_evaluated", statistics.TotalRulesEvaluated);
activity.SetTag("policy.rules_fired", statistics.TotalRulesFired);
activity.SetTag("policy.vex_overrides", statistics.TotalVexOverrides);
activity.SetTag("policy.evaluation_ms", statistics.TotalEvaluationMs);
// Add top rules as events
foreach (var rule in statistics.TopRulesByHitCount.Take(5))
{
var tags = new ActivityTagsCollection
{
{ "rule.name", rule.RuleName },
{ "rule.hits", rule.HitCount },
{ "rule.outcome", rule.Outcome }
};
activity.AddEvent(new ActivityEvent("policy.rule.fired", tags: tags));
}
// Add VEX override summary
if (statistics.TotalVexOverrides > 0)
{
foreach (var (vendor, count) in statistics.VexOverridesByVendor)
{
var tags = new ActivityTagsCollection
{
{ "vex.vendor", vendor },
{ "vex.count", count }
};
activity.AddEvent(new ActivityEvent("policy.vex.override", tags: tags));
}
}
return Task.CompletedTask;
}
}
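// Editor's sketch (illustrative only, not part of the original commit): the exporter above is a
// no-op when Activity.Current is null, which in production is normally populated by OpenTelemetry.
// This sketch creates a span locally via an ActivityListener; the source name is an assumption.
internal static class ActivityRuleHitTraceExporterSketch
{
    private static readonly ActivitySource Source = new("StellaOps.Policy.Engine.Sketch");

    public static async Task ExportUnderSpanAsync(
        string runId,
        IReadOnlyList<RuleHitTrace> traces,
        RuleHitStatistics statistics,
        CancellationToken cancellationToken = default)
    {
        // Without a registered listener, StartActivity returns null and the exporter skips tagging.
        using var listener = new ActivityListener
        {
            ShouldListenTo = source => source.Name == Source.Name,
            Sample = (ref ActivityCreationOptions<ActivityContext> _) => ActivitySamplingResult.AllDataAndRecorded
        };
        ActivitySource.AddActivityListener(listener);

        using var activity = Source.StartActivity("policy.evaluate");
        var exporter = new ActivityRuleHitTraceExporter();
        await exporter.ExportAsync(runId, traces, statistics, cancellationToken);
    }
}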
/// <summary>
/// In-memory exporter for testing.
/// </summary>
public sealed class InMemoryRuleHitTraceExporter : IRuleHitTraceExporter
{
private readonly ConcurrentDictionary<string, ExportedRun> _exports = new();
public Task ExportAsync(
string runId,
IReadOnlyList<RuleHitTrace> traces,
RuleHitStatistics statistics,
CancellationToken cancellationToken = default)
{
_exports.AddOrUpdate(
runId,
_ => new ExportedRun(traces.ToList(), statistics),
(_, existing) =>
{
existing.Traces.AddRange(traces);
return existing with { Statistics = statistics };
});
return Task.CompletedTask;
}
public ExportedRun? GetExport(string runId)
{
_exports.TryGetValue(runId, out var export);
return export;
}
public IReadOnlyDictionary<string, ExportedRun> GetAllExports() =>
_exports.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
public void Clear() => _exports.Clear();
public sealed record ExportedRun(List<RuleHitTrace> Traces, RuleHitStatistics Statistics);
}

View File

@@ -45,6 +45,84 @@ public sealed class PolicyBundleServiceTests
Assert.NotEmpty(response.Diagnostics);
}
[Fact]
public async Task CompileAndStoreAsync_ReturnsAocMetadata()
{
var services = CreateServices();
var request = new PolicyBundleRequest(new PolicyDslPayload("stella-dsl@1", BaselineDsl), SigningKeyId: "test-key");
var response = await services.BundleService.CompileAndStoreAsync("pack-1", 1, request, CancellationToken.None);
Assert.True(response.Success);
Assert.NotNull(response.AocMetadata);
Assert.StartsWith("comp-", response.AocMetadata!.CompilationId);
Assert.Equal("stella-dsl@1", response.AocMetadata.CompilerVersion);
Assert.StartsWith("sha256:", response.AocMetadata.SourceDigest);
Assert.StartsWith("sha256:", response.AocMetadata.ArtifactDigest);
Assert.True(response.AocMetadata.RuleCount >= 1);
Assert.True(response.AocMetadata.ComplexityScore >= 0);
}
[Fact]
public async Task CompileAndStoreAsync_IncludesProvenanceWhenProvided()
{
var services = CreateServices();
var provenance = new PolicyProvenanceInput(
SourceType: "git",
SourceUrl: "https://github.com/test/policies",
Submitter: "test-user",
CommitSha: "abc123",
Branch: "main");
var request = new PolicyBundleRequest(
new PolicyDslPayload("stella-dsl@1", BaselineDsl),
SigningKeyId: "test-key",
Provenance: provenance);
var response = await services.BundleService.CompileAndStoreAsync("pack-1", 1, request, CancellationToken.None);
Assert.True(response.Success);
Assert.NotNull(response.AocMetadata);
// Verify bundle record has provenance stored
var bundle = await services.Repository.GetBundleAsync("pack-1", 1, CancellationToken.None);
Assert.NotNull(bundle);
Assert.NotNull(bundle!.AocMetadata);
Assert.NotNull(bundle.AocMetadata!.Provenance);
Assert.Equal("git", bundle.AocMetadata.Provenance!.SourceType);
Assert.Equal("https://github.com/test/policies", bundle.AocMetadata.Provenance.SourceUrl);
Assert.Equal("test-user", bundle.AocMetadata.Provenance.Submitter);
Assert.Equal("abc123", bundle.AocMetadata.Provenance.CommitSha);
Assert.Equal("main", bundle.AocMetadata.Provenance.Branch);
}
[Fact]
public async Task CompileAndStoreAsync_NullAocMetadataOnFailure()
{
var services = CreateServices();
var request = new PolicyBundleRequest(new PolicyDslPayload("unknown", "policy bad"), SigningKeyId: null);
var response = await services.BundleService.CompileAndStoreAsync("pack-1", 1, request, CancellationToken.None);
Assert.False(response.Success);
Assert.Null(response.AocMetadata);
}
[Fact]
public async Task CompileAndStoreAsync_SourceDigestIsDeterministic()
{
var services = CreateServices();
var request1 = new PolicyBundleRequest(new PolicyDslPayload("stella-dsl@1", BaselineDsl), SigningKeyId: "test-key");
var request2 = new PolicyBundleRequest(new PolicyDslPayload("stella-dsl@1", BaselineDsl), SigningKeyId: "test-key");
var response1 = await services.BundleService.CompileAndStoreAsync("pack-1", 1, request1, CancellationToken.None);
var response2 = await services.BundleService.CompileAndStoreAsync("pack-2", 1, request2, CancellationToken.None);
Assert.NotNull(response1.AocMetadata);
Assert.NotNull(response2.AocMetadata);
Assert.Equal(response1.AocMetadata!.SourceDigest, response2.AocMetadata!.SourceDigest);
Assert.Equal(response1.AocMetadata.ArtifactDigest, response2.AocMetadata.ArtifactDigest);
}
private static ServiceHarness CreateServices()
{
var compiler = new PolicyCompiler();
@@ -53,10 +131,11 @@ public sealed class PolicyBundleServiceTests
var compilationService = new PolicyCompilationService(compiler, complexity, new StaticOptionsMonitor(options.Value), TimeProvider.System);
var repo = new InMemoryPolicyPackRepository();
return new ServiceHarness(
- new PolicyBundleService(compilationService, repo, TimeProvider.System));
+ new PolicyBundleService(compilationService, repo, TimeProvider.System),
+ repo);
}
- private sealed record ServiceHarness(PolicyBundleService BundleService);
+ private sealed record ServiceHarness(PolicyBundleService BundleService, InMemoryPolicyPackRepository Repository);
private sealed class StaticOptionsMonitor : IOptionsMonitor<PolicyEngineOptions>
{

View File

@@ -0,0 +1,268 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.Compilation;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Evaluation;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Services;
using StellaOps.PolicyDsl;
using Xunit;
namespace StellaOps.Policy.Engine.Tests;
public sealed class PolicyRuntimeEvaluationServiceTests
{
private const string TestPolicy = """
policy "Test Policy" syntax "stella-dsl@1" {
rule block_critical priority 10 {
when severity.normalized == "Critical"
then status := "blocked"
because "Block critical findings"
}
rule warn_high priority 20 {
when severity.normalized == "High"
then status := "warn"
because "Warn on high severity findings"
}
rule allow_default priority 100 {
when true
then status := "affected"
because "Default affected status"
}
}
""";
[Fact]
public async Task EvaluateAsync_ReturnsDecisionFromCompiledPolicy()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var request = CreateRequest("pack-1", 1, severity: "Critical");
var response = await harness.Service.EvaluateAsync(request, CancellationToken.None);
Assert.Equal("pack-1", response.PackId);
Assert.Equal(1, response.Version);
Assert.NotNull(response.PolicyDigest);
Assert.False(response.Cached);
}
[Fact]
public async Task EvaluateAsync_UsesCacheOnSecondCall()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var request = CreateRequest("pack-1", 1, severity: "High");
// First call - cache miss
var response1 = await harness.Service.EvaluateAsync(request, CancellationToken.None);
Assert.False(response1.Cached);
// Second call - cache hit
var response2 = await harness.Service.EvaluateAsync(request, CancellationToken.None);
Assert.True(response2.Cached);
Assert.Equal(CacheSource.InMemory, response2.CacheSource);
Assert.Equal(response1.Status, response2.Status);
Assert.Equal(response1.CorrelationId, response2.CorrelationId);
}
[Fact]
public async Task EvaluateAsync_BypassCacheWhenRequested()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var request = CreateRequest("pack-1", 1, severity: "Medium");
// First call
var response1 = await harness.Service.EvaluateAsync(request, CancellationToken.None);
Assert.False(response1.Cached);
// Second call with bypass
var bypassRequest = request with { BypassCache = true };
var response2 = await harness.Service.EvaluateAsync(bypassRequest, CancellationToken.None);
Assert.False(response2.Cached);
}
[Fact]
public async Task EvaluateAsync_ThrowsOnMissingBundle()
{
var harness = CreateHarness();
var request = CreateRequest("non-existent", 1, severity: "Low");
await Assert.ThrowsAsync<InvalidOperationException>(
() => harness.Service.EvaluateAsync(request, CancellationToken.None));
}
[Fact]
public async Task EvaluateAsync_GeneratesDeterministicCorrelationId()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var request = CreateRequest("pack-1", 1, severity: "High");
var response1 = await harness.Service.EvaluateAsync(request, CancellationToken.None);
// Create a new harness with fresh cache
var harness2 = CreateHarness();
await harness2.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var response2 = await harness2.Service.EvaluateAsync(request, CancellationToken.None);
// Same inputs should produce same correlation ID
Assert.Equal(response1.CorrelationId, response2.CorrelationId);
}
[Fact]
public async Task EvaluateBatchAsync_ReturnsMultipleResults()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var requests = new[]
{
CreateRequest("pack-1", 1, severity: "Critical", subjectPurl: "pkg:npm/lodash@4.17.0"),
CreateRequest("pack-1", 1, severity: "High", subjectPurl: "pkg:npm/express@4.18.0"),
CreateRequest("pack-1", 1, severity: "Medium", subjectPurl: "pkg:npm/axios@1.0.0"),
};
var responses = await harness.Service.EvaluateBatchAsync(requests, CancellationToken.None);
Assert.Equal(3, responses.Count);
}
[Fact]
public async Task EvaluateBatchAsync_UsesCacheForDuplicates()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
// Pre-populate cache
var request = CreateRequest("pack-1", 1, severity: "Critical");
await harness.Service.EvaluateAsync(request, CancellationToken.None);
var requests = new[]
{
request, // Should be cached
CreateRequest("pack-1", 1, severity: "High"), // New
};
var responses = await harness.Service.EvaluateBatchAsync(requests, CancellationToken.None);
Assert.Equal(2, responses.Count);
Assert.True(responses.Any(r => r.Cached));
Assert.True(responses.Any(r => !r.Cached));
}
[Fact]
public async Task EvaluateAsync_DifferentContextsGetDifferentCacheKeys()
{
var harness = CreateHarness();
await harness.StoreTestPolicyAsync("pack-1", 1, TestPolicy);
var request1 = CreateRequest("pack-1", 1, severity: "High");
var request2 = CreateRequest("pack-1", 1, severity: "Critical");
var response1 = await harness.Service.EvaluateAsync(request1, CancellationToken.None);
var response2 = await harness.Service.EvaluateAsync(request2, CancellationToken.None);
// Both should be cache misses (different severity = different context)
Assert.False(response1.Cached);
Assert.False(response2.Cached);
// Different inputs = different correlation IDs
Assert.NotEqual(response1.CorrelationId, response2.CorrelationId);
}
private static RuntimeEvaluationRequest CreateRequest(
string packId,
int version,
string severity,
string tenantId = "tenant-1",
string subjectPurl = "pkg:npm/lodash@4.17.21",
string advisoryId = "CVE-2024-0001")
{
return new RuntimeEvaluationRequest(
packId,
version,
tenantId,
subjectPurl,
advisoryId,
Severity: new PolicyEvaluationSeverity(severity, null),
Advisory: new PolicyEvaluationAdvisory("NVD", ImmutableDictionary<string, string>.Empty),
Vex: PolicyEvaluationVexEvidence.Empty,
Sbom: PolicyEvaluationSbom.Empty,
Exceptions: PolicyEvaluationExceptions.Empty,
Reachability: PolicyEvaluationReachability.Unknown,
EvaluationTimestamp: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
BypassCache: false);
}
private static TestHarness CreateHarness()
{
var repository = new InMemoryPolicyPackRepository();
var cacheLogger = NullLogger<InMemoryPolicyEvaluationCache>.Instance;
var serviceLogger = NullLogger<PolicyRuntimeEvaluationService>.Instance;
var options = Microsoft.Extensions.Options.Options.Create(new PolicyEngineOptions());
var cache = new InMemoryPolicyEvaluationCache(cacheLogger, TimeProvider.System, options);
var evaluator = new PolicyEvaluator();
var compilationService = CreateCompilationService();
var service = new PolicyRuntimeEvaluationService(
repository,
cache,
evaluator,
TimeProvider.System,
serviceLogger);
return new TestHarness(service, repository, compilationService);
}
private static PolicyCompilationService CreateCompilationService()
{
var compiler = new PolicyCompiler();
var analyzer = new PolicyComplexityAnalyzer();
var options = new PolicyEngineOptions();
var optionsMonitor = new StaticOptionsMonitor(options);
return new PolicyCompilationService(compiler, analyzer, optionsMonitor, TimeProvider.System);
}
private sealed record TestHarness(
PolicyRuntimeEvaluationService Service,
InMemoryPolicyPackRepository Repository,
PolicyCompilationService CompilationService)
{
public async Task StoreTestPolicyAsync(string packId, int version, string dsl)
{
var bundleService = new PolicyBundleService(CompilationService, Repository, TimeProvider.System);
var request = new PolicyBundleRequest(new PolicyDslPayload("stella-dsl@1", dsl), SigningKeyId: null);
await bundleService.CompileAndStoreAsync(packId, version, request, CancellationToken.None);
}
}
private sealed class StaticOptionsMonitor : IOptionsMonitor<PolicyEngineOptions>
{
private readonly PolicyEngineOptions _value;
public StaticOptionsMonitor(PolicyEngineOptions value) => _value = value;
public PolicyEngineOptions CurrentValue => _value;
public PolicyEngineOptions Get(string? name) => _value;
public IDisposable OnChange(Action<PolicyEngineOptions, string> listener) => NullDisposable.Instance;
private sealed class NullDisposable : IDisposable
{
public static readonly NullDisposable Instance = new();
public void Dispose() { }
}
}
}