save progress

This commit is contained in:
master
2026-01-09 18:27:36 +02:00
parent e608752924
commit a21d3dbc1f
361 changed files with 63068 additions and 1192 deletions

View File

@@ -0,0 +1,210 @@
// <copyright file="PolicySimulationInputLock.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Replay.Core;
/// <summary>
/// Represents a locked set of inputs required for policy simulation replay.
/// All digests are SHA-256 hex strings (64 characters).
/// </summary>
public sealed record PolicySimulationInputLock
{
    /// <summary>
    /// Gets the SHA-256 digest of the policy bundle used for simulation.
    /// </summary>
    public required string PolicyBundleSha256 { get; init; }

    /// <summary>
    /// Gets the SHA-256 digest of the dependency graph.
    /// </summary>
    public required string GraphSha256 { get; init; }

    /// <summary>
    /// Gets the SHA-256 digest of the SBOM used for simulation.
    /// </summary>
    public required string SbomSha256 { get; init; }

    /// <summary>
    /// Gets the SHA-256 digest of the time anchor (feed snapshot timestamp proof).
    /// </summary>
    public required string TimeAnchorSha256 { get; init; }

    /// <summary>
    /// Gets the SHA-256 digest of the advisory dataset.
    /// </summary>
    public required string DatasetSha256 { get; init; }

    /// <summary>
    /// Gets when the input lock was generated. Used for staleness checks
    /// against the materialization time at replay.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>
    /// Gets a value indicating whether shadow isolation mode is required for replay.
    /// When true, simulation must run in shadow mode to prevent side effects.
    /// </summary>
    public bool ShadowIsolation { get; init; }

    /// <summary>
    /// Gets the scopes required for the simulation. Defaults to an empty array
    /// (no scope requirement).
    /// </summary>
    public string[] RequiredScopes { get; init; } = [];
}
/// <summary>
/// Represents the materialized inputs at replay time for validation against the lock.
/// Digest parameters are SHA-256 hex strings, mirroring <see cref="PolicySimulationInputLock"/>.
/// </summary>
/// <param name="PolicyBundleSha256">The SHA-256 of the current policy bundle.</param>
/// <param name="GraphSha256">The SHA-256 of the current dependency graph.</param>
/// <param name="SbomSha256">The SHA-256 of the current SBOM.</param>
/// <param name="TimeAnchorSha256">The SHA-256 of the current time anchor.</param>
/// <param name="DatasetSha256">The SHA-256 of the current advisory dataset.</param>
/// <param name="ExecutionMode">The execution mode ("shadow" or "live"); compared case-insensitively.</param>
/// <param name="AvailableScopes">The scopes available to the current session.</param>
/// <param name="MaterializedAt">When the inputs were materialized.</param>
public sealed record PolicySimulationMaterializedInputs(
    string PolicyBundleSha256,
    string GraphSha256,
    string SbomSha256,
    string TimeAnchorSha256,
    string DatasetSha256,
    string ExecutionMode,
    string[] AvailableScopes,
    DateTimeOffset MaterializedAt);
/// <summary>
/// Result of validating materialized inputs against an input lock.
/// Use the static factory methods to obtain well-known reason codes.
/// </summary>
/// <param name="IsValid">Whether the inputs match the lock requirements.</param>
/// <param name="Reason">The reason code for the validation result.</param>
public sealed record PolicySimulationValidationResult(bool IsValid, string Reason)
{
    /// <summary>Creates a successful validation result (reason "ok").</summary>
    public static PolicySimulationValidationResult Success()
    {
        return new PolicySimulationValidationResult(true, "ok");
    }

    /// <summary>Creates a failure result indicating policy bundle digest drift.</summary>
    public static PolicySimulationValidationResult PolicyBundleDrift()
    {
        return Failure("policy-bundle-drift");
    }

    /// <summary>Creates a failure result indicating graph digest drift.</summary>
    public static PolicySimulationValidationResult GraphDrift()
    {
        return Failure("graph-drift");
    }

    /// <summary>Creates a failure result indicating SBOM digest drift.</summary>
    public static PolicySimulationValidationResult SbomDrift()
    {
        return Failure("sbom-drift");
    }

    /// <summary>Creates a failure result indicating time anchor digest drift.</summary>
    public static PolicySimulationValidationResult TimeAnchorDrift()
    {
        return Failure("time-anchor-drift");
    }

    /// <summary>Creates a failure result indicating dataset digest drift.</summary>
    public static PolicySimulationValidationResult DatasetDrift()
    {
        return Failure("dataset-drift");
    }

    /// <summary>Creates a failure result indicating shadow mode is required but not active.</summary>
    public static PolicySimulationValidationResult ShadowModeRequired()
    {
        return Failure("shadow-mode-required");
    }

    /// <summary>Creates a failure result indicating required scopes are missing.</summary>
    public static PolicySimulationValidationResult MissingScopes()
    {
        return Failure("missing-scopes");
    }

    /// <summary>Creates a failure result indicating the input lock is stale.</summary>
    public static PolicySimulationValidationResult InputsLockStale()
    {
        return Failure("inputs-lock-stale");
    }

    // All failure factories share one construction path so the shape of a
    // failed result stays uniform (IsValid = false, machine-readable reason).
    private static PolicySimulationValidationResult Failure(string reason)
    {
        return new PolicySimulationValidationResult(false, reason);
    }
}
/// <summary>
/// Validates materialized inputs against a policy simulation input lock.
/// Checks run in a fixed order (staleness, shadow mode, scopes, then digests),
/// and the first failing check determines the returned reason code.
/// </summary>
public static class PolicySimulationInputLockValidator
{
    /// <summary>
    /// Validates that materialized inputs match the locked inputs.
    /// </summary>
    /// <param name="lock">The input lock to validate against.</param>
    /// <param name="inputs">The materialized inputs to validate.</param>
    /// <param name="maxAge">Maximum age for the input lock before it's considered stale.</param>
    /// <returns>The validation result; <see cref="PolicySimulationValidationResult.Success"/> when all checks pass.</returns>
    public static PolicySimulationValidationResult Validate(
        PolicySimulationInputLock @lock,
        PolicySimulationMaterializedInputs inputs,
        TimeSpan maxAge)
    {
        ArgumentNullException.ThrowIfNull(@lock);
        ArgumentNullException.ThrowIfNull(inputs);

        // Staleness: the lock must have been generated within maxAge of materialization.
        if (inputs.MaterializedAt - @lock.GeneratedAt > maxAge)
        {
            return PolicySimulationValidationResult.InputsLockStale();
        }

        // Shadow isolation: when required, the session must run in "shadow" mode.
        var shadowActive = string.Equals(inputs.ExecutionMode, "shadow", StringComparison.OrdinalIgnoreCase);
        if (@lock.ShadowIsolation && !shadowActive)
        {
            return PolicySimulationValidationResult.ShadowModeRequired();
        }

        // Scopes: every required scope must be present (case-insensitive).
        if (@lock.RequiredScopes.Length > 0)
        {
            var available = new HashSet<string>(inputs.AvailableScopes, StringComparer.OrdinalIgnoreCase);
            if (!available.IsSupersetOf(@lock.RequiredScopes))
            {
                return PolicySimulationValidationResult.MissingScopes();
            }
        }

        // Digests: compared pairwise in a fixed order so drift reasons are deterministic.
        if (!DigestMatches(@lock.PolicyBundleSha256, inputs.PolicyBundleSha256))
        {
            return PolicySimulationValidationResult.PolicyBundleDrift();
        }

        if (!DigestMatches(@lock.GraphSha256, inputs.GraphSha256))
        {
            return PolicySimulationValidationResult.GraphDrift();
        }

        if (!DigestMatches(@lock.SbomSha256, inputs.SbomSha256))
        {
            return PolicySimulationValidationResult.SbomDrift();
        }

        if (!DigestMatches(@lock.TimeAnchorSha256, inputs.TimeAnchorSha256))
        {
            return PolicySimulationValidationResult.TimeAnchorDrift();
        }

        if (!DigestMatches(@lock.DatasetSha256, inputs.DatasetSha256))
        {
            return PolicySimulationValidationResult.DatasetDrift();
        }

        return PolicySimulationValidationResult.Success();
    }

    // SHA-256 hex digests compare case-insensitively (upper/lower hex are equivalent).
    private static bool DigestMatches(string expected, string actual)
        => string.Equals(expected, actual, StringComparison.OrdinalIgnoreCase);
}

View File

@@ -0,0 +1,517 @@
// <copyright file="ReplayExecutor.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
namespace StellaOps.Replay.Core;
/// <summary>
/// Executes policy evaluation replay with resolved inputs for deterministic verification.
/// Sprint: SPRINT_20260107_006_005 Task RB-003
/// </summary>
public sealed class ReplayExecutor
{
    private readonly IReplayPolicyEvaluator _policyEvaluator;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<ReplayExecutor> _logger;
    private readonly ReplayExecutorOptions _options;

    /// <summary>
    /// Initializes a new instance of the <see cref="ReplayExecutor"/> class.
    /// </summary>
    /// <param name="policyEvaluator">Evaluator that re-runs the policy during replay.</param>
    /// <param name="timeProvider">Time source; injected so tests can control timestamps.</param>
    /// <param name="logger">Logger for replay progress and failures.</param>
    /// <param name="options">Execution options; defaults are used when null.</param>
    public ReplayExecutor(
        IReplayPolicyEvaluator policyEvaluator,
        TimeProvider timeProvider,
        ILogger<ReplayExecutor> logger,
        ReplayExecutorOptions? options = null)
    {
        _policyEvaluator = policyEvaluator ?? throw new ArgumentNullException(nameof(policyEvaluator));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options ?? new ReplayExecutorOptions();
    }

    /// <summary>
    /// Executes a replay using resolved inputs and compares the replayed verdict's
    /// digest against the original verdict's digest. The evaluation is bounded by
    /// <see cref="ReplayExecutorOptions.Timeout"/>.
    /// </summary>
    /// <param name="resolvedInputs">The resolved inputs from InputManifestResolver.</param>
    /// <param name="originalVerdict">The original verdict to compare against.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The replay execution result; evaluation failures are reported in the result, not thrown.</returns>
    public async Task<ReplayExecutionResult> ExecuteAsync(
        ResolvedInputs resolvedInputs,
        OriginalVerdict originalVerdict,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(resolvedInputs);
        ArgumentNullException.ThrowIfNull(originalVerdict);
        var startTime = _timeProvider.GetUtcNow();
        var replayId = GenerateReplayId();
        _logger.LogInformation(
            "Starting replay execution {ReplayId} for correlation {CorrelationId}",
            replayId,
            originalVerdict.CorrelationId);
        // Fix: Options.Timeout was declared but never enforced; bound the evaluation here.
        using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        timeoutCts.CancelAfter(_options.Timeout);
        try
        {
            // Deterministic context: pin timestamp and random seed so replay is reproducible.
            var context = new ReplayContext
            {
                ReplayId = replayId,
                CorrelationId = originalVerdict.CorrelationId,
                TimestampOverride = resolvedInputs.TimestampOverride ?? originalVerdict.EvaluatedAt,
                RandomSeed = resolvedInputs.RandomSeed ?? 0,
                ToolchainVersion = resolvedInputs.ToolchainVersion
            };
            // Execute policy evaluation with resolved inputs under the timeout-linked token.
            var replayedVerdict = await _policyEvaluator.EvaluateAsync(
                context,
                resolvedInputs,
                timeoutCts.Token).ConfigureAwait(false);
            // Compute canonical digests for both verdicts via one shared helper
            // (previously duplicated in two overloads that could drift apart).
            var originalDigest = ComputeDigest(
                originalVerdict.Outcome,
                originalVerdict.Severity,
                originalVerdict.Score,
                originalVerdict.FindingsCount);
            var replayDigest = ComputeDigest(
                replayedVerdict.Outcome,
                replayedVerdict.Severity,
                replayedVerdict.Score,
                replayedVerdict.FindingsCount);
            var deterministicMatch = string.Equals(
                originalDigest,
                replayDigest,
                StringComparison.OrdinalIgnoreCase);
            var endTime = _timeProvider.GetUtcNow();
            var duration = endTime - startTime;
            _logger.LogInformation(
                "Replay {ReplayId} completed: match={Match}, duration={Duration}ms",
                replayId,
                deterministicMatch,
                duration.TotalMilliseconds);
            return new ReplayExecutionResult
            {
                ReplayId = replayId,
                CorrelationId = originalVerdict.CorrelationId,
                Status = ReplayExecutionStatus.Completed,
                OriginalDigest = originalDigest,
                ReplayDigest = replayDigest,
                DeterministicMatch = deterministicMatch,
                OriginalVerdict = originalVerdict,
                ReplayedVerdict = replayedVerdict,
                StartedAt = startTime,
                CompletedAt = endTime,
                Duration = duration,
                Diff = deterministicMatch ? null : GenerateDiff(originalVerdict, replayedVerdict)
            };
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // External cancellation requested by the caller.
            _logger.LogWarning("Replay {ReplayId} was cancelled", replayId);
            return new ReplayExecutionResult
            {
                ReplayId = replayId,
                CorrelationId = originalVerdict.CorrelationId,
                Status = ReplayExecutionStatus.Cancelled,
                StartedAt = startTime,
                CompletedAt = _timeProvider.GetUtcNow()
            };
        }
        catch (OperationCanceledException)
        {
            // The linked token fired without external cancellation => per-replay timeout.
            _logger.LogWarning("Replay {ReplayId} timed out after {Timeout}", replayId, _options.Timeout);
            return new ReplayExecutionResult
            {
                ReplayId = replayId,
                CorrelationId = originalVerdict.CorrelationId,
                Status = ReplayExecutionStatus.Failed,
                Error = $"Replay timed out after {_options.Timeout.TotalSeconds} seconds",
                StartedAt = startTime,
                CompletedAt = _timeProvider.GetUtcNow()
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Replay {ReplayId} failed", replayId);
            return new ReplayExecutionResult
            {
                ReplayId = replayId,
                CorrelationId = originalVerdict.CorrelationId,
                Status = ReplayExecutionStatus.Failed,
                Error = ex.Message,
                StartedAt = startTime,
                CompletedAt = _timeProvider.GetUtcNow()
            };
        }
    }

    /// <summary>
    /// Executes a batch of replays sequentially, in request order.
    /// </summary>
    /// <param name="requests">The replay requests to execute.</param>
    /// <param name="cancellationToken">Cancellation token; checked between replays.</param>
    /// <returns>One result per request, in request order.</returns>
    public async Task<IReadOnlyList<ReplayExecutionResult>> ExecuteBatchAsync(
        IReadOnlyList<ReplayRequest> requests,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(requests);
        var results = new List<ReplayExecutionResult>(requests.Count);
        foreach (var request in requests)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var result = await ExecuteAsync(
                request.ResolvedInputs,
                request.OriginalVerdict,
                cancellationToken).ConfigureAwait(false);
            results.Add(result);
            // Optional pacing between replays. (The loop is strictly sequential;
            // the old "Limit concurrency" comment was misleading — this only throttles.)
            if (_options.DelayBetweenReplays > TimeSpan.Zero)
            {
                await Task.Delay(_options.DelayBetweenReplays, cancellationToken)
                    .ConfigureAwait(false);
            }
        }
        return results;
    }

    /// <summary>Generates a short unique replay identifier: "rpl-" plus 16 hex characters.</summary>
    private static string GenerateReplayId()
    {
        return $"rpl-{Guid.NewGuid():N}"[..20];
    }

    /// <summary>
    /// Computes the canonical SHA-256 digest over the comparable verdict fields.
    /// Shared by both verdict shapes so original and replay hash identically.
    /// NOTE(review): the digest is case-sensitive over outcome/severity, while
    /// <see cref="GenerateDiff"/> compares those fields case-insensitively — a
    /// case-only change would flag a mismatch with an empty diff; confirm intended.
    /// </summary>
    private static string ComputeDigest(string outcome, string severity, double score, int findingsCount)
    {
        var canonical = string.Join('|',
            outcome,
            severity,
            score.ToString("F4", CultureInfo.InvariantCulture),
            findingsCount.ToString(CultureInfo.InvariantCulture));
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Builds a field-level diff between the original and replayed verdicts.
    /// Only invoked when the digests do not match.
    /// </summary>
    private static VerdictDiff GenerateDiff(OriginalVerdict original, ReplayedVerdict replayed)
    {
        var differences = new List<DiffEntry>();
        if (!string.Equals(original.Outcome, replayed.Outcome, StringComparison.OrdinalIgnoreCase))
        {
            differences.Add(new DiffEntry("outcome", original.Outcome, replayed.Outcome));
        }
        if (!string.Equals(original.Severity, replayed.Severity, StringComparison.OrdinalIgnoreCase))
        {
            differences.Add(new DiffEntry("severity", original.Severity, replayed.Severity));
        }
        // Tolerance matches the 4-decimal ("F4") precision used in the digest.
        if (Math.Abs(original.Score - replayed.Score) > 0.0001)
        {
            differences.Add(new DiffEntry(
                "score",
                original.Score.ToString("F4", CultureInfo.InvariantCulture),
                replayed.Score.ToString("F4", CultureInfo.InvariantCulture)));
        }
        if (original.FindingsCount != replayed.FindingsCount)
        {
            differences.Add(new DiffEntry(
                "findingsCount",
                original.FindingsCount.ToString(CultureInfo.InvariantCulture),
                replayed.FindingsCount.ToString(CultureInfo.InvariantCulture)));
        }
        return new VerdictDiff
        {
            Differences = differences,
            Summary = $"{differences.Count} field(s) differ between original and replay"
        };
    }
}
/// <summary>
/// Options for replay execution.
/// </summary>
public sealed record ReplayExecutorOptions
{
    /// <summary>
    /// Maximum time to wait for a single replay. Defaults to 5 minutes.
    /// </summary>
    public TimeSpan Timeout { get; init; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Delay inserted between consecutive replays in batch execution.
    /// Defaults to zero (no pacing).
    /// </summary>
    public TimeSpan DelayBetweenReplays { get; init; } = TimeSpan.Zero;

    /// <summary>
    /// Whether to capture a detailed execution trace. Off by default.
    /// </summary>
    public bool CaptureTrace { get; init; } = false;
}
/// <summary>
/// Interface for policy evaluation during replay.
/// </summary>
public interface IReplayPolicyEvaluator
{
    /// <summary>
    /// Evaluates policy with the given context and resolved inputs.
    /// </summary>
    /// <param name="context">Deterministic replay context (pinned timestamp, seed, toolchain).</param>
    /// <param name="inputs">The resolved inputs to evaluate against.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The verdict produced by the replayed evaluation.</returns>
    Task<ReplayedVerdict> EvaluateAsync(
        ReplayContext context,
        ResolvedInputs inputs,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for replay execution. Carries the pinned values that make a
/// replay deterministic (timestamp, random seed, toolchain version).
/// </summary>
public sealed record ReplayContext
{
    /// <summary>
    /// Unique replay identifier.
    /// </summary>
    public required string ReplayId { get; init; }

    /// <summary>
    /// Correlation ID from the original execution.
    /// </summary>
    public required string CorrelationId { get; init; }

    /// <summary>
    /// Override timestamp for deterministic replay (typically the original evaluation time).
    /// </summary>
    public DateTimeOffset TimestampOverride { get; init; }

    /// <summary>
    /// Random seed for deterministic replay. Defaults to 0 when the inputs provide none.
    /// </summary>
    public int RandomSeed { get; init; }

    /// <summary>
    /// Toolchain version used for evaluation, if known.
    /// </summary>
    public string? ToolchainVersion { get; init; }
}
/// <summary>
/// Original verdict to replay against.
/// </summary>
public sealed record OriginalVerdict
{
    /// <summary>
    /// Correlation ID from original execution.
    /// </summary>
    public required string CorrelationId { get; init; }

    /// <summary>
    /// The verdict outcome (e.g., "pass", "fail", "warn").
    /// </summary>
    public required string Outcome { get; init; }

    /// <summary>
    /// Severity level (e.g., "critical", "high", "medium", "low").
    /// </summary>
    public required string Severity { get; init; }

    /// <summary>
    /// Numeric score (0.0 to 1.0).
    /// </summary>
    public double Score { get; init; }

    /// <summary>
    /// Number of findings.
    /// </summary>
    public int FindingsCount { get; init; }

    /// <summary>
    /// When the original evaluation occurred; used as the replay timestamp
    /// override when the resolved inputs do not provide one.
    /// </summary>
    public DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>
    /// SHA-256 digest of the original verdict, if previously recorded.
    /// </summary>
    public string? Digest { get; init; }
}
/// <summary>
/// Verdict produced by replay execution. Mirrors the comparable fields of
/// <see cref="OriginalVerdict"/> so the two can be digested and diffed.
/// </summary>
public sealed record ReplayedVerdict
{
    /// <summary>
    /// The verdict outcome (e.g., "pass", "fail", "warn").
    /// </summary>
    public required string Outcome { get; init; }

    /// <summary>
    /// Severity level (e.g., "critical", "high", "medium", "low").
    /// </summary>
    public required string Severity { get; init; }

    /// <summary>
    /// Numeric score (0.0 to 1.0).
    /// </summary>
    public double Score { get; init; }

    /// <summary>
    /// Number of findings.
    /// </summary>
    public int FindingsCount { get; init; }

    /// <summary>
    /// When the replay evaluation occurred.
    /// </summary>
    public DateTimeOffset EvaluatedAt { get; init; }
}
/// <summary>
/// Request for batch replay execution: one set of resolved inputs paired with
/// the original verdict to compare against.
/// </summary>
public sealed record ReplayRequest
{
    /// <summary>
    /// The resolved inputs for replay.
    /// </summary>
    public required ResolvedInputs ResolvedInputs { get; init; }

    /// <summary>
    /// The original verdict to compare against.
    /// </summary>
    public required OriginalVerdict OriginalVerdict { get; init; }
}
/// <summary>
/// Result of replay execution. Digest/verdict/diff fields are only populated
/// when <see cref="Status"/> is <see cref="ReplayExecutionStatus.Completed"/>.
/// </summary>
public sealed record ReplayExecutionResult
{
    /// <summary>
    /// Unique replay identifier.
    /// </summary>
    public required string ReplayId { get; init; }

    /// <summary>
    /// Correlation ID from the original execution.
    /// </summary>
    public required string CorrelationId { get; init; }

    /// <summary>
    /// Execution status.
    /// </summary>
    public required ReplayExecutionStatus Status { get; init; }

    /// <summary>
    /// SHA-256 digest of the original verdict (lowercase hex).
    /// </summary>
    public string? OriginalDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the replayed verdict (lowercase hex).
    /// </summary>
    public string? ReplayDigest { get; init; }

    /// <summary>
    /// Whether the replay produced deterministic output (digests matched);
    /// null when the replay did not complete.
    /// </summary>
    public bool? DeterministicMatch { get; init; }

    /// <summary>
    /// The original verdict.
    /// </summary>
    public OriginalVerdict? OriginalVerdict { get; init; }

    /// <summary>
    /// The replayed verdict.
    /// </summary>
    public ReplayedVerdict? ReplayedVerdict { get; init; }

    /// <summary>
    /// When the replay started.
    /// </summary>
    public DateTimeOffset StartedAt { get; init; }

    /// <summary>
    /// When the replay completed.
    /// </summary>
    public DateTimeOffset? CompletedAt { get; init; }

    /// <summary>
    /// Total replay duration.
    /// </summary>
    public TimeSpan? Duration { get; init; }

    /// <summary>
    /// Diff report; only set when the replay completed and was not deterministic.
    /// </summary>
    public VerdictDiff? Diff { get; init; }

    /// <summary>
    /// Error message if the replay failed.
    /// </summary>
    public string? Error { get; init; }
}
/// <summary>
/// Status of replay execution.
/// </summary>
public enum ReplayExecutionStatus
{
    /// <summary>Replay has not started.</summary>
    NotStarted,

    /// <summary>Replay is in progress.</summary>
    InProgress,

    /// <summary>Replay completed successfully (regardless of whether digests matched).</summary>
    Completed,

    /// <summary>Replay failed with an error.</summary>
    Failed,

    /// <summary>Replay was cancelled.</summary>
    Cancelled
}
/// <summary>
/// Diff between original and replayed verdicts.
/// </summary>
public sealed record VerdictDiff
{
    /// <summary>
    /// List of field-level differences found. Empty by default.
    /// </summary>
    public IReadOnlyList<DiffEntry> Differences { get; init; } = [];

    /// <summary>
    /// Human-readable summary of how many fields differ.
    /// </summary>
    public required string Summary { get; init; }
}
/// <summary>
/// A single difference entry between the original and replayed verdicts.
/// </summary>
/// <param name="Field">The field name that differs (e.g., "outcome", "score").</param>
/// <param name="OriginalValue">The original value, rendered as a string.</param>
/// <param name="ReplayedValue">The replayed value, rendered as a string.</param>
public sealed record DiffEntry(string Field, string OriginalValue, string ReplayedValue);

View File

@@ -0,0 +1,444 @@
// <copyright file="ReplayJobQueue.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
using System.Threading.Channels;
using Microsoft.Extensions.Logging;
namespace StellaOps.Replay.Core;
/// <summary>
/// Queue for managing async replay job execution with concurrency limits.
/// Sprint: SPRINT_20260107_006_005 Task RB-006
/// </summary>
public sealed class ReplayJobQueue : IDisposable
{
    private readonly ReplayExecutor _executor;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<ReplayJobQueue> _logger;
    private readonly ReplayJobQueueOptions _options;
    private readonly Channel<ReplayJob> _jobChannel;
    private readonly ConcurrentDictionary<string, ReplayJob> _jobs = new();
    private readonly CancellationTokenSource _shutdownCts = new();
    private readonly Task[] _workerTasks;
    private bool _disposed;

    /// <summary>
    /// Initializes a new instance of the <see cref="ReplayJobQueue"/> class
    /// and immediately starts the configured number of worker tasks.
    /// </summary>
    public ReplayJobQueue(
        ReplayExecutor executor,
        TimeProvider timeProvider,
        ILogger<ReplayJobQueue> logger,
        ReplayJobQueueOptions? options = null)
    {
        _executor = executor ?? throw new ArgumentNullException(nameof(executor));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options ?? new ReplayJobQueueOptions();
        _jobChannel = Channel.CreateBounded<ReplayJob>(new BoundedChannelOptions(_options.MaxQueueSize)
        {
            FullMode = BoundedChannelFullMode.Wait,
            SingleReader = false,
            SingleWriter = false
        });
        // Start worker tasks; each worker pulls jobs off the shared channel.
        _workerTasks = new Task[_options.MaxConcurrentReplays];
        for (int i = 0; i < _options.MaxConcurrentReplays; i++)
        {
            var workerId = i;
            _workerTasks[i] = Task.Run(() => WorkerLoopAsync(workerId, _shutdownCts.Token));
        }
        _logger.LogInformation(
            "ReplayJobQueue started with {WorkerCount} workers, max queue size {QueueSize}",
            _options.MaxConcurrentReplays,
            _options.MaxQueueSize);
    }

    /// <summary>
    /// Enqueues a replay job for async execution. Blocks (asynchronously) when
    /// the bounded queue is full.
    /// </summary>
    /// <param name="request">The replay request.</param>
    /// <param name="cancellationToken">Cancellation token for the enqueue operation; also linked into the job's own cancellation.</param>
    /// <returns>The job ID for tracking.</returns>
    public async Task<string> EnqueueAsync(
        ReplayJobRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ObjectDisposedException.ThrowIf(_disposed, this);
        var jobId = GenerateJobId();
        var now = _timeProvider.GetUtcNow();
        var job = new ReplayJob
        {
            JobId = jobId,
            Request = request,
            Status = ReplayJobStatus.Queued,
            CreatedAt = now,
            CancellationSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)
        };
        _jobs[jobId] = job;
        await _jobChannel.Writer.WriteAsync(job, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Enqueued replay job {JobId} for correlation {CorrelationId}",
            jobId, request.OriginalVerdict.CorrelationId);
        return jobId;
    }

    /// <summary>
    /// Gets the status of a replay job, or null when the job is unknown.
    /// </summary>
    public ReplayJobStatus? GetJobStatus(string jobId)
    {
        return _jobs.TryGetValue(jobId, out var job) ? job.Status : null;
    }

    /// <summary>
    /// Gets the full job details, or null when the job is unknown.
    /// </summary>
    public ReplayJob? GetJob(string jobId)
    {
        return _jobs.TryGetValue(jobId, out var job) ? job : null;
    }

    /// <summary>
    /// Cancels a pending or running job.
    /// </summary>
    /// <returns>True when the job existed and was not already in a terminal state.</returns>
    public bool CancelJob(string jobId)
    {
        if (!_jobs.TryGetValue(jobId, out var job))
        {
            return false;
        }
        if (job.Status is ReplayJobStatus.Completed or ReplayJobStatus.Failed or ReplayJobStatus.Cancelled)
        {
            return false;
        }
        job.CancellationSource.Cancel();
        UpdateJobStatus(jobId, ReplayJobStatus.Cancelled);
        _logger.LogInformation("Cancelled replay job {JobId}", jobId);
        return true;
    }

    /// <summary>
    /// Gets statistics about the queue. Computed in a single pass over the
    /// tracked jobs (the snapshot is best-effort under concurrent updates).
    /// </summary>
    public ReplayQueueStats GetStats()
    {
        int queued = 0, running = 0, completed = 0, failed = 0, cancelled = 0, total = 0;
        foreach (var job in _jobs.Values)
        {
            total++;
            switch (job.Status)
            {
                case ReplayJobStatus.Queued: queued++; break;
                case ReplayJobStatus.Running: running++; break;
                case ReplayJobStatus.Completed: completed++; break;
                case ReplayJobStatus.Failed: failed++; break;
                case ReplayJobStatus.Cancelled: cancelled++; break;
            }
        }
        return new ReplayQueueStats
        {
            QueuedJobs = queued,
            RunningJobs = running,
            CompletedJobs = completed,
            FailedJobs = failed,
            CancelledJobs = cancelled,
            TotalJobs = total,
            MaxConcurrentWorkers = _options.MaxConcurrentReplays,
            MaxQueueSize = _options.MaxQueueSize
        };
    }

    private async Task WorkerLoopAsync(int workerId, CancellationToken shutdownToken)
    {
        _logger.LogDebug("Replay worker {WorkerId} started", workerId);
        try
        {
            await foreach (var job in _jobChannel.Reader.ReadAllAsync(shutdownToken).ConfigureAwait(false))
            {
                if (shutdownToken.IsCancellationRequested)
                {
                    break;
                }
                await ProcessJobAsync(workerId, job).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException) when (shutdownToken.IsCancellationRequested)
        {
            // Normal shutdown
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Replay worker {WorkerId} crashed", workerId);
        }
        _logger.LogDebug("Replay worker {WorkerId} stopped", workerId);
    }

    private async Task ProcessJobAsync(int workerId, ReplayJob job)
    {
        if (job.CancellationSource.IsCancellationRequested)
        {
            UpdateJobStatus(job.JobId, ReplayJobStatus.Cancelled);
            return;
        }
        _logger.LogInformation("Worker {WorkerId} starting job {JobId}", workerId, job.JobId);
        // BUG FIX: UpdateJobStatus replaces the tracked instance with a `with` copy,
        // so any mutable state written to a stale reference afterwards was lost
        // (GetJob previously returned jobs with no StartedAt/Result/CompletedAt/Error).
        // We now (a) set mutable properties BEFORE the status copy so the copy
        // carries them, and (b) rebind our local to the updated tracked instance.
        job.StartedAt = _timeProvider.GetUtcNow();
        job = UpdateJobStatus(job.JobId, ReplayJobStatus.Running) ?? job;
        try
        {
            // Create combined cancellation with the per-job timeout.
            using var timeoutCts = new CancellationTokenSource(_options.JobTimeout);
            using var combinedCts = CancellationTokenSource.CreateLinkedTokenSource(
                job.CancellationSource.Token,
                timeoutCts.Token);
            var result = await _executor.ExecuteAsync(
                job.Request.ResolvedInputs,
                job.Request.OriginalVerdict,
                combinedCts.Token).ConfigureAwait(false);
            job.Result = result;
            job.CompletedAt = _timeProvider.GetUtcNow();
            UpdateJobStatus(job.JobId, result.Status == ReplayExecutionStatus.Completed
                ? ReplayJobStatus.Completed
                : ReplayJobStatus.Failed);
            _logger.LogInformation(
                "Worker {WorkerId} completed job {JobId}: match={Match}",
                workerId,
                job.JobId,
                result.DeterministicMatch);
        }
        catch (OperationCanceledException) when (job.CancellationSource.IsCancellationRequested)
        {
            job.CompletedAt = _timeProvider.GetUtcNow();
            UpdateJobStatus(job.JobId, ReplayJobStatus.Cancelled);
            _logger.LogInformation("Worker {WorkerId} cancelled job {JobId}", workerId, job.JobId);
        }
        catch (OperationCanceledException)
        {
            // Job-level cancellation was not requested, so the timeout token fired.
            job.Error = $"Job timed out after {_options.JobTimeout.TotalSeconds} seconds";
            job.CompletedAt = _timeProvider.GetUtcNow();
            UpdateJobStatus(job.JobId, ReplayJobStatus.TimedOut);
            _logger.LogWarning("Worker {WorkerId} job {JobId} timed out", workerId, job.JobId);
        }
        catch (Exception ex)
        {
            job.Error = ex.Message;
            job.CompletedAt = _timeProvider.GetUtcNow();
            UpdateJobStatus(job.JobId, ReplayJobStatus.Failed);
            _logger.LogError(ex, "Worker {WorkerId} job {JobId} failed", workerId, job.JobId);
        }
    }

    /// <summary>
    /// Replaces the tracked job with a copy carrying the new status and returns
    /// the new tracked instance (null when the job is unknown). Callers that
    /// continue mutating the job must use the returned instance.
    /// </summary>
    private ReplayJob? UpdateJobStatus(string jobId, ReplayJobStatus status)
    {
        if (_jobs.TryGetValue(jobId, out var current))
        {
            var updated = current with { Status = status };
            _jobs[jobId] = updated;
            return updated;
        }
        return null;
    }

    private static string GenerateJobId()
    {
        return $"job-{Guid.NewGuid():N}"[..20];
    }

    /// <inheritdoc/>
    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        _shutdownCts.Cancel();
        _jobChannel.Writer.Complete();
        try
        {
            Task.WaitAll(_workerTasks, TimeSpan.FromSeconds(10));
        }
        catch (AggregateException)
        {
            // Workers may have already stopped
        }
        _shutdownCts.Dispose();
        foreach (var job in _jobs.Values)
        {
            job.CancellationSource.Dispose();
        }
        _logger.LogInformation("ReplayJobQueue disposed");
    }
}
/// <summary>
/// Options for the replay job queue.
/// </summary>
public sealed record ReplayJobQueueOptions
{
    /// <summary>
    /// Maximum concurrent replay workers. Defaults to 2.
    /// </summary>
    public int MaxConcurrentReplays { get; init; } = 2;

    /// <summary>
    /// Maximum queue size before enqueue operations block. Defaults to 100.
    /// </summary>
    public int MaxQueueSize { get; init; } = 100;

    /// <summary>
    /// Timeout for individual jobs. Defaults to 5 minutes.
    /// </summary>
    public TimeSpan JobTimeout { get; init; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// How long to retain completed jobs. Defaults to 1 hour.
    /// NOTE(review): no code in this file appears to evict jobs based on this
    /// value — confirm a retention sweep exists elsewhere.
    /// </summary>
    public TimeSpan JobRetentionPeriod { get; init; } = TimeSpan.FromHours(1);
}
/// <summary>
/// Request to enqueue a replay job.
/// </summary>
public sealed record ReplayJobRequest
{
    /// <summary>
    /// The resolved inputs for replay.
    /// </summary>
    public required ResolvedInputs ResolvedInputs { get; init; }

    /// <summary>
    /// The original verdict to compare against.
    /// </summary>
    public required OriginalVerdict OriginalVerdict { get; init; }

    /// <summary>
    /// Optional priority (higher = more urgent).
    /// NOTE(review): the queue in this file processes jobs strictly FIFO and
    /// does not read this value — confirm priority handling elsewhere.
    /// </summary>
    public int Priority { get; init; } = 0;
}
/// <summary>
/// A replay job in the queue. Note: this is a record with settable members;
/// the queue replaces the tracked instance via `with` on status changes, so
/// holders of an older reference may observe stale mutable state.
/// </summary>
public sealed record ReplayJob
{
    /// <summary>
    /// Unique job identifier.
    /// </summary>
    public required string JobId { get; init; }

    /// <summary>
    /// The replay request.
    /// </summary>
    public required ReplayJobRequest Request { get; init; }

    /// <summary>
    /// Current job status.
    /// </summary>
    public required ReplayJobStatus Status { get; init; }

    /// <summary>
    /// When the job was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// When the job started executing; null until a worker picks it up.
    /// </summary>
    public DateTimeOffset? StartedAt { get; set; }

    /// <summary>
    /// When the job completed; null while queued or running.
    /// </summary>
    public DateTimeOffset? CompletedAt { get; set; }

    /// <summary>
    /// The execution result; null until the job finishes.
    /// </summary>
    public ReplayExecutionResult? Result { get; set; }

    /// <summary>
    /// Error message if the job failed or timed out.
    /// </summary>
    public string? Error { get; set; }

    /// <summary>
    /// Cancellation source for this job; shared across `with` copies since it
    /// is a reference held by an init-only property.
    /// </summary>
    public required CancellationTokenSource CancellationSource { get; init; }
}
/// <summary>
/// Status of a replay job.
/// </summary>
public enum ReplayJobStatus
{
    /// <summary>Job is queued and waiting.</summary>
    Queued,

    /// <summary>Job is currently running.</summary>
    Running,

    /// <summary>Job completed successfully.</summary>
    Completed,

    /// <summary>Job failed with an error.</summary>
    Failed,

    /// <summary>Job was cancelled.</summary>
    Cancelled,

    /// <summary>Job exceeded the configured job timeout.</summary>
    TimedOut
}
/// <summary>
/// Statistics snapshot for the replay queue. Per-status counts cover the five
/// primary statuses; TotalJobs counts every tracked job (including timed-out).
/// </summary>
public sealed record ReplayQueueStats
{
    /// <summary>Number of jobs waiting in queue.</summary>
    public int QueuedJobs { get; init; }

    /// <summary>Number of jobs currently running.</summary>
    public int RunningJobs { get; init; }

    /// <summary>Number of completed jobs.</summary>
    public int CompletedJobs { get; init; }

    /// <summary>Number of failed jobs.</summary>
    public int FailedJobs { get; init; }

    /// <summary>Number of cancelled jobs.</summary>
    public int CancelledJobs { get; init; }

    /// <summary>Total jobs tracked.</summary>
    public int TotalJobs { get; init; }

    /// <summary>Maximum concurrent workers configured.</summary>
    public int MaxConcurrentWorkers { get; init; }

    /// <summary>Maximum queue size configured.</summary>
    public int MaxQueueSize { get; init; }
}

View File

@@ -12,10 +12,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" />
<PackageReference Include="Moq" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.AuditPack/StellaOps.AuditPack.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
<ProjectReference Include="../../StellaOps.Replay.WebService/StellaOps.Replay.WebService.csproj" />

View File

@@ -0,0 +1,457 @@
// <copyright file="DeterminismVerifierTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Moq;
using StellaOps.Replay.Core;
using Xunit;
namespace StellaOps.Replay.Core.Tests.Unit;
/// <summary>
/// Unit tests for <see cref="DeterminismVerifier"/>.
/// Sprint: SPRINT_20260107_006_005 Task RB-010
/// </summary>
[Trait("Category", "Unit")]
public sealed class DeterminismVerifierTests
{
    /// <summary>
    /// Fixed timestamp applied to every verdict built by the helpers below.
    /// The previous helpers stamped each verdict with
    /// <see cref="DateTimeOffset.UtcNow"/>, which gave two logically identical
    /// verdicts different <c>RenderedAt</c> values and made the equality tests
    /// silently depend on the verifier excluding that field from comparison.
    /// A constant removes that hidden coupling and makes the tests
    /// deterministic by construction.
    /// </summary>
    private static readonly DateTimeOffset FixedRenderedAt =
        new(2026, 1, 7, 0, 0, 0, TimeSpan.Zero);

    private readonly Mock<ILogger<DeterminismVerifier>> _logger;
    private readonly DeterminismVerifier _sut;

    public DeterminismVerifierTests()
    {
        _logger = new Mock<ILogger<DeterminismVerifier>>();
        _sut = new DeterminismVerifier(_logger.Object);
    }

    #region Verify Tests

    [Fact]
    public void Verify_IdenticalVerdicts_ReturnsDeterministic()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeTrue();
        result.Differences.Should().BeEmpty();
        result.DeterminismScore.Should().Be(1.0);
        result.OriginalDigest.Should().Be(result.ReplayDigest);
    }

    [Fact]
    public void Verify_DifferentOutcome_ReturnsNonDeterministicWithCriticalDifference()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().ContainSingle(d =>
            d.Field == "Outcome" &&
            d.Severity == DifferenceSeverity.Critical);
        result.DeterminismScore.Should().BeLessThan(1.0);
    }

    [Fact]
    public void Verify_DifferentSeverity_ReturnsNonDeterministicWithHighDifference()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "high", findings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().ContainSingle(d =>
            d.Field == "Severity" &&
            d.Severity == DifferenceSeverity.High);
    }

    [Fact]
    public void Verify_DifferentFindingCount_ReturnsNonDeterministic()
    {
        // Arrange
        var originalFindings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var replayFindings = ImmutableArray<FindingRecord>.Empty;
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().Contain(d => d.Field == "FindingCount");
    }

    [Fact]
    public void Verify_MissingFindingInReplay_ReturnsNonDeterministic()
    {
        // Arrange
        var originalFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
            new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
        var replayFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().Contain(d =>
            d.Field == "Finding:f2" &&
            d.OriginalValue == "Present" &&
            d.ReplayValue == "Missing");
    }

    [Fact]
    public void Verify_NewFindingInReplay_ReturnsNonDeterministic()
    {
        // Arrange
        var originalFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
        var replayFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
            new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().Contain(d =>
            d.Field == "Finding:f2" &&
            d.OriginalValue == "Missing" &&
            d.ReplayValue == "Present");
    }

    [Fact]
    public void Verify_FindingSeverityDiffers_ReturnsNonDeterministic()
    {
        // Arrange
        var originalFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
        var replayFindings = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "critical" });
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().Contain(d =>
            d.Field == "Finding:f1:Severity" &&
            d.OriginalValue == "high" &&
            d.ReplayValue == "critical");
    }

    [Fact]
    public void Verify_DifferentRuleOrder_ReturnsNonDeterministic()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
            ["rule-1", "rule-2", "rule-3"]);
        var replay = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
            ["rule-1", "rule-3", "rule-2"]);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.IsDeterministic.Should().BeFalse();
        result.Differences.Should().Contain(d =>
            d.Field == "RuleOrder" &&
            d.Severity == DifferenceSeverity.Low);
    }

    #endregion

    #region ComputeVerdictDigest Tests

    [Fact]
    public void ComputeVerdictDigest_SameVerdict_ProducesSameDigest()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);

        // Act
        var digest1 = _sut.ComputeVerdictDigest(verdict1);
        var digest2 = _sut.ComputeVerdictDigest(verdict2);

        // Assert
        digest1.Should().Be(digest2);
    }

    [Fact]
    public void ComputeVerdictDigest_DifferentVerdict_ProducesDifferentDigest()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);

        // Act
        var digest1 = _sut.ComputeVerdictDigest(verdict1);
        var digest2 = _sut.ComputeVerdictDigest(verdict2);

        // Assert
        digest1.Should().NotBe(digest2);
    }

    [Fact]
    public void ComputeVerdictDigest_ReturnsSha256Format()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var verdict = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);

        // Act
        var digest = _sut.ComputeVerdictDigest(verdict);

        // Assert
        digest.Should().StartWith("sha256:");
        digest.Length.Should().Be(7 + 64); // "sha256:" + 64 hex chars
    }

    [Fact]
    public void ComputeVerdictDigest_FindingOrderDoesNotAffectDigest()
    {
        // Arrange
        var findings1 = ImmutableArray.Create(
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
            new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
        var findings2 = ImmutableArray.Create(
            new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" },
            new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
        var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings1);
        var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings2);

        // Act
        var digest1 = _sut.ComputeVerdictDigest(verdict1);
        var digest2 = _sut.ComputeVerdictDigest(verdict2);

        // Assert - should be the same due to deterministic ordering in digest calculation
        digest1.Should().Be(digest2);
    }

    #endregion

    #region GenerateDiffReport Tests

    [Fact]
    public void GenerateDiffReport_MatchingResult_ContainsMatchMessage()
    {
        // Arrange
        var result = new VerificationResult
        {
            OriginalDigest = "sha256:abc123",
            ReplayDigest = "sha256:abc123",
            IsDeterministic = true,
            DeterminismScore = 1.0,
            VerifiedAt = DateTimeOffset.UtcNow
        };

        // Act
        var report = _sut.GenerateDiffReport(result);

        // Assert
        report.Should().Contain("## Result: MATCH");
        report.Should().Contain("identical verdict");
    }

    [Fact]
    public void GenerateDiffReport_MismatchResult_ContainsDifferences()
    {
        // Arrange
        var result = new VerificationResult
        {
            OriginalDigest = "sha256:abc123",
            ReplayDigest = "sha256:def456",
            IsDeterministic = false,
            DeterminismScore = 0.5,
            VerifiedAt = DateTimeOffset.UtcNow,
            Differences = ImmutableArray.Create(
                new VerdictDifference
                {
                    Field = "Outcome",
                    OriginalValue = "Pass",
                    ReplayValue = "Fail",
                    Severity = DifferenceSeverity.Critical,
                    Explanation = "The final decision differs"
                })
        };

        // Act
        var report = _sut.GenerateDiffReport(result);

        // Assert
        report.Should().Contain("## Result: MISMATCH");
        report.Should().Contain("### Outcome");
        report.Should().Contain("**Original:** `Pass`");
        report.Should().Contain("**Replay:** `Fail`");
        report.Should().Contain("## Possible Causes");
    }

    [Fact]
    public void GenerateDiffReport_ContainsDigestTable()
    {
        // Arrange
        var result = new VerificationResult
        {
            OriginalDigest = "sha256:originaldigest",
            ReplayDigest = "sha256:replaydigest",
            IsDeterministic = false,
            DeterminismScore = 0.8,
            VerifiedAt = DateTimeOffset.UtcNow
        };

        // Act
        var report = _sut.GenerateDiffReport(result);

        // Assert
        report.Should().Contain("## Digests");
        report.Should().Contain("`sha256:originaldigest`");
        report.Should().Contain("`sha256:replaydigest`");
    }

    #endregion

    #region DeterminismScore Tests

    [Fact]
    public void Verify_NoDifferences_ScoreIsOne()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.DeterminismScore.Should().Be(1.0);
    }

    [Fact]
    public void Verify_CriticalDifference_ScoreDecreasesSignificantly()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
        var replay = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.DeterminismScore.Should().BeLessThanOrEqualTo(0.5); // Critical penalty is 0.5
    }

    [Fact]
    public void Verify_MultipleLowDifferences_ScoreDecreasesModestly()
    {
        // Arrange
        var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
        var original = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
            ["rule-1", "rule-2", "rule-3"]);
        var replay = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
            ["rule-3", "rule-2", "rule-1"]);

        // Act
        var result = _sut.Verify(original, replay);

        // Assert
        result.DeterminismScore.Should().BeGreaterThan(0.8); // Low penalty of 0.05
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Builds a single-element finding array for the common one-finding cases.
    /// </summary>
    /// <param name="findingId">Stable finding identifier.</param>
    /// <param name="cveId">Vulnerability identifier (e.g. a CVE).</param>
    /// <param name="component">Affected component as a package URL.</param>
    /// <param name="severity">Finding severity label.</param>
    private static ImmutableArray<FindingRecord> CreateFindings(
        string findingId,
        string cveId,
        string component,
        string severity)
    {
        return ImmutableArray.Create(new FindingRecord
        {
            FindingId = findingId,
            VulnerabilityId = cveId,
            Component = component,
            Severity = severity
        });
    }

    /// <summary>
    /// Builds a verdict with a single default rule and a fixed render time so
    /// that two calls with equal arguments produce structurally equal verdicts.
    /// </summary>
    private static VerdictRecord CreateVerdict(
        string verdictId,
        VerdictOutcome outcome,
        string severity,
        ImmutableArray<FindingRecord> findings)
    {
        return new VerdictRecord
        {
            VerdictId = verdictId,
            Outcome = outcome,
            Severity = severity,
            PolicyId = "policy-default",
            RuleIds = ImmutableArray.Create("rule-1"),
            Findings = findings,
            RenderedAt = FixedRenderedAt
        };
    }

    /// <summary>
    /// Builds a verdict with an explicit rule-id sequence, preserving the
    /// caller-supplied order (used by the rule-order tests).
    /// </summary>
    private static VerdictRecord CreateVerdictWithRules(
        string verdictId,
        VerdictOutcome outcome,
        string severity,
        ImmutableArray<FindingRecord> findings,
        string[] ruleIds)
    {
        return new VerdictRecord
        {
            VerdictId = verdictId,
            Outcome = outcome,
            Severity = severity,
            PolicyId = "policy-default",
            RuleIds = ruleIds.ToImmutableArray(),
            Findings = findings,
            RenderedAt = FixedRenderedAt
        };
    }

    #endregion
}

View File

@@ -0,0 +1,452 @@
// <copyright file="InputManifestResolverTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Moq;
using StellaOps.Replay.Core;
using Xunit;
namespace StellaOps.Replay.Core.Tests.Unit;
/// <summary>
/// Unit tests for <see cref="InputManifestResolver"/>.
/// Sprint: SPRINT_20260107_006_005 Task RB-010
/// </summary>
[Trait("Category", "Unit")]
public sealed class InputManifestResolverTests : IDisposable
{
    private readonly Mock<IFeedSnapshotStore> _feedStore;
    private readonly Mock<IPolicyManifestStore> _policyStore;
    private readonly Mock<IVexDocumentStore> _vexStore;
    private readonly MemoryCache _cache;
    private readonly Mock<ILogger<InputManifestResolver>> _logger;
    private readonly InputManifestResolver _sut;

    /// <summary>
    /// Wires the resolver against mocked stores and a real in-memory cache so
    /// that caching behavior (see <see cref="ResolveAsync_CachesResolvedInputs"/>)
    /// is exercised for real rather than mocked.
    /// </summary>
    public InputManifestResolverTests()
    {
        _feedStore = new Mock<IFeedSnapshotStore>();
        _policyStore = new Mock<IPolicyManifestStore>();
        _vexStore = new Mock<IVexDocumentStore>();
        _cache = new MemoryCache(new MemoryCacheOptions());
        _logger = new Mock<ILogger<InputManifestResolver>>();
        _sut = new InputManifestResolver(
            _feedStore.Object,
            _policyStore.Object,
            _vexStore.Object,
            _cache,
            _logger.Object);
    }

    /// <summary>
    /// Disposes the real MemoryCache created per test (xUnit runs the
    /// constructor/Dispose pair around every test method).
    /// </summary>
    public void Dispose()
    {
        _cache.Dispose();
    }

    #region ResolveAsync Tests

    [Fact]
    public async Task ResolveAsync_EmptyManifest_ReturnsCompleteWithNoData()
    {
        // Arrange
        var manifest = new InputManifest();

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeTrue();
        result.Errors.Should().BeEmpty();
        result.FeedData.Should().BeNull();
        result.PolicyBundle.Should().BeNull();
        result.VexDocuments.Should().BeEmpty();
    }

    [Fact]
    public async Task ResolveAsync_FeedSnapshotExists_ResolvesFeedData()
    {
        // Arrange
        var feedHash = "sha256:abc123";
        var feedData = new FeedData
        {
            Hash = feedHash,
            Content = "test feed content"u8.ToArray(),
            SnapshotAt = DateTimeOffset.UtcNow
        };
        _feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(feedData);
        var manifest = new InputManifest { FeedSnapshotHash = feedHash };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeTrue();
        result.FeedData.Should().Be(feedData);
        result.Errors.Should().BeEmpty();
    }

    [Fact]
    public async Task ResolveAsync_FeedSnapshotNotFound_AddsError()
    {
        // Arrange
        var feedHash = "sha256:notfound";
        _feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync((FeedData?)null);
        var manifest = new InputManifest { FeedSnapshotHash = feedHash };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeFalse();
        result.FeedData.Should().BeNull();
        result.Errors.Should().ContainSingle(e =>
            e.Type == InputType.FeedSnapshot &&
            e.Hash == feedHash &&
            e.Message.Contains("Not found"));
    }

    [Fact]
    public async Task ResolveAsync_PolicyManifestExists_ResolvesPolicyBundle()
    {
        // Arrange
        var policyHash = "sha256:policy123";
        var policyBundle = new PolicyBundle
        {
            Hash = policyHash,
            Content = ImmutableArray.Create((byte)1, (byte)2, (byte)3),
            Version = "1.0.0"
        };
        _policyStore.Setup(x => x.GetAsync(policyHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(policyBundle);
        var manifest = new InputManifest { PolicyManifestHash = policyHash };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeTrue();
        result.PolicyBundle.Should().Be(policyBundle);
    }

    [Fact]
    public async Task ResolveAsync_VexDocumentsExist_ResolvesAllDocuments()
    {
        // Arrange
        var vex1 = new VexDocument { Hash = "sha256:vex1", Content = "{}", Format = "OpenVEX" };
        var vex2 = new VexDocument { Hash = "sha256:vex2", Content = "{}", Format = "CSAF" };
        _vexStore.Setup(x => x.GetAsync("sha256:vex1", It.IsAny<CancellationToken>()))
            .ReturnsAsync(vex1);
        _vexStore.Setup(x => x.GetAsync("sha256:vex2", It.IsAny<CancellationToken>()))
            .ReturnsAsync(vex2);
        var manifest = new InputManifest
        {
            VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
        };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeTrue();
        result.VexDocuments.Should().HaveCount(2);
        result.VexDocuments.Should().Contain(vex1);
        result.VexDocuments.Should().Contain(vex2);
    }

    [Fact]
    public async Task ResolveAsync_PartialVexNotFound_AddsErrorButIncludesFound()
    {
        // Arrange
        var vex1 = new VexDocument { Hash = "sha256:vex1", Content = "{}", Format = "OpenVEX" };
        _vexStore.Setup(x => x.GetAsync("sha256:vex1", It.IsAny<CancellationToken>()))
            .ReturnsAsync(vex1);
        _vexStore.Setup(x => x.GetAsync("sha256:vex2", It.IsAny<CancellationToken>()))
            .ReturnsAsync((VexDocument?)null);
        var manifest = new InputManifest
        {
            VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
        };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeFalse();
        result.VexDocuments.Should().ContainSingle(v => v.Hash == "sha256:vex1");
        result.Errors.Should().ContainSingle(e =>
            e.Type == InputType.VexDocument &&
            e.Hash == "sha256:vex2");
    }

    [Fact]
    public async Task ResolveAsync_FeedStoreThrowsException_AddsError()
    {
        // Arrange
        var feedHash = "sha256:error";
        _feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
            .ThrowsAsync(new InvalidOperationException("Connection failed"));
        var manifest = new InputManifest { FeedSnapshotHash = feedHash };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.IsComplete.Should().BeFalse();
        result.FeedData.Should().BeNull();
        result.Errors.Should().ContainSingle(e =>
            e.Type == InputType.FeedSnapshot &&
            e.Message.Contains("Connection failed"));
    }

    [Fact]
    public async Task ResolveAsync_PassThroughFields_CopiedToResult()
    {
        // Arrange
        // Construct the timestamp explicitly instead of DateTimeOffset.Parse
        // so the test does not depend on the current culture (CA1305) and
        // needs no System.Globalization import.
        var timestamp = new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero);
        var manifest = new InputManifest
        {
            SourceCodeHash = "sha256:source",
            BaseImageDigest = "sha256:baseimage",
            ToolchainVersion = "1.2.3",
            RandomSeed = 42,
            TimestampOverride = timestamp
        };

        // Act
        var result = await _sut.ResolveAsync(manifest);

        // Assert
        result.SourceCodeHash.Should().Be("sha256:source");
        result.BaseImageDigest.Should().Be("sha256:baseimage");
        result.ToolchainVersion.Should().Be("1.2.3");
        result.RandomSeed.Should().Be(42);
        result.TimestampOverride.Should().Be(timestamp);
    }

    [Fact]
    public async Task ResolveAsync_CachesResolvedInputs()
    {
        // Arrange
        var feedHash = "sha256:cacheable";
        var feedData = new FeedData
        {
            Hash = feedHash,
            Content = "cached"u8.ToArray(),
            SnapshotAt = DateTimeOffset.UtcNow
        };
        _feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
            .ReturnsAsync(feedData);
        var manifest = new InputManifest { FeedSnapshotHash = feedHash };

        // Act - first call
        await _sut.ResolveAsync(manifest);

        // Act - second call
        await _sut.ResolveAsync(manifest);

        // Assert - store should only be called once due to caching
        _feedStore.Verify(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()), Times.Once);
    }

    #endregion

    #region ValidateAsync Tests

    [Fact]
    public async Task ValidateAsync_AllInputsExist_ReturnsValid()
    {
        // Arrange
        _feedStore.Setup(x => x.ExistsAsync("sha256:feed", It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        _policyStore.Setup(x => x.ExistsAsync("sha256:policy", It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        _vexStore.Setup(x => x.ExistsAsync("sha256:vex", It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        var manifest = new InputManifest
        {
            FeedSnapshotHash = "sha256:feed",
            PolicyManifestHash = "sha256:policy",
            VexDocumentHashes = ImmutableArray.Create("sha256:vex")
        };

        // Act
        var result = await _sut.ValidateAsync(manifest);

        // Assert
        result.IsValid.Should().BeTrue();
        result.MissingInputs.Should().BeEmpty();
    }

    [Fact]
    public async Task ValidateAsync_FeedMissing_ReturnsInvalidWithMissingList()
    {
        // Arrange
        _feedStore.Setup(x => x.ExistsAsync("sha256:missing", It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);
        var manifest = new InputManifest { FeedSnapshotHash = "sha256:missing" };

        // Act
        var result = await _sut.ValidateAsync(manifest);

        // Assert
        result.IsValid.Should().BeFalse();
        result.MissingInputs.Should().ContainSingle(m => m.Contains("Feed snapshot") && m.Contains("sha256:missing"));
    }

    [Fact]
    public async Task ValidateAsync_PolicyMissing_ReturnsInvalidWithMissingList()
    {
        // Arrange
        _policyStore.Setup(x => x.ExistsAsync("sha256:missing", It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);
        var manifest = new InputManifest { PolicyManifestHash = "sha256:missing" };

        // Act
        var result = await _sut.ValidateAsync(manifest);

        // Assert
        result.IsValid.Should().BeFalse();
        result.MissingInputs.Should().ContainSingle(m => m.Contains("Policy manifest"));
    }

    [Fact]
    public async Task ValidateAsync_VexMissing_ReturnsInvalidWithMissingList()
    {
        // Arrange
        _vexStore.Setup(x => x.ExistsAsync("sha256:vex1", It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        _vexStore.Setup(x => x.ExistsAsync("sha256:vex2", It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);
        var manifest = new InputManifest
        {
            VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
        };

        // Act
        var result = await _sut.ValidateAsync(manifest);

        // Assert
        result.IsValid.Should().BeFalse();
        result.MissingInputs.Should().ContainSingle(m => m.Contains("VEX document") && m.Contains("sha256:vex2"));
    }

    [Fact]
    public async Task ValidateAsync_EmptyManifest_ReturnsValid()
    {
        // Arrange
        var manifest = new InputManifest();

        // Act
        var result = await _sut.ValidateAsync(manifest);

        // Assert
        result.IsValid.Should().BeTrue();
        result.MissingInputs.Should().BeEmpty();
    }

    #endregion

    #region InputManifest Model Tests

    [Fact]
    public void InputManifest_DefaultValues_AreCorrect()
    {
        // Arrange & Act
        var manifest = new InputManifest();

        // Assert
        manifest.FeedSnapshotHash.Should().BeNull();
        manifest.PolicyManifestHash.Should().BeNull();
        manifest.SourceCodeHash.Should().BeNull();
        manifest.BaseImageDigest.Should().BeNull();
        manifest.VexDocumentHashes.Should().BeEmpty();
        manifest.ToolchainVersion.Should().BeNull();
        manifest.RandomSeed.Should().BeNull();
        manifest.TimestampOverride.Should().BeNull();
    }

    [Fact]
    public void InputManifest_WithInitializer_SetsValues()
    {
        // Arrange & Act
        var timestamp = DateTimeOffset.UtcNow;
        var manifest = new InputManifest
        {
            FeedSnapshotHash = "feed",
            PolicyManifestHash = "policy",
            SourceCodeHash = "source",
            BaseImageDigest = "image",
            VexDocumentHashes = ImmutableArray.Create("vex1", "vex2"),
            ToolchainVersion = "1.0",
            RandomSeed = 42,
            TimestampOverride = timestamp
        };

        // Assert
        manifest.FeedSnapshotHash.Should().Be("feed");
        manifest.PolicyManifestHash.Should().Be("policy");
        manifest.SourceCodeHash.Should().Be("source");
        manifest.BaseImageDigest.Should().Be("image");
        manifest.VexDocumentHashes.Should().HaveCount(2);
        manifest.ToolchainVersion.Should().Be("1.0");
        manifest.RandomSeed.Should().Be(42);
        manifest.TimestampOverride.Should().Be(timestamp);
    }

    #endregion

    #region ResolvedInputs Model Tests

    [Fact]
    public void ResolvedInputs_DefaultValues_AreCorrect()
    {
        // Arrange & Act
        var resolved = new ResolvedInputs();

        // Assert
        resolved.FeedData.Should().BeNull();
        resolved.PolicyBundle.Should().BeNull();
        resolved.VexDocuments.Should().BeEmpty();
        resolved.Errors.Should().BeEmpty();
        resolved.IsComplete.Should().BeFalse();
    }

    #endregion

    #region InputResolutionError Model Tests

    [Fact]
    public void InputResolutionError_RecordEquality_Works()
    {
        // Arrange
        var error1 = new InputResolutionError(InputType.FeedSnapshot, "hash1", "message1");
        var error2 = new InputResolutionError(InputType.FeedSnapshot, "hash1", "message1");
        var error3 = new InputResolutionError(InputType.PolicyManifest, "hash1", "message1");

        // Assert - positional records compare by value, not reference
        error1.Should().Be(error2);
        error1.Should().NotBe(error3);
    }

    #endregion
}

View File

@@ -13,6 +13,8 @@ using StellaOps.AuditPack.Services;
using StellaOps.Replay.WebService;
using StellaOps.TestKit;
using Xunit;
using AuditPackResult = StellaOps.AuditPack.Services.ReplayExecutionResult;
using AuditPackStatus = StellaOps.AuditPack.Services.ReplayStatus;
namespace StellaOps.Replay.Core.Tests;
@@ -52,12 +54,12 @@ public class VerdictReplayEndpointsTests
};
}
private static ReplayExecutionResult CreateSuccessResult(bool match = true)
private static AuditPackResult CreateSuccessResult(bool match = true)
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = match ? ReplayStatus.Match : ReplayStatus.Drift,
Status = match ? AuditPackStatus.Match : AuditPackStatus.Drift,
VerdictMatches = match,
DecisionMatches = match,
OriginalVerdictDigest = "sha256:verdict",
@@ -115,7 +117,7 @@ public class VerdictReplayEndpointsTests
ConfidenceScore = 0.95,
ExpectedOutcome = new ReplayOutcomePrediction
{
ExpectedStatus = ReplayStatus.Match,
ExpectedStatus = AuditPackStatus.Match,
ExpectedDecision = "pass"
}
});
@@ -155,14 +157,14 @@ public class VerdictReplayEndpointsTests
public void CompareDivergence_DetectsDifferences()
{
// Arrange
var original = new ReplayExecutionResult
var original = new AuditPackResult
{
Success = true,
OriginalVerdictDigest = "sha256:aaa",
OriginalDecision = "pass"
};
var replayed = new ReplayExecutionResult
var replayed = new AuditPackResult
{
Success = true,
ReplayedVerdictDigest = "sha256:bbb",
@@ -174,8 +176,8 @@ public class VerdictReplayEndpointsTests
};
_mockPredicate.Setup(p => p.CompareDivergence(
It.IsAny<ReplayExecutionResult>(),
It.IsAny<ReplayExecutionResult>()))
It.IsAny<AuditPackResult>(),
It.IsAny<AuditPackResult>()))
.Returns(new ReplayDivergenceReport
{
HasDivergence = true,
@@ -218,10 +220,10 @@ public class VerdictReplayEndpointsTests
public void ReplayExecutionResult_DriftItems_ArePopulated()
{
// Arrange
var result = new ReplayExecutionResult
var result = new AuditPackResult
{
Success = true,
Status = ReplayStatus.Drift,
Status = AuditPackStatus.Drift,
VerdictMatches = false,
Drifts =
[

View File

@@ -11,6 +11,8 @@ using StellaOps.AuditPack.Models;
using StellaOps.AuditPack.Services;
using StellaOps.TestKit;
using Xunit;
using AuditPackResult = StellaOps.AuditPack.Services.ReplayExecutionResult;
using AuditPackStatus = StellaOps.AuditPack.Services.ReplayStatus;
namespace StellaOps.Replay.Core.Tests;
@@ -195,7 +197,7 @@ public class VerdictReplayIntegrationTests
{
// Arrange
var attestationService = new ReplayAttestationService();
var replays = new List<(AuditBundleManifest, ReplayExecutionResult)>
var replays = new List<(AuditBundleManifest, AuditPackResult)>
{
(CreateTestManifest("bundle-1"), CreateMatchingReplayResult()),
(CreateTestManifest("bundle-2"), CreateDivergentReplayResult()),
@@ -358,12 +360,12 @@ public class VerdictReplayIntegrationTests
};
}
private static ReplayExecutionResult CreateMatchingReplayResult()
private static AuditPackResult CreateMatchingReplayResult()
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = ReplayStatus.Match,
Status = AuditPackStatus.Match,
VerdictMatches = true,
DecisionMatches = true,
OriginalVerdictDigest = "sha256:verdict-digest-123",
@@ -376,12 +378,12 @@ public class VerdictReplayIntegrationTests
};
}
private static ReplayExecutionResult CreateDivergentReplayResult()
private static AuditPackResult CreateDivergentReplayResult()
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = ReplayStatus.Drift,
Status = AuditPackStatus.Drift,
VerdictMatches = false,
DecisionMatches = false,
OriginalVerdictDigest = "sha256:verdict-original",