feat(rate-limiting): Implement core rate limiting functionality with configuration, decision-making, metrics, middleware, and service registration
- Add RateLimitConfig for configuration management with YAML binding support.
- Introduce RateLimitDecision to encapsulate the result of rate limit checks.
- Implement RateLimitMetrics for OpenTelemetry metrics tracking.
- Create RateLimitMiddleware for enforcing rate limits on incoming requests.
- Develop RateLimitService to orchestrate instance and environment rate limit checks.
- Add RateLimitServiceCollectionExtensions for dependency injection registration.
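The commit message describes how these pieces are meant to compose: configuration bound from YAML into RateLimitConfig, a RateLimitMiddleware that consults RateLimitService, and a service-collection extension for registration. None of that wiring appears in the hunks below, so the following is only a minimal sketch of the usual ASP.NET Core shape under stated assumptions: the AddRateLimiting extension method, the "RateLimits" configuration section name, and the explicit UseMiddleware call are hypothetical illustrations, not the commit's actual API.

// Hypothetical wiring sketch; the type names come from the commit message, everything else is assumed.
var builder = WebApplication.CreateBuilder(args);

// Bind RateLimitConfig from a configuration section (section name assumed).
builder.Services.Configure<RateLimitConfig>(builder.Configuration.GetSection("RateLimits"));

// Register decision/metrics/service types; AddRateLimiting stands in for whatever
// RateLimitServiceCollectionExtensions actually exposes.
builder.Services.AddRateLimiting(builder.Configuration);

var app = builder.Build();

// Enforce limits before endpoints execute.
app.UseMiddleware<RateLimitMiddleware>();
app.MapControllers();
app.Run();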
@@ -0,0 +1,441 @@
// ───────────────────────────────────────────────────────────────────────────
// StellaOps Attestor — Distributed Verification Provider (Resilient, Multi-Node)
// SPDX-License-Identifier: AGPL-3.0-or-later
// ───────────────────────────────────────────────────────────────────────────

using System.Collections.Concurrent;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Polly;
using Polly.CircuitBreaker;
using Polly.Retry;
using Polly.Timeout;
using StellaOps.Attestor.Verify.Configuration;
using StellaOps.Attestor.Verify.Models;

namespace StellaOps.Attestor.Verify.Providers;

/// <summary>
/// Provides distributed verification by distributing work across multiple verification nodes.
/// Implements circuit breaker, retry policies, and consistent hashing for deterministic routing.
/// </summary>
public class DistributedVerificationProvider : IVerificationProvider
{
    private readonly ILogger<DistributedVerificationProvider> _logger;
    private readonly DistributedVerificationOptions _options;
    private readonly HttpClient _httpClient;
    private readonly ConcurrentDictionary<string, CircuitBreakerState> _circuitStates = new();
    private readonly ConsistentHashRing _hashRing;
    private readonly ResiliencePipeline<VerificationResult> _resiliencePipeline;

    public DistributedVerificationProvider(
        ILogger<DistributedVerificationProvider> logger,
        IOptions<DistributedVerificationOptions> options,
        HttpClient httpClient)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));

        if (_options.Nodes == null || _options.Nodes.Count == 0)
        {
            throw new ArgumentException("At least one verification node must be configured");
        }

        _hashRing = new ConsistentHashRing(_options.Nodes, _options.VirtualNodeMultiplier);
        _resiliencePipeline = BuildResiliencePipeline();

        _logger.LogInformation("Initialized distributed verification provider with {NodeCount} nodes", _options.Nodes.Count);
    }

    /// <inheritdoc/>
    public async Task<VerificationResult> VerifyAsync(
        VerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        // Compute deterministic hash for routing
        var routingKey = ComputeRoutingKey(request);
        var orderedNodes = _hashRing.GetOrderedNodes(routingKey);

        _logger.LogDebug(
            "Routing verification request {RequestId} with key {RoutingKey} through {NodeCount} nodes",
            request.RequestId,
            routingKey,
            orderedNodes.Count);

        // Try nodes in order until one succeeds
        List<Exception> exceptions = [];
        foreach (var node in orderedNodes)
        {
            if (!IsNodeHealthy(node))
            {
                _logger.LogDebug("Skipping unhealthy node {NodeId}", node.Id);
                continue;
            }

            try
            {
                var result = await _resiliencePipeline.ExecuteAsync(
                    async ct => await ExecuteVerificationAsync(node, request, ct),
                    cancellationToken);

                _logger.LogInformation(
                    "Verification request {RequestId} completed on node {NodeId} with result {Status}",
                    request.RequestId,
                    node.Id,
                    result.Status);

                return result;
            }
            catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or BrokenCircuitException)
            {
                _logger.LogWarning(ex, "Node {NodeId} failed for request {RequestId}", node.Id, request.RequestId);
                exceptions.Add(ex);
                MarkNodeUnhealthy(node);
            }
        }

        // All nodes failed
        _logger.LogError(
            "All {NodeCount} nodes failed for verification request {RequestId}",
            orderedNodes.Count,
            request.RequestId);

        return new VerificationResult
        {
            RequestId = request.RequestId,
            Status = VerificationStatus.Error,
            ErrorMessage = $"All verification nodes failed. {exceptions.Count} errors occurred.",
            Timestamp = DateTimeOffset.UtcNow,
        };
    }

    /// <inheritdoc/>
    public async Task<HealthCheckResult> CheckHealthAsync(CancellationToken cancellationToken = default)
    {
        var results = new ConcurrentDictionary<string, bool>();
        var tasks = _options.Nodes.Select(async node =>
        {
            try
            {
                using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
                cts.CancelAfter(TimeSpan.FromSeconds(5));

                var response = await _httpClient.GetAsync(
                    new Uri(node.Endpoint, "health"),
                    cts.Token);

                results[node.Id] = response.IsSuccessStatusCode;
            }
            catch
            {
                results[node.Id] = false;
            }
        });

        await Task.WhenAll(tasks);

        var healthyCount = results.Count(r => r.Value);
        var totalCount = results.Count;

        return new HealthCheckResult
        {
            IsHealthy = healthyCount >= _options.MinHealthyNodes,
            HealthyNodeCount = healthyCount,
            TotalNodeCount = totalCount,
            NodeStatuses = results.ToDictionary(r => r.Key, r => r.Value),
            Timestamp = DateTimeOffset.UtcNow,
        };
    }

    /// <summary>
    /// Gets the current distribution statistics for monitoring.
    /// </summary>
    public DistributionStats GetDistributionStats()
    {
        var healthyNodes = _options.Nodes.Where(IsNodeHealthy).ToList();
        var unhealthyNodes = _options.Nodes.Except(healthyNodes).ToList();

        return new DistributionStats
        {
            TotalNodes = _options.Nodes.Count,
            HealthyNodes = healthyNodes.Count,
            UnhealthyNodes = unhealthyNodes.Count,
            VirtualNodesPerNode = _options.VirtualNodeMultiplier,
            CircuitBreakerStates = _circuitStates.ToDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToString()),
        };
    }

    private async Task<VerificationResult> ExecuteVerificationAsync(
        VerificationNode node,
        VerificationRequest request,
        CancellationToken cancellationToken)
    {
        var endpoint = new Uri(node.Endpoint, "api/v1/verify");

        _logger.LogDebug(
            "Sending verification request {RequestId} to node {NodeId} at {Endpoint}",
            request.RequestId,
            node.Id,
            endpoint);

        using var response = await _httpClient.PostAsJsonAsync(endpoint, request, cancellationToken);
        response.EnsureSuccessStatusCode();

        var result = await response.Content.ReadFromJsonAsync<VerificationResult>(cancellationToken);
        return result ?? throw new InvalidOperationException("Received null response from verification node");
    }

    private ResiliencePipeline<VerificationResult> BuildResiliencePipeline()
    {
        return new ResiliencePipelineBuilder<VerificationResult>()
            .AddTimeout(new TimeoutStrategyOptions
            {
                Timeout = _options.RequestTimeout,
                OnTimeout = args =>
                {
                    _logger.LogWarning("Request timed out after {Timeout}", args.Timeout);
                    return default;
                },
            })
            .AddRetry(new RetryStrategyOptions<VerificationResult>
            {
                MaxRetryAttempts = _options.MaxRetries,
                Delay = _options.RetryDelay,
                BackoffType = DelayBackoffType.Exponential,
                ShouldHandle = new PredicateBuilder<VerificationResult>()
                    .Handle<HttpRequestException>()
                    .Handle<TaskCanceledException>(),
                OnRetry = args =>
                {
                    _logger.LogWarning(
                        args.Outcome.Exception,
                        "Retry attempt {AttemptNumber} after delay {Delay}",
                        args.AttemptNumber,
                        args.RetryDelay);
                    return default;
                },
            })
            .Build();
    }

    private static string ComputeRoutingKey(VerificationRequest request)
    {
        // Create a deterministic routing key based on the content to verify
        // This ensures the same content always routes to the same primary node
        var keyMaterial = $"{request.DigestAlgorithm}:{request.Digest}:{request.ArtifactUri}";
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(keyMaterial));
        return Convert.ToHexString(hashBytes);
    }

    private bool IsNodeHealthy(VerificationNode node)
    {
        if (!_circuitStates.TryGetValue(node.Id, out var state))
        {
            return true; // No circuit breaker state means healthy
        }

        // Allow recovery after cooldown period
        if (state.LastFailure.HasValue &&
            DateTimeOffset.UtcNow - state.LastFailure.Value > _options.CircuitBreakerCooldown)
        {
            state.FailureCount = 0;
            state.LastFailure = null;
            return true;
        }

        return state.FailureCount < _options.CircuitBreakerThreshold;
    }

    private void MarkNodeUnhealthy(VerificationNode node)
    {
        var state = _circuitStates.GetOrAdd(node.Id, _ => new CircuitBreakerState());
        state.FailureCount++;
        state.LastFailure = DateTimeOffset.UtcNow;

        if (state.FailureCount >= _options.CircuitBreakerThreshold)
        {
            _logger.LogWarning(
                "Node {NodeId} circuit breaker opened after {FailureCount} failures",
                node.Id,
                state.FailureCount);
        }
    }

    private sealed class CircuitBreakerState
    {
        public int FailureCount { get; set; }
        public DateTimeOffset? LastFailure { get; set; }

        public override string ToString() =>
            FailureCount >= 3 ? "Open" : FailureCount > 0 ? "HalfOpen" : "Closed";
    }
}

/// <summary>
/// Implements consistent hashing for deterministic node selection.
/// </summary>
internal sealed class ConsistentHashRing
{
    private readonly SortedDictionary<int, VerificationNode> _ring = new();
    private readonly int[] _sortedHashes;
    private readonly VerificationNode[] _sortedNodes;

    public ConsistentHashRing(IReadOnlyList<VerificationNode> nodes, int virtualNodeMultiplier)
    {
        foreach (var node in nodes)
        {
            for (var i = 0; i < virtualNodeMultiplier; i++)
            {
                var virtualKey = $"{node.Id}:{i}";
                var hash = ComputeHash(virtualKey);
                _ring[hash] = node;
            }
        }

        _sortedHashes = [.. _ring.Keys];
        _sortedNodes = [.. _ring.Values];
    }

    /// <summary>
    /// Gets nodes ordered by proximity to the routing key for failover.
    /// </summary>
    public List<VerificationNode> GetOrderedNodes(string routingKey)
    {
        var keyHash = ComputeHash(routingKey);

        // Binary search for the first node >= hash
        var index = Array.BinarySearch(_sortedHashes, keyHash);
        if (index < 0)
        {
            index = ~index;
        }

        // Collect unique nodes starting from the found position
        var orderedNodes = new List<VerificationNode>();
        var seen = new HashSet<string>();

        for (var i = 0; i < _sortedHashes.Length && orderedNodes.Count < _ring.Count; i++)
        {
            var actualIndex = (index + i) % _sortedHashes.Length;
            var node = _sortedNodes[actualIndex];

            if (seen.Add(node.Id))
            {
                orderedNodes.Add(node);
            }
        }

        return orderedNodes;
    }

    private static int ComputeHash(string key)
    {
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(key));
        return BitConverter.ToInt32(hashBytes, 0);
    }
}

/// <summary>
/// Configuration options for distributed verification.
/// </summary>
public class DistributedVerificationOptions
{
    /// <summary>
    /// List of verification nodes.
    /// </summary>
    public List<VerificationNode> Nodes { get; set; } = [];

    /// <summary>
    /// Minimum number of healthy nodes required.
    /// </summary>
    public int MinHealthyNodes { get; set; } = 1;

    /// <summary>
    /// Number of virtual nodes per physical node for consistent hashing.
    /// </summary>
    public int VirtualNodeMultiplier { get; set; } = 100;

    /// <summary>
    /// Maximum retry attempts per node.
    /// </summary>
    public int MaxRetries { get; set; } = 3;

    /// <summary>
    /// Delay between retries.
    /// </summary>
    public TimeSpan RetryDelay { get; set; } = TimeSpan.FromMilliseconds(500);

    /// <summary>
    /// Request timeout per node.
    /// </summary>
    public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Number of consecutive failures before circuit breaker opens.
    /// </summary>
    public int CircuitBreakerThreshold { get; set; } = 3;

    /// <summary>
    /// Time before a tripped circuit breaker allows retry.
    /// </summary>
    public TimeSpan CircuitBreakerCooldown { get; set; } = TimeSpan.FromMinutes(1);
}

/// <summary>
/// Represents a verification node in the distributed cluster.
/// </summary>
public class VerificationNode
{
    /// <summary>
    /// Unique identifier for this node.
    /// </summary>
    public required string Id { get; init; }

    /// <summary>
    /// Base URI for the node's API.
    /// </summary>
    public required Uri Endpoint { get; init; }

    /// <summary>
    /// Node priority (lower = higher priority).
    /// </summary>
    public int Priority { get; init; } = 100;

    /// <summary>
    /// Node region for locality-aware routing.
    /// </summary>
    public string? Region { get; init; }
}

/// <summary>
/// Health check result for the distributed provider.
/// </summary>
public class HealthCheckResult
{
    public bool IsHealthy { get; init; }
    public int HealthyNodeCount { get; init; }
    public int TotalNodeCount { get; init; }
    public Dictionary<string, bool> NodeStatuses { get; init; } = [];
    public DateTimeOffset Timestamp { get; init; }
}

/// <summary>
/// Distribution statistics for monitoring.
/// </summary>
public class DistributionStats
{
    public int TotalNodes { get; init; }
    public int HealthyNodes { get; init; }
    public int UnhealthyNodes { get; init; }
    public int VirtualNodesPerNode { get; init; }
    public Dictionary<string, string> CircuitBreakerStates { get; init; } = [];
}
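A brief usage sketch for the provider above. It uses only types defined in this file; the node URLs are placeholders and the logger/HttpClient setup is generic boilerplate, so treat it as an illustration rather than the service's real composition root.

// Sketch: constructing the provider directly, outside DI, with two placeholder nodes.
var options = Options.Create(new DistributedVerificationOptions
{
    Nodes =
    [
        new VerificationNode { Id = "node-a", Endpoint = new Uri("https://verify-a.internal.example/") },
        new VerificationNode { Id = "node-b", Endpoint = new Uri("https://verify-b.internal.example/") },
    ],
    MinHealthyNodes = 1,
    MaxRetries = 2,
});

using var loggerFactory = LoggerFactory.Create(logging => logging.AddConsole());
using var httpClient = new HttpClient();

var provider = new DistributedVerificationProvider(
    loggerFactory.CreateLogger<DistributedVerificationProvider>(),
    options,
    httpClient);

// The same artifact digest always hashes to the same primary node; the remaining
// nodes form the deterministic failover order used by VerifyAsync.
var health = await provider.CheckHealthAsync();
Console.WriteLine($"healthy nodes: {health.HealthyNodeCount}/{health.TotalNodeCount}");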
@@ -0,0 +1,314 @@
// -----------------------------------------------------------------------------
// ProofsApiContractTests.cs
// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
// Task: PROOF-API-0010 - API contract tests (OpenAPI validation)
// Description: Contract tests to verify API endpoints conform to OpenAPI spec
// -----------------------------------------------------------------------------

using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.AspNetCore.Mvc.Testing;
using StellaOps.Attestor.WebService.Contracts.Proofs;
using Xunit;

namespace StellaOps.Attestor.Tests.Api;

/// <summary>
/// API contract tests for /proofs/* endpoints.
/// Validates response shapes, status codes, and error formats per OpenAPI spec.
/// </summary>
public class ProofsApiContractTests : IClassFixture<WebApplicationFactory<Program>>
{
    private readonly HttpClient _client;

    public ProofsApiContractTests(WebApplicationFactory<Program> factory)
    {
        _client = factory.CreateClient();
    }

    #region POST /proofs/{entry}/spine Contract Tests

    [Fact]
    public async Task CreateSpine_ValidRequest_Returns201Created()
    {
        // Arrange
        var entry = "sha256:abc123def456abc123def456abc123def456abc123def456abc123def456abc1:pkg:npm/lodash@4.17.21";
        var request = new CreateSpineRequest
        {
            EvidenceIds = new[] { "sha256:ev123abc456def789012345678901234567890123456789012345678901234" },
            ReasoningId = "sha256:reason123abc456def789012345678901234567890123456789012345678901",
            VexVerdictId = "sha256:vex123abc456def789012345678901234567890123456789012345678901234",
            PolicyVersion = "v1.0.0"
        };

        // Act
        var response = await _client.PostAsJsonAsync($"/proofs/{Uri.EscapeDataString(entry)}/spine", request);

        // Assert
        Assert.Equal(HttpStatusCode.Created, response.StatusCode);

        var content = await response.Content.ReadFromJsonAsync<CreateSpineResponse>();
        Assert.NotNull(content);
        Assert.NotEmpty(content.ProofBundleId);
        Assert.Matches(@"^sha256:[a-f0-9]{64}$", content.ProofBundleId);
    }

    [Fact]
    public async Task CreateSpine_InvalidEntryFormat_Returns400BadRequest()
    {
        // Arrange
        var invalidEntry = "not-a-valid-entry";
        var request = new CreateSpineRequest
        {
            EvidenceIds = new[] { "sha256:abc123" },
            ReasoningId = "sha256:def456",
            VexVerdictId = "sha256:789xyz",
            PolicyVersion = "v1.0.0"
        };

        // Act
        var response = await _client.PostAsJsonAsync($"/proofs/{invalidEntry}/spine", request);

        // Assert
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);

        var problemDetails = await response.Content.ReadFromJsonAsync<JsonElement>();
        Assert.True(problemDetails.TryGetProperty("title", out var title));
        Assert.NotEmpty(title.GetString());
    }

    [Fact]
    public async Task CreateSpine_MissingRequiredFields_Returns400BadRequest()
    {
        // Arrange
        var entry = "sha256:abc123:pkg:npm/test@1.0.0";
        var invalidRequest = new { }; // Missing all required fields

        // Act
        var response = await _client.PostAsJsonAsync($"/proofs/{Uri.EscapeDataString(entry)}/spine", invalidRequest);

        // Assert
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task CreateSpine_InvalidEvidenceIdFormat_Returns422UnprocessableEntity()
    {
        // Arrange
        var entry = "sha256:abc123def456abc123def456abc123def456abc123def456abc123def456abc1:pkg:npm/test@1.0.0";
        var request = new CreateSpineRequest
        {
            EvidenceIds = new[] { "invalid-not-sha256" }, // Invalid format
            ReasoningId = "sha256:reason123abc456def789012345678901234567890123456789012345678901",
            VexVerdictId = "sha256:vex123abc456def789012345678901234567890123456789012345678901234",
            PolicyVersion = "v1.0.0"
        };

        // Act
        var response = await _client.PostAsJsonAsync($"/proofs/{Uri.EscapeDataString(entry)}/spine", request);

        // Assert - expect 400 or 422 for validation failure
        Assert.True(
            response.StatusCode == HttpStatusCode.BadRequest ||
            response.StatusCode == HttpStatusCode.UnprocessableEntity);
    }

    #endregion

    #region GET /proofs/{entry}/receipt Contract Tests

    [Fact]
    public async Task GetReceipt_ExistingEntry_Returns200WithReceipt()
    {
        // Arrange - first create a spine
        var entry = "sha256:abc123def456abc123def456abc123def456abc123def456abc123def456abc1:pkg:npm/test@1.0.0";

        // Create spine first
        var createRequest = new CreateSpineRequest
        {
            EvidenceIds = new[] { "sha256:ev123abc456def789012345678901234567890123456789012345678901234" },
            ReasoningId = "sha256:reason123abc456def789012345678901234567890123456789012345678901",
            VexVerdictId = "sha256:vex123abc456def789012345678901234567890123456789012345678901234",
            PolicyVersion = "v1.0.0"
        };
        await _client.PostAsJsonAsync($"/proofs/{Uri.EscapeDataString(entry)}/spine", createRequest);

        // Act
        var response = await _client.GetAsync($"/proofs/{Uri.EscapeDataString(entry)}/receipt");

        // Assert - may be 200 or 404 depending on implementation state
        Assert.True(
            response.StatusCode == HttpStatusCode.OK ||
            response.StatusCode == HttpStatusCode.NotFound,
            $"Expected 200 OK or 404 Not Found, got {response.StatusCode}");

        if (response.StatusCode == HttpStatusCode.OK)
        {
            var receipt = await response.Content.ReadFromJsonAsync<VerificationReceiptDto>();
            Assert.NotNull(receipt);
            Assert.NotEmpty(receipt.ProofBundleId);
            Assert.NotNull(receipt.VerifiedAt);
            Assert.NotEmpty(receipt.Result);
            Assert.Contains(receipt.Result, new[] { "pass", "fail" });
        }
    }

    [Fact]
    public async Task GetReceipt_NonExistentEntry_Returns404NotFound()
    {
        // Arrange
        var nonExistentEntry = "sha256:nonexistent123456789012345678901234567890123456789012345678901:pkg:npm/ghost@0.0.0";

        // Act
        var response = await _client.GetAsync($"/proofs/{Uri.EscapeDataString(nonExistentEntry)}/receipt");

        // Assert
        Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);

        var problemDetails = await response.Content.ReadFromJsonAsync<JsonElement>();
        Assert.True(problemDetails.TryGetProperty("status", out var status));
        Assert.Equal(404, status.GetInt32());
    }

    #endregion

    #region Response Format Contract Tests

    [Fact]
    public async Task AllEndpoints_ReturnJsonContentType()
    {
        // Arrange
        var entry = "sha256:test123:pkg:npm/test@1.0.0";

        // Act
        var getResponse = await _client.GetAsync($"/proofs/{Uri.EscapeDataString(entry)}/receipt");

        // Assert
        Assert.Contains("application/json", getResponse.Content.Headers.ContentType?.MediaType ?? "");
    }

    [Fact]
    public async Task ErrorResponses_UseProblemDetailsFormat()
    {
        // Arrange
        var invalidEntry = "invalid";

        // Act
        var response = await _client.GetAsync($"/proofs/{invalidEntry}/receipt");

        // Assert - check problem details structure
        if (!response.IsSuccessStatusCode)
        {
            var content = await response.Content.ReadAsStringAsync();
            if (!string.IsNullOrEmpty(content))
            {
                var json = JsonDocument.Parse(content);
                // Problem Details should have these fields (RFC 7807)
                var root = json.RootElement;
                // At minimum should have status or title
                Assert.True(
                    root.TryGetProperty("status", out _) ||
                    root.TryGetProperty("title", out _) ||
                    root.TryGetProperty("type", out _),
                    "Error response should follow Problem Details format");
            }
        }
    }

    #endregion

    #region Content Negotiation Tests

    [Fact]
    public async Task Endpoint_AcceptsJsonContentType()
    {
        // Arrange
        var entry = "sha256:abc123def456abc123def456abc123def456abc123def456abc123def456abc1:pkg:npm/test@1.0.0";
        var request = new CreateSpineRequest
        {
            EvidenceIds = new[] { "sha256:ev123abc456def789012345678901234567890123456789012345678901234" },
            ReasoningId = "sha256:reason123abc456def789012345678901234567890123456789012345678901",
            VexVerdictId = "sha256:vex123abc456def789012345678901234567890123456789012345678901234",
            PolicyVersion = "v1.0.0"
        };

        var jsonContent = new StringContent(
            JsonSerializer.Serialize(request),
            System.Text.Encoding.UTF8,
            "application/json");

        // Act
        var response = await _client.PostAsync($"/proofs/{Uri.EscapeDataString(entry)}/spine", jsonContent);

        // Assert - should accept JSON
        Assert.NotEqual(HttpStatusCode.UnsupportedMediaType, response.StatusCode);
    }

    #endregion
}

/// <summary>
/// Contract tests for /anchors/* endpoints.
/// </summary>
public class AnchorsApiContractTests : IClassFixture<WebApplicationFactory<Program>>
{
    private readonly HttpClient _client;

    public AnchorsApiContractTests(WebApplicationFactory<Program> factory)
    {
        _client = factory.CreateClient();
    }

    [Fact]
    public async Task GetAnchor_NonExistentId_Returns404()
    {
        // Arrange
        var nonExistentId = Guid.NewGuid();

        // Act
        var response = await _client.GetAsync($"/anchors/{nonExistentId}");

        // Assert
        Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
    }

    [Fact]
    public async Task GetAnchor_InvalidIdFormat_Returns400()
    {
        // Arrange
        var invalidId = "not-a-guid";

        // Act
        var response = await _client.GetAsync($"/anchors/{invalidId}");

        // Assert
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }
}

/// <summary>
/// Contract tests for /verify/* endpoints.
/// </summary>
public class VerifyApiContractTests : IClassFixture<WebApplicationFactory<Program>>
{
    private readonly HttpClient _client;

    public VerifyApiContractTests(WebApplicationFactory<Program> factory)
    {
        _client = factory.CreateClient();
    }

    [Fact]
    public async Task VerifyBundle_InvalidBundleId_Returns400()
    {
        // Arrange
        var invalidBundleId = "invalid";

        // Act
        var response = await _client.PostAsync($"/verify/{invalidBundleId}", null);

        // Assert
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }
}
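The tests above repeatedly address proofs by a composite "<sha256 digest>:<purl>" path segment and always escape the whole value before building the route. That format is inferred from the test literals rather than from a published route spec, so the helper below is only a sketch of the convention the Arrange blocks follow.

// Sketch: building the escaped /proofs/{entry}/spine path the way the tests above do.
static string BuildSpinePath(string sha256Digest, string purl)
{
    var entry = $"{sha256Digest}:{purl}"; // e.g. "sha256:<64 hex chars>:pkg:npm/lodash@4.17.21"
    return $"/proofs/{Uri.EscapeDataString(entry)}/spine";
}

var path = BuildSpinePath(
    "sha256:abc123def456abc123def456abc123def456abc123def456abc123def456abc1",
    "pkg:npm/lodash@4.17.21");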
@@ -0,0 +1,399 @@
// -----------------------------------------------------------------------------
// PostgresRekorSubmissionQueueIntegrationTests.cs
// Sprint: SPRINT_3000_0001_0002_rekor_retry_queue_metrics
// Task: T14
// Description: PostgreSQL integration tests for Rekor submission queue
// -----------------------------------------------------------------------------

using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Queue;
using StellaOps.Attestor.Infrastructure.Queue;
using Testcontainers.PostgreSql;
using Xunit;

namespace StellaOps.Attestor.Tests.Integration.Queue;

/// <summary>
/// Integration tests for PostgresRekorSubmissionQueue using Testcontainers.
/// These tests verify end-to-end queue operations against a real PostgreSQL instance.
/// </summary>
[Trait("Category", "Integration")]
public class PostgresRekorSubmissionQueueIntegrationTests : IAsyncLifetime
{
    private PostgreSqlContainer _postgres = null!;
    private NpgsqlDataSource _dataSource = null!;
    private PostgresRekorSubmissionQueue _queue = null!;
    private FakeTimeProvider _timeProvider = null!;
    private AttestorMetrics _metrics = null!;

    public async Task InitializeAsync()
    {
        _postgres = new PostgreSqlBuilder()
            .WithImage("postgres:16-alpine")
            .WithDatabase("stellaops_attestor")
            .WithUsername("test")
            .WithPassword("test")
            .Build();

        await _postgres.StartAsync();

        var connectionString = _postgres.GetConnectionString();
        _dataSource = NpgsqlDataSource.Create(connectionString);

        // Create the schema and table
        await CreateSchemaAndTableAsync();

        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 17, 12, 0, 0, TimeSpan.Zero));
        _metrics = new AttestorMetrics(new System.Diagnostics.Metrics.Meter("test"));

        _queue = new PostgresRekorSubmissionQueue(
            _dataSource,
            Options.Create(new RekorQueueOptions
            {
                MaxAttempts = 5,
                RetryDelaySeconds = 60,
                BatchSize = 10
            }),
            _metrics,
            _timeProvider,
            NullLogger<PostgresRekorSubmissionQueue>.Instance);
    }

    public async Task DisposeAsync()
    {
        await _dataSource.DisposeAsync();
        await _postgres.DisposeAsync();
    }

    private async Task CreateSchemaAndTableAsync()
    {
        const string schemaAndTableSql = """
            CREATE SCHEMA IF NOT EXISTS attestor;

            CREATE TABLE IF NOT EXISTS attestor.rekor_submission_queue (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                tenant_id TEXT NOT NULL,
                bundle_sha256 TEXT NOT NULL,
                dsse_payload BYTEA NOT NULL,
                backend TEXT NOT NULL,
                status TEXT NOT NULL DEFAULT 'pending',
                attempt_count INT NOT NULL DEFAULT 0,
                max_attempts INT NOT NULL DEFAULT 5,
                last_attempt_at TIMESTAMPTZ,
                last_error TEXT,
                next_retry_at TIMESTAMPTZ,
                created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
            );

            CREATE INDEX IF NOT EXISTS idx_rekor_queue_status_retry
                ON attestor.rekor_submission_queue (status, next_retry_at)
                WHERE status IN ('pending', 'retrying');

            CREATE INDEX IF NOT EXISTS idx_rekor_queue_tenant
                ON attestor.rekor_submission_queue (tenant_id, created_at DESC);

            CREATE INDEX IF NOT EXISTS idx_rekor_queue_bundle
                ON attestor.rekor_submission_queue (bundle_sha256);
            """;

        await using var connection = await _dataSource.OpenConnectionAsync();
        await using var command = new NpgsqlCommand(schemaAndTableSql, connection);
        await command.ExecuteNonQueryAsync();
    }

    #region Enqueue Tests

    [Fact]
    public async Task EnqueueAsync_ValidItem_InsertsIntoDatabase()
    {
        // Arrange
        var tenantId = "tenant-123";
        var bundleSha256 = "sha256:abc123";
        var dssePayload = new byte[] { 0x01, 0x02, 0x03 };
        var backend = "primary";

        // Act
        var id = await _queue.EnqueueAsync(tenantId, bundleSha256, dssePayload, backend);

        // Assert
        id.Should().NotBeEmpty();

        var item = await GetQueueItemByIdAsync(id);
        item.Should().NotBeNull();
        item!.TenantId.Should().Be(tenantId);
        item.BundleSha256.Should().Be(bundleSha256);
        item.Status.Should().Be(RekorSubmissionStatus.Pending);
        item.AttemptCount.Should().Be(0);
    }

    [Fact]
    public async Task EnqueueAsync_MultipleItems_AllInserted()
    {
        // Arrange & Act
        var ids = new List<Guid>();
        for (int i = 0; i < 5; i++)
        {
            ids.Add(await _queue.EnqueueAsync(
                $"tenant-{i}",
                $"sha256:bundle{i}",
                new byte[] { (byte)i },
                "primary"));
        }

        // Assert
        var count = await GetQueueCountAsync();
        count.Should().BeGreaterOrEqualTo(5);
    }

    #endregion

    #region Dequeue Tests

    [Fact]
    public async Task DequeueAsync_PendingItems_ReturnsAndMarksSubmitting()
    {
        // Arrange
        await _queue.EnqueueAsync("tenant-1", "sha256:bundle1", new byte[] { 0x01 }, "primary");
        await _queue.EnqueueAsync("tenant-2", "sha256:bundle2", new byte[] { 0x02 }, "primary");

        // Act
        var items = await _queue.DequeueAsync(10);

        // Assert
        items.Should().HaveCountGreaterOrEqualTo(2);
        items.Should().OnlyContain(i => i.Status == RekorSubmissionStatus.Submitting);
    }

    [Fact]
    public async Task DequeueAsync_EmptyQueue_ReturnsEmpty()
    {
        // Act
        var items = await _queue.DequeueAsync(10);

        // Assert - may have items from other tests but status should filter them
        items.Where(i => i.Status == RekorSubmissionStatus.Pending).Should().BeEmpty();
    }

    [Fact]
    public async Task DequeueAsync_BatchSize_RespectsLimit()
    {
        // Arrange
        for (int i = 0; i < 10; i++)
        {
            await _queue.EnqueueAsync($"tenant-batch-{i}", $"sha256:batch{i}", new byte[] { (byte)i }, "primary");
        }

        // Act
        var items = await _queue.DequeueAsync(3);

        // Assert
        items.Should().HaveCountLessOrEqualTo(3);
    }

    [Fact]
    public async Task DequeueAsync_ConcurrentSafe_NoDoubleDequeue()
    {
        // Arrange
        var uniqueBundle = $"sha256:concurrent-{Guid.NewGuid()}";
        await _queue.EnqueueAsync("tenant-concurrent", uniqueBundle, new byte[] { 0x01 }, "primary");

        // Act - Simulate concurrent dequeue
        var task1 = _queue.DequeueAsync(10);
        var task2 = _queue.DequeueAsync(10);

        var results = await Task.WhenAll(task1, task2);

        // Assert - Item should only appear in one result
        var allItems = results.SelectMany(r => r).Where(i => i.BundleSha256 == uniqueBundle).ToList();
        allItems.Should().HaveCountLessOrEqualTo(1);
    }

    #endregion

    #region Status Update Tests

    [Fact]
    public async Task MarkSubmittedAsync_UpdatesStatusAndLogIndex()
    {
        // Arrange
        var id = await _queue.EnqueueAsync("tenant-1", "sha256:submit", new byte[] { 0x01 }, "primary");
        await _queue.DequeueAsync(10); // Move to submitting

        // Act
        await _queue.MarkSubmittedAsync(id, 12345L);

        // Assert
        var item = await GetQueueItemByIdAsync(id);
        item!.Status.Should().Be(RekorSubmissionStatus.Submitted);
    }

    [Fact]
    public async Task MarkFailedAsync_SchedulesRetry()
    {
        // Arrange
        var id = await _queue.EnqueueAsync("tenant-1", "sha256:fail", new byte[] { 0x01 }, "primary");
        await _queue.DequeueAsync(10); // Move to submitting

        // Act
        await _queue.MarkFailedAsync(id, "Connection refused");

        // Assert
        var item = await GetQueueItemByIdAsync(id);
        item!.Status.Should().Be(RekorSubmissionStatus.Retrying);
        item.LastError.Should().Be("Connection refused");
        item.AttemptCount.Should().Be(1);
    }

    [Fact]
    public async Task MarkFailedAsync_MaxAttempts_MovesToDeadLetter()
    {
        // Arrange - Use custom options with low max attempts
        var queue = new PostgresRekorSubmissionQueue(
            _dataSource,
            Options.Create(new RekorQueueOptions { MaxAttempts = 2 }),
            _metrics,
            _timeProvider,
            NullLogger<PostgresRekorSubmissionQueue>.Instance);

        var id = await queue.EnqueueAsync("tenant-1", "sha256:deadletter", new byte[] { 0x01 }, "primary");

        // Fail twice
        await queue.DequeueAsync(10);
        await queue.MarkFailedAsync(id, "Attempt 1");

        _timeProvider.Advance(TimeSpan.FromMinutes(5));
        await queue.DequeueAsync(10);
        await queue.MarkFailedAsync(id, "Attempt 2");

        // Assert
        var item = await GetQueueItemByIdAsync(id);
        item!.Status.Should().Be(RekorSubmissionStatus.DeadLetter);
    }

    #endregion

    #region Queue Depth Tests

    [Fact]
    public async Task GetQueueDepthAsync_ReturnsCorrectCount()
    {
        // Arrange
        var baseDepth = await _queue.GetQueueDepthAsync();

        await _queue.EnqueueAsync("tenant-depth-1", "sha256:depth1", new byte[] { 0x01 }, "primary");
        await _queue.EnqueueAsync("tenant-depth-2", "sha256:depth2", new byte[] { 0x02 }, "primary");

        // Act
        var newDepth = await _queue.GetQueueDepthAsync();

        // Assert
        newDepth.Should().BeGreaterOrEqualTo(baseDepth + 2);
    }

    [Fact]
    public async Task GetDeadLetterCountAsync_ReturnsCorrectCount()
    {
        // Arrange
        var queue = new PostgresRekorSubmissionQueue(
            _dataSource,
            Options.Create(new RekorQueueOptions { MaxAttempts = 1 }),
            _metrics,
            _timeProvider,
            NullLogger<PostgresRekorSubmissionQueue>.Instance);

        var id = await queue.EnqueueAsync("tenant-dlq", "sha256:dlq", new byte[] { 0x01 }, "primary");
        await queue.DequeueAsync(10);
        await queue.MarkFailedAsync(id, "Fail");

        // Act
        var dlqCount = await queue.GetDeadLetterCountAsync();

        // Assert
        dlqCount.Should().BeGreaterOrEqualTo(1);
    }

    #endregion

    #region Helper Methods

    private async Task<RekorQueueItem?> GetQueueItemByIdAsync(Guid id)
    {
        const string sql = """
            SELECT id, tenant_id, bundle_sha256, dsse_payload, backend,
                   status, attempt_count, max_attempts, next_retry_at,
                   created_at, updated_at, last_error
            FROM attestor.rekor_submission_queue
            WHERE id = @id
            """;

        await using var connection = await _dataSource.OpenConnectionAsync();
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@id", id);

        await using var reader = await command.ExecuteReaderAsync();
        if (await reader.ReadAsync())
        {
            return new RekorQueueItem
            {
                Id = reader.GetGuid(reader.GetOrdinal("id")),
                TenantId = reader.GetString(reader.GetOrdinal("tenant_id")),
                BundleSha256 = reader.GetString(reader.GetOrdinal("bundle_sha256")),
                DssePayload = reader.GetFieldValue<byte[]>(reader.GetOrdinal("dsse_payload")),
                Backend = reader.GetString(reader.GetOrdinal("backend")),
                Status = ParseStatus(reader.GetString(reader.GetOrdinal("status"))),
                AttemptCount = reader.GetInt32(reader.GetOrdinal("attempt_count")),
                LastError = reader.IsDBNull(reader.GetOrdinal("last_error"))
                    ? null
                    : reader.GetString(reader.GetOrdinal("last_error"))
            };
        }

        return null;
    }

    private async Task<int> GetQueueCountAsync()
    {
        const string sql = "SELECT COUNT(*) FROM attestor.rekor_submission_queue";

        await using var connection = await _dataSource.OpenConnectionAsync();
        await using var command = new NpgsqlCommand(sql, connection);
        return Convert.ToInt32(await command.ExecuteScalarAsync());
    }

    private static RekorSubmissionStatus ParseStatus(string status) => status.ToLowerInvariant() switch
    {
        "pending" => RekorSubmissionStatus.Pending,
        "submitting" => RekorSubmissionStatus.Submitting,
        "submitted" => RekorSubmissionStatus.Submitted,
        "retrying" => RekorSubmissionStatus.Retrying,
        "dead_letter" => RekorSubmissionStatus.DeadLetter,
        _ => throw new ArgumentException($"Unknown status: {status}")
    };

    #endregion
}

/// <summary>
/// Fake time provider for testing.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _now;

    public FakeTimeProvider(DateTimeOffset initialTime)
    {
        _now = initialTime;
    }

    public override DateTimeOffset GetUtcNow() => _now;

    public void Advance(TimeSpan duration) => _now = _now.Add(duration);

    public void SetTime(DateTimeOffset time) => _now = time;
}
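The concurrency test above (DequeueAsync_ConcurrentSafe_NoDoubleDequeue) only holds if the queue claims rows atomically. The production PostgresRekorSubmissionQueue is not part of this diff, so the statement below is merely a sketch of the common PostgreSQL pattern for that guarantee, an UPDATE over a FOR UPDATE SKIP LOCKED sub-select, written against the columns from the test schema; the real implementation's SQL may differ.

// Assumed claim query; a typical way to satisfy the no-double-dequeue expectation tested above.
const string claimSql = """
    UPDATE attestor.rekor_submission_queue q
    SET status = 'submitting', updated_at = NOW()
    WHERE q.id IN (
        SELECT id
        FROM attestor.rekor_submission_queue
        WHERE status IN ('pending', 'retrying')
          AND (next_retry_at IS NULL OR next_retry_at <= NOW())
        ORDER BY created_at
        LIMIT @batchSize
        FOR UPDATE SKIP LOCKED
    )
    RETURNING q.id, q.tenant_id, q.bundle_sha256, q.dsse_payload, q.backend, q.attempt_count;
    """;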
@@ -9,8 +9,12 @@
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="BouncyCastle.Cryptography" Version="2.6.2" />
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="NSubstitute" Version="5.1.0" />
    <PackageReference Include="Testcontainers" Version="4.3.0" />
    <PackageReference Include="Testcontainers.PostgreSql" Version="4.3.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
@@ -0,0 +1,589 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TimeSkewValidationIntegrationTests.cs
|
||||
// Sprint: SPRINT_3000_0001_0003_rekor_time_skew_validation
|
||||
// Task: T10
|
||||
// Description: Integration tests for time skew validation in submission and verification services
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Attestor.Core.Observability;
|
||||
using StellaOps.Attestor.Core.Options;
|
||||
using StellaOps.Attestor.Core.Rekor;
|
||||
using StellaOps.Attestor.Core.Storage;
|
||||
using StellaOps.Attestor.Core.Submission;
|
||||
using StellaOps.Attestor.Core.Verification;
|
||||
using StellaOps.Attestor.Infrastructure.Submission;
|
||||
using StellaOps.Attestor.Infrastructure.Verification;
|
||||
using StellaOps.Attestor.Tests.Support;
|
||||
using StellaOps.Attestor.Verify;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for time skew validation in submission and verification services.
|
||||
/// Per SPRINT_3000_0001_0003 - T10: Add integration coverage.
|
||||
/// </summary>
|
||||
public sealed class TimeSkewValidationIntegrationTests : IDisposable
|
||||
{
|
||||
private static readonly byte[] HmacSecret = Encoding.UTF8.GetBytes("attestor-hmac-secret");
|
||||
private static readonly string HmacSecretBase64 = Convert.ToBase64String(HmacSecret);
|
||||
|
||||
private readonly AttestorMetrics _metrics;
|
||||
private readonly AttestorActivitySource _activitySource;
|
||||
private readonly DefaultDsseCanonicalizer _canonicalizer;
|
||||
private readonly InMemoryAttestorEntryRepository _repository;
|
||||
private readonly InMemoryAttestorDedupeStore _dedupeStore;
|
||||
private readonly InMemoryAttestorAuditSink _auditSink;
|
||||
private readonly NullAttestorArchiveStore _archiveStore;
|
||||
private readonly NullTransparencyWitnessClient _witnessClient;
|
||||
private readonly NullVerificationCache _verificationCache;
|
||||
private bool _disposed;
|
||||
|
||||
public TimeSkewValidationIntegrationTests()
|
||||
{
|
||||
_metrics = new AttestorMetrics();
|
||||
_activitySource = new AttestorActivitySource();
|
||||
_canonicalizer = new DefaultDsseCanonicalizer();
|
||||
_repository = new InMemoryAttestorEntryRepository();
|
||||
_dedupeStore = new InMemoryAttestorDedupeStore();
|
||||
_auditSink = new InMemoryAttestorAuditSink();
|
||||
_archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
|
||||
_witnessClient = new NullTransparencyWitnessClient();
|
||||
_verificationCache = new NullVerificationCache();
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
_metrics.Dispose();
|
||||
_activitySource.Dispose();
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
|
||||
#region Submission Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task Submission_WithTimeSkewBeyondRejectThreshold_ThrowsTimeSkewValidationException_WhenFailOnRejectEnabled()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Create a Rekor client that returns an integrated time way in the past
|
||||
var pastTime = DateTimeOffset.UtcNow.AddSeconds(-600); // 10 minutes ago
|
||||
var rekorClient = new ConfigurableTimeRekorClient(pastTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submissionService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<TimeSkewValidationException>(async () =>
|
||||
{
|
||||
await submissionService.SubmitAsync(request, context);
|
||||
});
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Submission_WithTimeSkewBeyondRejectThreshold_Succeeds_WhenFailOnRejectDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = false // Disabled - should log but not fail
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Create a Rekor client that returns an integrated time way in the past
|
||||
var pastTime = DateTimeOffset.UtcNow.AddSeconds(-600); // 10 minutes ago
|
||||
var rekorClient = new ConfigurableTimeRekorClient(pastTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submissionService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
|
||||
// Act
|
||||
var result = await submissionService.SubmitAsync(request, context);
|
||||
|
||||
// Assert - should succeed but emit metrics
|
||||
Assert.NotNull(result);
|
||||
Assert.NotNull(result.Uuid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Submission_WithTimeSkewBelowWarnThreshold_Succeeds()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Create a Rekor client that returns an integrated time just a few seconds ago
|
||||
var recentTime = DateTimeOffset.UtcNow.AddSeconds(-10); // 10 seconds ago
|
||||
var rekorClient = new ConfigurableTimeRekorClient(recentTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submissionService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
|
||||
// Act
|
||||
var result = await submissionService.SubmitAsync(request, context);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.NotNull(result.Uuid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Submission_WithFutureTimestamp_ThrowsTimeSkewValidationException()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
MaxFutureSkewSeconds = 60,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Create a Rekor client that returns a future integrated time
|
||||
var futureTime = DateTimeOffset.UtcNow.AddSeconds(120); // 2 minutes in the future
|
||||
var rekorClient = new ConfigurableTimeRekorClient(futureTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submissionService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<TimeSkewValidationException>(async () =>
|
||||
{
|
||||
await submissionService.SubmitAsync(request, context);
|
||||
});
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Submission_WhenValidationDisabled_SkipsTimeSkewCheck()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = false // Disabled
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Create a Rekor client with a very old integrated time
|
||||
var veryOldTime = DateTimeOffset.UtcNow.AddHours(-24);
|
||||
var rekorClient = new ConfigurableTimeRekorClient(veryOldTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submissionService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
|
||||
// Act - should succeed even with very old timestamp because validation is disabled
|
||||
var result = await submissionService.SubmitAsync(request, context);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.NotNull(result.Uuid);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Verification Integration Tests
|
||||
|
||||
[Fact]
|
||||
public async Task Verification_WithTimeSkewBeyondRejectThreshold_IncludesIssueInReport_WhenFailOnRejectEnabled()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// First, submit with normal time
|
||||
var submitRekorClient = new ConfigurableTimeRekorClient(DateTimeOffset.UtcNow);
|
||||
var submitTimeSkewValidator = new TimeSkewValidator(new TimeSkewOptions { Enabled = false }); // Disable for submission
|
||||
|
||||
var submitService = CreateSubmissionService(options, submitRekorClient, submitTimeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
var submissionResult = await submitService.SubmitAsync(request, context);
|
||||
|
||||
// Now manually update the entry with an old integrated time for verification testing
|
||||
var entry = await _repository.GetByUuidAsync(submissionResult.Uuid);
|
||||
Assert.NotNull(entry);
|
||||
|
||||
// Create a new entry with old integrated time
|
||||
var oldIntegratedTime = DateTimeOffset.UtcNow.AddSeconds(-600); // 10 minutes ago
|
||||
var updatedEntry = entry with
|
||||
{
|
||||
Log = entry.Log with
|
||||
{
|
||||
IntegratedTimeUtc = oldIntegratedTime
|
||||
}
|
||||
};
|
||||
await _repository.SaveAsync(updatedEntry);
|
||||
|
||||
// Create verification service with time skew validation enabled
|
||||
var verifyTimeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
|
||||
var verificationService = CreateVerificationService(options, rekorClient, verifyTimeSkewValidator);
|
||||
|
||||
// Act
|
||||
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
|
||||
{
|
||||
Uuid = submissionResult.Uuid,
|
||||
Bundle = request.Bundle
|
||||
});
|
||||
|
||||
// Assert
|
||||
Assert.False(verifyResult.Ok);
|
||||
Assert.Contains(verifyResult.Issues, i => i.Contains("time_skew"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Verification_WithTimeSkewBelowThreshold_PassesValidation()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Submit with recent integrated time
|
||||
var recentTime = DateTimeOffset.UtcNow.AddSeconds(-5);
|
||||
var rekorClient = new ConfigurableTimeRekorClient(recentTime);
|
||||
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submitService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
var submissionResult = await submitService.SubmitAsync(request, context);
|
||||
|
||||
// Verify
|
||||
var verifyRekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
|
||||
var verificationService = CreateVerificationService(options, verifyRekorClient, timeSkewValidator);
|
||||
|
||||
// Act
|
||||
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
|
||||
{
|
||||
Uuid = submissionResult.Uuid,
|
||||
Bundle = request.Bundle
|
||||
});
|
||||
|
||||
// Assert - should pass (no time skew issue)
|
||||
// Note: Other issues may exist (e.g., witness_missing) but not time_skew
|
||||
Assert.DoesNotContain(verifyResult.Issues, i => i.Contains("time_skew_rejected"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Verification_OfflineMode_SkipsTimeSkewValidation()
|
||||
{
|
||||
// Arrange
|
||||
var timeSkewOptions = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true, // Enabled, but should be skipped in offline mode due to missing integrated time
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
var options = CreateAttestorOptions(timeSkewOptions);
|
||||
|
||||
// Submit without integrated time (simulates offline stored entry)
|
||||
var rekorClient = new ConfigurableTimeRekorClient(integratedTime: null);
|
||||
var timeSkewValidator = new InstrumentedTimeSkewValidator(
|
||||
timeSkewOptions,
|
||||
_metrics,
|
||||
new NullLogger<InstrumentedTimeSkewValidator>());
|
||||
|
||||
var submitService = CreateSubmissionService(options, rekorClient, timeSkewValidator);
|
||||
var (request, context) = CreateSubmissionRequest();
|
||||
var submissionResult = await submitService.SubmitAsync(request, context);
|
||||
|
||||
// Verify
|
||||
var verifyRekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
|
||||
var verificationService = CreateVerificationService(options, verifyRekorClient, timeSkewValidator);
|
||||
|
||||
// Act
|
||||
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
|
||||
{
|
||||
Uuid = submissionResult.Uuid,
|
||||
Bundle = request.Bundle
|
||||
});
|
||||
|
||||
// Assert - should not have time skew issues (skipped due to missing integrated time)
|
||||
Assert.DoesNotContain(verifyResult.Issues, i => i.Contains("time_skew_rejected"));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Metrics Integration Tests
|
||||
|
||||
[Fact]
|
||||
public void TimeSkewMetrics_AreRegistered()
|
||||
{
|
||||
// Assert - metrics should be created
|
||||
Assert.NotNull(_metrics.TimeSkewDetectedTotal);
|
||||
Assert.NotNull(_metrics.TimeSkewSeconds);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private IOptions<AttestorOptions> CreateAttestorOptions(TimeSkewOptions timeSkewOptions)
|
||||
{
|
||||
return Options.Create(new AttestorOptions
|
||||
{
|
||||
Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.stellaops.test",
|
||||
ProofTimeoutMs = 1000,
|
||||
PollIntervalMs = 50,
|
||||
MaxAttempts = 2
|
||||
}
|
||||
},
|
||||
Security = new AttestorOptions.SecurityOptions
|
||||
{
|
||||
SignerIdentity = new AttestorOptions.SignerIdentityOptions
|
||||
{
|
||||
Mode = { "kms" },
|
||||
KmsKeys = { HmacSecretBase64 }
|
||||
}
|
||||
},
|
||||
TimeSkew = timeSkewOptions
|
||||
});
|
||||
}
|
||||
|
||||
private AttestorSubmissionService CreateSubmissionService(
|
||||
IOptions<AttestorOptions> options,
|
||||
IRekorClient rekorClient,
|
||||
ITimeSkewValidator timeSkewValidator)
|
||||
{
|
||||
return new AttestorSubmissionService(
|
||||
new AttestorSubmissionValidator(_canonicalizer),
|
||||
_repository,
|
||||
_dedupeStore,
|
||||
rekorClient,
|
||||
_witnessClient,
|
||||
_archiveStore,
|
||||
_auditSink,
|
||||
_verificationCache,
|
||||
timeSkewValidator,
|
||||
options,
|
||||
new NullLogger<AttestorSubmissionService>(),
|
||||
TimeProvider.System,
|
||||
_metrics);
|
||||
}
|
||||
|
||||
private AttestorVerificationService CreateVerificationService(
|
||||
IOptions<AttestorOptions> options,
|
||||
IRekorClient rekorClient,
|
||||
ITimeSkewValidator timeSkewValidator)
|
||||
{
|
||||
var engine = new AttestorVerificationEngine(
|
||||
_canonicalizer,
|
||||
new TestCryptoHash(),
|
||||
options,
|
||||
new NullLogger<AttestorVerificationEngine>());
|
||||
|
||||
return new AttestorVerificationService(
|
||||
_repository,
|
||||
_canonicalizer,
|
||||
rekorClient,
|
||||
_witnessClient,
|
||||
engine,
|
||||
timeSkewValidator,
|
||||
options,
|
||||
new NullLogger<AttestorVerificationService>(),
|
||||
_metrics,
|
||||
_activitySource,
|
||||
TimeProvider.System);
|
||||
}
|
||||
|
||||
private (AttestorSubmissionRequest Request, SubmissionContext Context) CreateSubmissionRequest()
|
||||
{
|
||||
var artifactSha256 = Convert.ToHexStringLower(RandomNumberGenerator.GetBytes(32));
|
||||
var payloadType = "application/vnd.in-toto+json";
|
||||
var payloadJson = $$$"""{"_type":"https://in-toto.io/Statement/v0.1","subject":[{"name":"test","digest":{"sha256":"{{{artifactSha256}}}"}}],"predicateType":"https://slsa.dev/provenance/v1","predicate":{}}""";
|
||||
var payload = Encoding.UTF8.GetBytes(payloadJson);
|
||||
|
||||
var payloadBase64 = Convert.ToBase64String(payload);
|
||||
|
||||
// Create HMAC signature
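// Test-only signing: an HMAC over the raw DSSE payload, keyed with the same shared secret that CreateAttestorOptions registers under KmsKeys.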
|
||||
using var hmac = new HMACSHA256(HmacSecret);
|
||||
var signature = hmac.ComputeHash(payload);
|
||||
var signatureBase64 = Convert.ToBase64String(signature);
|
||||
|
||||
var bundle = new DsseBundle
|
||||
{
|
||||
Mode = "kms",
|
||||
PayloadType = payloadType,
|
||||
Payload = payloadBase64,
|
||||
Signatures =
|
||||
[
|
||||
new DsseSignature
|
||||
{
|
||||
KeyId = "kms-key-1",
|
||||
Sig = signatureBase64
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var bundleBytes = _canonicalizer.Canonicalize(bundle);
|
||||
var bundleSha256 = Convert.ToHexStringLower(SHA256.HashData(bundleBytes));
|
||||
|
||||
var request = new AttestorSubmissionRequest
|
||||
{
|
||||
Bundle = bundle,
|
||||
Meta = new AttestorSubmissionRequest.MetaData
|
||||
{
|
||||
BundleSha256 = bundleSha256,
|
||||
Artifact = new AttestorSubmissionRequest.ArtifactInfo
|
||||
{
|
||||
Sha256 = artifactSha256,
|
||||
Kind = "container",
|
||||
ImageDigest = $"sha256:{artifactSha256}"
|
||||
},
|
||||
LogPreference = "primary"
|
||||
}
|
||||
};
|
||||
|
||||
var context = new SubmissionContext
|
||||
{
|
||||
CallerSubject = "urn:stellaops:signer",
|
||||
CallerAudience = "attestor",
|
||||
CallerClientId = "signer-service",
|
||||
CallerTenant = "default"
|
||||
};
|
||||
|
||||
return (request, context);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Doubles
|
||||
|
||||
/// <summary>
|
||||
/// A Rekor client that returns configurable integrated times.
|
||||
/// </summary>
|
||||
private sealed class ConfigurableTimeRekorClient : IRekorClient
|
||||
{
|
||||
private readonly DateTimeOffset? _integratedTime;
|
||||
private int _callCount;
|
||||
|
||||
public ConfigurableTimeRekorClient(DateTimeOffset? integratedTime)
|
||||
{
|
||||
_integratedTime = integratedTime;
|
||||
}
|
||||
|
||||
public Task<RekorSubmissionResponse> SubmitAsync(
|
||||
RekorSubmissionRequest request,
|
||||
string url,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var uuid = Guid.NewGuid().ToString("N");
|
||||
var index = Interlocked.Increment(ref _callCount);
|
||||
|
||||
return Task.FromResult(new RekorSubmissionResponse
|
||||
{
|
||||
Uuid = uuid,
|
||||
Index = index,
|
||||
LogUrl = url,
|
||||
Status = "included",
|
||||
IntegratedTimeUtc = _integratedTime
|
||||
});
|
||||
}
|
||||
|
||||
public Task<RekorProofResponse?> GetProofAsync(
|
||||
string uuid,
|
||||
string url,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult<RekorProofResponse?>(new RekorProofResponse
|
||||
{
|
||||
TreeId = "test-tree-id",
|
||||
LogIndex = 1,
|
||||
TreeSize = 100,
|
||||
RootHash = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32)),
|
||||
Hashes = [Convert.ToBase64String(RandomNumberGenerator.GetBytes(32))]
|
||||
});
|
||||
}
|
||||
|
||||
public Task<RekorEntryResponse?> GetEntryAsync(
|
||||
string uuid,
|
||||
string url,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult<RekorEntryResponse?>(null);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,707 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Receipts;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of the verification pipeline per advisory §9.1.
|
||||
/// Executes DSSE signature verification, ID recomputation, Rekor inclusion
/// (Merkle) proof verification, and trust anchor authorization.
|
||||
/// </summary>
|
||||
public sealed class VerificationPipeline : IVerificationPipeline
|
||||
{
|
||||
private readonly IReadOnlyList<IVerificationStep> _steps;
|
||||
private readonly ILogger<VerificationPipeline> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public VerificationPipeline(
|
||||
IEnumerable<IVerificationStep> steps,
|
||||
ILogger<VerificationPipeline> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_steps = steps?.ToList() ?? throw new ArgumentNullException(nameof(steps));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a pipeline with the default verification steps.
|
||||
/// </summary>
|
||||
public static VerificationPipeline CreateDefault(
|
||||
IProofBundleStore proofStore,
|
||||
IDsseVerifier dsseVerifier,
|
||||
IRekorVerifier rekorVerifier,
|
||||
ITrustAnchorResolver trustAnchorResolver,
|
||||
ILogger<VerificationPipeline> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
var steps = new List<IVerificationStep>
|
||||
{
|
||||
new DsseSignatureVerificationStep(proofStore, dsseVerifier, logger),
|
||||
new IdRecomputationVerificationStep(proofStore, logger),
|
||||
new RekorInclusionVerificationStep(proofStore, rekorVerifier, logger),
|
||||
new TrustAnchorVerificationStep(trustAnchorResolver, logger)
|
||||
};
|
||||
|
||||
return new VerificationPipeline(steps, logger, timeProvider);
|
||||
}
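// Illustrative usage only (the collaborators below stand in for whatever the host registers in DI):
//
//   var pipeline = VerificationPipeline.CreateDefault(
//       proofStore, dsseVerifier, rekorVerifier, trustAnchorResolver, logger);
//   var result = await pipeline.VerifyAsync(
//       new VerificationPipelineRequest { ProofBundleId = bundleId, VerifyRekor = true });
//   // result.Receipt records the outcome; result.Steps carries per-step timings and errors.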
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<VerificationPipelineResult> VerifyAsync(
|
||||
VerificationPipelineRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var context = new VerificationContext
|
||||
{
|
||||
ProofBundleId = request.ProofBundleId,
|
||||
TrustAnchorId = request.TrustAnchorId,
|
||||
VerifyRekor = request.VerifyRekor
|
||||
};
|
||||
|
||||
var stepResults = new List<VerificationStepResult>();
|
||||
var pipelineStartTime = _timeProvider.GetUtcNow();
|
||||
var overallPassed = true;
|
||||
string? failureReason = null;
|
||||
|
||||
_logger.LogInformation(
|
||||
"Starting verification pipeline for proof bundle {ProofBundleId}",
|
||||
request.ProofBundleId);
|
||||
|
||||
foreach (var step in _steps)
|
||||
{
|
||||
if (ct.IsCancellationRequested)
|
||||
{
|
||||
stepResults.Add(CreateCancelledResult(step.Name));
|
||||
overallPassed = false;
|
||||
failureReason = "Verification cancelled";
|
||||
break;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var result = await step.ExecuteAsync(context, ct);
|
||||
stepResults.Add(result);
|
||||
|
||||
if (!result.Passed)
|
||||
{
|
||||
overallPassed = false;
|
||||
failureReason = $"{step.Name}: {result.ErrorMessage}";
|
||||
|
||||
_logger.LogWarning(
|
||||
"Verification step {StepName} failed: {ErrorMessage}",
|
||||
step.Name, result.ErrorMessage);
|
||||
|
||||
// Continue to collect all results, but mark as failed
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Verification step {StepName} passed in {Duration}ms",
|
||||
step.Name, result.Duration.TotalMilliseconds);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Verification step {StepName} threw an exception", step.Name);
|
||||
|
||||
stepResults.Add(new VerificationStepResult
|
||||
{
|
||||
StepName = step.Name,
|
||||
Passed = false,
|
||||
Duration = TimeSpan.Zero,
|
||||
ErrorMessage = $"Exception: {ex.Message}"
|
||||
});
|
||||
|
||||
overallPassed = false;
|
||||
failureReason = $"{step.Name}: {ex.Message}";
|
||||
}
|
||||
}
|
||||
|
||||
var pipelineDuration = _timeProvider.GetUtcNow() - pipelineStartTime;
|
||||
|
||||
// Generate receipt
|
||||
var receipt = new VerificationReceipt
|
||||
{
|
||||
ReceiptId = GenerateReceiptId(),
|
||||
Result = overallPassed ? VerificationResult.Pass : VerificationResult.Fail,
|
||||
VerifiedAt = pipelineStartTime,
|
||||
VerifierVersion = request.VerifierVersion,
|
||||
ProofBundleId = request.ProofBundleId.Value,
|
||||
FailureReason = failureReason,
|
||||
StepsSummary = stepResults.Select(s => new VerificationStepSummary
|
||||
{
|
||||
StepName = s.StepName,
|
||||
Passed = s.Passed,
|
||||
DurationMs = (int)s.Duration.TotalMilliseconds
|
||||
}).ToList(),
|
||||
TotalDurationMs = (int)pipelineDuration.TotalMilliseconds
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
"Verification pipeline completed for {ProofBundleId}: {Result} in {Duration}ms",
|
||||
request.ProofBundleId, receipt.Result, pipelineDuration.TotalMilliseconds);
|
||||
|
||||
return new VerificationPipelineResult
|
||||
{
|
||||
IsValid = overallPassed,
|
||||
Receipt = receipt,
|
||||
Steps = stepResults
|
||||
};
|
||||
}
|
||||
|
||||
private static VerificationStepResult CreateCancelledResult(string stepName) => new()
|
||||
{
|
||||
StepName = stepName,
|
||||
Passed = false,
|
||||
Duration = TimeSpan.Zero,
|
||||
ErrorMessage = "Verification cancelled"
|
||||
};
|
||||
|
||||
private static string GenerateReceiptId()
|
||||
{
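// 16 random bytes => a 128-bit identifier, hex-encoded behind a stable "receipt:" prefix.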
|
||||
var bytes = new byte[16];
|
||||
RandomNumberGenerator.Fill(bytes);
|
||||
return $"receipt:{Convert.ToHexString(bytes).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE signature verification step (PROOF-API-0006).
|
||||
/// Verifies that all DSSE envelopes in the proof bundle have valid signatures.
|
||||
/// </summary>
|
||||
public sealed class DsseSignatureVerificationStep : IVerificationStep
|
||||
{
|
||||
private readonly IProofBundleStore _proofStore;
|
||||
private readonly IDsseVerifier _dsseVerifier;
|
||||
private readonly ILogger _logger;
|
||||
|
||||
public string Name => "dsse_signature";
|
||||
|
||||
public DsseSignatureVerificationStep(
|
||||
IProofBundleStore proofStore,
|
||||
IDsseVerifier dsseVerifier,
|
||||
ILogger logger)
|
||||
{
|
||||
_proofStore = proofStore ?? throw new ArgumentNullException(nameof(proofStore));
|
||||
_dsseVerifier = dsseVerifier ?? throw new ArgumentNullException(nameof(dsseVerifier));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<VerificationStepResult> ExecuteAsync(
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
try
|
||||
{
|
||||
// Get the proof bundle
|
||||
var bundle = await _proofStore.GetBundleAsync(context.ProofBundleId, ct);
|
||||
if (bundle is null)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, $"Proof bundle {context.ProofBundleId} not found");
|
||||
}
|
||||
|
||||
// Verify each envelope signature
|
||||
var verifiedKeyIds = new List<string>();
|
||||
foreach (var envelope in bundle.Envelopes)
|
||||
{
|
||||
var verifyResult = await _dsseVerifier.VerifyAsync(envelope, ct);
|
||||
if (!verifyResult.IsValid)
|
||||
{
|
||||
return CreateFailedResult(
|
||||
stopwatch.Elapsed,
|
||||
$"DSSE signature verification failed for envelope: {verifyResult.ErrorMessage}",
|
||||
keyId: verifyResult.KeyId);
|
||||
}
|
||||
verifiedKeyIds.Add(verifyResult.KeyId);
|
||||
}
|
||||
|
||||
// Store verified key IDs for trust anchor verification
|
||||
context.SetData("verifiedKeyIds", verifiedKeyIds);
|
||||
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = true,
|
||||
Duration = stopwatch.Elapsed,
|
||||
Details = $"Verified {bundle.Envelopes.Count} envelope(s)"
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "DSSE signature verification failed with exception");
|
||||
return CreateFailedResult(stopwatch.Elapsed, ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
private VerificationStepResult CreateFailedResult(TimeSpan duration, string error, string? keyId = null) => new()
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = duration,
|
||||
ErrorMessage = error,
|
||||
KeyId = keyId
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ID recomputation verification step (PROOF-API-0007).
|
||||
/// Verifies that content-addressed IDs match the actual content.
|
||||
/// </summary>
|
||||
public sealed class IdRecomputationVerificationStep : IVerificationStep
|
||||
{
|
||||
private readonly IProofBundleStore _proofStore;
|
||||
private readonly ILogger _logger;
|
||||
|
||||
public string Name => "id_recomputation";
|
||||
|
||||
public IdRecomputationVerificationStep(
|
||||
IProofBundleStore proofStore,
|
||||
ILogger logger)
|
||||
{
|
||||
_proofStore = proofStore ?? throw new ArgumentNullException(nameof(proofStore));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<VerificationStepResult> ExecuteAsync(
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
try
|
||||
{
|
||||
// Get the proof bundle
|
||||
var bundle = await _proofStore.GetBundleAsync(context.ProofBundleId, ct);
|
||||
if (bundle is null)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, $"Proof bundle {context.ProofBundleId} not found");
|
||||
}
|
||||
|
||||
// Recompute the proof bundle ID from content
|
||||
var recomputedId = ComputeProofBundleId(bundle);
|
||||
|
||||
// Compare with claimed ID
|
||||
var claimedId = context.ProofBundleId.Value;
|
||||
if (!recomputedId.Equals(claimedId, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = stopwatch.Elapsed,
|
||||
ErrorMessage = "Proof bundle ID does not match content hash",
|
||||
Expected = claimedId,
|
||||
Actual = recomputedId
|
||||
};
|
||||
}
|
||||
|
||||
// Verify each statement ID
|
||||
foreach (var statement in bundle.Statements)
|
||||
{
|
||||
var recomputedStatementId = ComputeStatementId(statement);
|
||||
if (!recomputedStatementId.Equals(statement.StatementId, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = stopwatch.Elapsed,
|
||||
ErrorMessage = "Statement ID mismatch",
|
||||
Expected = statement.StatementId,
|
||||
Actual = recomputedStatementId
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = true,
|
||||
Duration = stopwatch.Elapsed,
|
||||
Details = $"Verified bundle ID and {bundle.Statements.Count} statement ID(s)"
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "ID recomputation verification failed with exception");
|
||||
return CreateFailedResult(stopwatch.Elapsed, ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
private static string ComputeProofBundleId(ProofBundle bundle)
|
||||
{
|
||||
// Hash the canonical JSON representation of the bundle
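// Note: "canonical" here means System.Text.Json with camelCase naming and no indentation; a dedicated canonicalizer (e.g. RFC 8785 JCS) would pin key ordering explicitly.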
|
||||
var canonicalJson = JsonSerializer.Serialize(bundle, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private static string ComputeStatementId(ProofStatement statement)
|
||||
{
|
||||
// Hash the canonical JSON representation of the statement
|
||||
var canonicalJson = JsonSerializer.Serialize(statement, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private VerificationStepResult CreateFailedResult(TimeSpan duration, string error) => new()
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = duration,
|
||||
ErrorMessage = error
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor inclusion proof verification step (PROOF-API-0008).
|
||||
/// Verifies that proof bundles are included in Rekor transparency log.
|
||||
/// </summary>
|
||||
public sealed class RekorInclusionVerificationStep : IVerificationStep
|
||||
{
|
||||
private readonly IProofBundleStore _proofStore;
|
||||
private readonly IRekorVerifier _rekorVerifier;
|
||||
private readonly ILogger _logger;
|
||||
|
||||
public string Name => "rekor_inclusion";
|
||||
|
||||
public RekorInclusionVerificationStep(
|
||||
IProofBundleStore proofStore,
|
||||
IRekorVerifier rekorVerifier,
|
||||
ILogger logger)
|
||||
{
|
||||
_proofStore = proofStore ?? throw new ArgumentNullException(nameof(proofStore));
|
||||
_rekorVerifier = rekorVerifier ?? throw new ArgumentNullException(nameof(rekorVerifier));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<VerificationStepResult> ExecuteAsync(
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
// Skip if Rekor verification is disabled
|
||||
if (!context.VerifyRekor)
|
||||
{
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = true,
|
||||
Duration = stopwatch.Elapsed,
|
||||
Details = "Rekor verification skipped (disabled in request)"
|
||||
};
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// Get the proof bundle
|
||||
var bundle = await _proofStore.GetBundleAsync(context.ProofBundleId, ct);
|
||||
if (bundle is null)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, $"Proof bundle {context.ProofBundleId} not found");
|
||||
}
|
||||
|
||||
// Check if bundle has Rekor log entry
|
||||
if (bundle.RekorLogEntry is null)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, "Proof bundle has no Rekor log entry");
|
||||
}
|
||||
|
||||
// Verify inclusion proof
|
||||
var verifyResult = await _rekorVerifier.VerifyInclusionAsync(
|
||||
bundle.RekorLogEntry.LogId,
|
||||
bundle.RekorLogEntry.LogIndex,
|
||||
bundle.RekorLogEntry.InclusionProof,
|
||||
bundle.RekorLogEntry.SignedTreeHead,
|
||||
ct);
|
||||
|
||||
if (!verifyResult.IsValid)
|
||||
{
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = stopwatch.Elapsed,
|
||||
ErrorMessage = verifyResult.ErrorMessage,
|
||||
LogIndex = bundle.RekorLogEntry.LogIndex
|
||||
};
|
||||
}
|
||||
|
||||
// Store log index for receipt
|
||||
context.SetData("rekorLogIndex", bundle.RekorLogEntry.LogIndex);
|
||||
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = true,
|
||||
Duration = stopwatch.Elapsed,
|
||||
Details = $"Verified inclusion at log index {bundle.RekorLogEntry.LogIndex}",
|
||||
LogIndex = bundle.RekorLogEntry.LogIndex
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Rekor inclusion verification failed with exception");
|
||||
return CreateFailedResult(stopwatch.Elapsed, ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
private VerificationStepResult CreateFailedResult(TimeSpan duration, string error) => new()
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = duration,
|
||||
ErrorMessage = error
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor verification step.
|
||||
/// Verifies that signatures were made by keys authorized in a trust anchor.
|
||||
/// </summary>
|
||||
public sealed class TrustAnchorVerificationStep : IVerificationStep
|
||||
{
|
||||
private readonly ITrustAnchorResolver _trustAnchorResolver;
|
||||
private readonly ILogger _logger;
|
||||
|
||||
public string Name => "trust_anchor";
|
||||
|
||||
public TrustAnchorVerificationStep(
|
||||
ITrustAnchorResolver trustAnchorResolver,
|
||||
ILogger logger)
|
||||
{
|
||||
_trustAnchorResolver = trustAnchorResolver ?? throw new ArgumentNullException(nameof(trustAnchorResolver));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<VerificationStepResult> ExecuteAsync(
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
try
|
||||
{
|
||||
// Get verified key IDs from DSSE step
|
||||
var verifiedKeyIds = context.GetData<List<string>>("verifiedKeyIds");
|
||||
if (verifiedKeyIds is null || verifiedKeyIds.Count == 0)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, "No verified key IDs from DSSE step");
|
||||
}
|
||||
|
||||
// Resolve trust anchor
|
||||
var anchor = context.TrustAnchorId is not null
|
||||
? await _trustAnchorResolver.GetAnchorAsync(context.TrustAnchorId.Value, ct)
|
||||
: await _trustAnchorResolver.FindAnchorForProofAsync(context.ProofBundleId, ct);
|
||||
|
||||
if (anchor is null)
|
||||
{
|
||||
return CreateFailedResult(stopwatch.Elapsed, "No matching trust anchor found");
|
||||
}
|
||||
|
||||
// Verify all key IDs are authorized
|
||||
foreach (var keyId in verifiedKeyIds)
|
||||
{
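// A key passes only if the anchor lists it as allowed and has not revoked it.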
|
||||
if (!anchor.AllowedKeyIds.Contains(keyId) || anchor.RevokedKeyIds.Contains(keyId))
|
||||
{
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = stopwatch.Elapsed,
|
||||
ErrorMessage = $"Key {keyId} is not authorized by trust anchor {anchor.AnchorId}",
|
||||
KeyId = keyId
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return new VerificationStepResult
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = true,
|
||||
Duration = stopwatch.Elapsed,
|
||||
Details = $"Verified {verifiedKeyIds.Count} key(s) against anchor {anchor.AnchorId}"
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Trust anchor verification failed with exception");
|
||||
return CreateFailedResult(stopwatch.Elapsed, ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
private VerificationStepResult CreateFailedResult(TimeSpan duration, string error) => new()
|
||||
{
|
||||
StepName = Name,
|
||||
Passed = false,
|
||||
Duration = duration,
|
||||
ErrorMessage = error
|
||||
};
|
||||
}
|
||||
|
||||
#region Supporting Interfaces and Types
|
||||
|
||||
/// <summary>
|
||||
/// Store for proof bundles.
|
||||
/// </summary>
|
||||
public interface IProofBundleStore
|
||||
{
|
||||
Task<ProofBundle?> GetBundleAsync(ProofBundleId bundleId, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope verifier.
|
||||
/// </summary>
|
||||
public interface IDsseVerifier
|
||||
{
|
||||
Task<DsseVerificationResult> VerifyAsync(DsseEnvelope envelope, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of DSSE verification.
|
||||
/// </summary>
|
||||
public sealed record DsseVerificationResult
|
||||
{
|
||||
public required bool IsValid { get; init; }
|
||||
public required string KeyId { get; init; }
|
||||
public string? ErrorMessage { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor transparency log verifier.
|
||||
/// </summary>
|
||||
public interface IRekorVerifier
|
||||
{
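// Implementations typically recompute the Merkle root from the entry hash and the inclusion path, compare it with the signed tree head's root, and check the tree head signature.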
|
||||
Task<RekorVerificationResult> VerifyInclusionAsync(
|
||||
string logId,
|
||||
long logIndex,
|
||||
InclusionProof inclusionProof,
|
||||
SignedTreeHead signedTreeHead,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of Rekor verification.
|
||||
/// </summary>
|
||||
public sealed record RekorVerificationResult
|
||||
{
|
||||
public required bool IsValid { get; init; }
|
||||
public string? ErrorMessage { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor resolver.
|
||||
/// </summary>
|
||||
public interface ITrustAnchorResolver
|
||||
{
|
||||
Task<TrustAnchorInfo?> GetAnchorAsync(Guid anchorId, CancellationToken ct = default);
|
||||
Task<TrustAnchorInfo?> FindAnchorForProofAsync(ProofBundleId proofBundleId, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor information.
|
||||
/// </summary>
|
||||
public sealed record TrustAnchorInfo
|
||||
{
|
||||
public required Guid AnchorId { get; init; }
|
||||
public required IReadOnlyList<string> AllowedKeyIds { get; init; }
|
||||
public required IReadOnlyList<string> RevokedKeyIds { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A proof bundle containing statements and envelopes.
|
||||
/// </summary>
|
||||
public sealed record ProofBundle
|
||||
{
|
||||
public required IReadOnlyList<ProofStatement> Statements { get; init; }
|
||||
public required IReadOnlyList<DsseEnvelope> Envelopes { get; init; }
|
||||
public RekorLogEntry? RekorLogEntry { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A statement within a proof bundle.
|
||||
/// </summary>
|
||||
public sealed record ProofStatement
|
||||
{
|
||||
public required string StatementId { get; init; }
|
||||
public required string PredicateType { get; init; }
|
||||
public required object Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A DSSE envelope.
|
||||
/// </summary>
|
||||
public sealed record DsseEnvelope
|
||||
{
|
||||
public required string PayloadType { get; init; }
|
||||
public required byte[] Payload { get; init; }
|
||||
public required IReadOnlyList<DsseSignature> Signatures { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A signature in a DSSE envelope.
|
||||
/// </summary>
|
||||
public sealed record DsseSignature
|
||||
{
|
||||
public required string KeyId { get; init; }
|
||||
public required byte[] Sig { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor log entry information.
|
||||
/// </summary>
|
||||
public sealed record RekorLogEntry
|
||||
{
|
||||
public required string LogId { get; init; }
|
||||
public required long LogIndex { get; init; }
|
||||
public required InclusionProof InclusionProof { get; init; }
|
||||
public required SignedTreeHead SignedTreeHead { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Merkle tree inclusion proof.
|
||||
/// </summary>
|
||||
public sealed record InclusionProof
|
||||
{
|
||||
public required IReadOnlyList<byte[]> Hashes { get; init; }
|
||||
public required long TreeSize { get; init; }
|
||||
public required byte[] RootHash { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Signed tree head from transparency log.
|
||||
/// </summary>
|
||||
public sealed record SignedTreeHead
|
||||
{
|
||||
public required long TreeSize { get; init; }
|
||||
public required byte[] RootHash { get; init; }
|
||||
public required byte[] Signature { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,631 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Copyright (c) 2025 StellaOps Contributors
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using NSubstitute;
|
||||
using StellaOps.Attestor.ProofChain;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
using StellaOps.Attestor.ProofChain.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Load tests for proof chain API endpoints and verification pipeline.
|
||||
/// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
|
||||
/// Task: PROOF-API-0012
|
||||
/// </summary>
|
||||
public class ApiLoadTests
|
||||
{
|
||||
private readonly ILogger<VerificationPipeline> _logger = NullLogger<VerificationPipeline>.Instance;
|
||||
|
||||
#region Proof Spine Creation Load Tests
|
||||
|
||||
[Fact]
|
||||
public async Task CreateProofSpine_ConcurrentRequests_MaintainsThroughput()
|
||||
{
|
||||
// Arrange: Create synthetic SBOM entries for load testing
|
||||
const int concurrencyLevel = 50;
|
||||
const int operationsPerClient = 20;
|
||||
var totalOperations = concurrencyLevel * operationsPerClient;
|
||||
|
||||
var proofSpineBuilder = CreateTestProofSpineBuilder();
|
||||
var latencies = new ConcurrentBag<long>();
|
||||
var errors = new ConcurrentBag<Exception>();
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
// Act: Run concurrent proof spine creations
|
||||
var tasks = Enumerable.Range(0, concurrencyLevel)
|
||||
.Select(clientId => Task.Run(async () =>
|
||||
{
|
||||
for (var i = 0; i < operationsPerClient; i++)
|
||||
{
|
||||
try
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var entryId = GenerateSyntheticEntryId(clientId, i);
|
||||
var spine = await proofSpineBuilder.BuildAsync(
|
||||
entryId,
|
||||
GenerateSyntheticEvidenceIds(3),
|
||||
$"sha256:{GenerateHash("reasoning")}",
|
||||
$"sha256:{GenerateHash("vex")}",
|
||||
"v2.3.1",
|
||||
CancellationToken.None);
|
||||
sw.Stop();
|
||||
latencies.Add(sw.ElapsedMilliseconds);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
errors.Add(ex);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
stopwatch.Stop();
|
||||
|
||||
// Assert: Verify load test metrics
|
||||
var successCount = latencies.Count;
|
||||
var errorCount = errors.Count;
|
||||
var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
|
||||
var avgLatency = latencies.Any() ? latencies.Average() : 0;
|
||||
var p95Latency = CalculatePercentile(latencies, 95);
|
||||
var p99Latency = CalculatePercentile(latencies, 99);
|
||||
|
||||
// Performance assertions
|
||||
successCount.Should().Be(totalOperations, "all operations should complete successfully");
|
||||
errorCount.Should().Be(0, "no errors should occur during load test");
|
||||
throughput.Should().BeGreaterThan(100, "throughput should exceed 100 ops/sec");
|
||||
avgLatency.Should().BeLessThan(50, "average latency should be under 50ms");
|
||||
p99Latency.Should().BeLessThan(200, "p99 latency should be under 200ms");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerificationPipeline_ConcurrentVerifications_MaintainsAccuracy()
|
||||
{
|
||||
// Arrange
|
||||
const int concurrencyLevel = 30;
|
||||
const int verificationsPerClient = 10;
|
||||
var totalVerifications = concurrencyLevel * verificationsPerClient;
|
||||
|
||||
var mockDsseVerifier = CreateMockDsseVerifier();
|
||||
var mockIdRecomputer = CreateMockIdRecomputer();
|
||||
var mockRekorVerifier = CreateMockRekorVerifier();
|
||||
var pipeline = new VerificationPipeline(
|
||||
mockDsseVerifier,
|
||||
mockIdRecomputer,
|
||||
mockRekorVerifier,
|
||||
_logger);
|
||||
|
||||
var results = new ConcurrentBag<VerificationResult>();
|
||||
var latencies = new ConcurrentBag<long>();
|
||||
|
||||
// Act: Run concurrent verifications
|
||||
var tasks = Enumerable.Range(0, concurrencyLevel)
|
||||
.Select(clientId => Task.Run(async () =>
|
||||
{
|
||||
for (var i = 0; i < verificationsPerClient; i++)
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var proof = GenerateSyntheticProof(clientId, i);
|
||||
var result = await pipeline.VerifyAsync(proof, CancellationToken.None);
|
||||
sw.Stop();
|
||||
latencies.Add(sw.ElapsedMilliseconds);
|
||||
results.Add(result);
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
// Assert: All verifications should be deterministic
|
||||
results.Count.Should().Be(totalVerifications);
|
||||
results.All(r => r.IsValid).Should().BeTrue("all synthetic proofs should verify successfully");
|
||||
|
||||
var avgLatency = latencies.Average();
|
||||
avgLatency.Should().BeLessThan(30, "verification should be fast");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Deterministic Ordering Tests Under Load
|
||||
|
||||
[Fact]
|
||||
public void ProofSpineOrdering_UnderConcurrency_RemainsDeterministic()
|
||||
{
|
||||
// Arrange: Same inputs should produce same outputs under concurrent access
|
||||
const int iterations = 100;
|
||||
|
||||
|
||||
var evidenceIds = Enumerable.Range(0, 5)
|
||||
.Select(i => $"sha256:{GenerateHash($"evidence{i}")}")
|
||||
.ToArray();
|
||||
|
||||
var results = new ConcurrentBag<string>();
|
||||
|
||||
// Act: Compute proof spine hash concurrently multiple times
|
||||
Parallel.For(0, iterations, _ =>
|
||||
{
|
||||
var sorted = evidenceIds.OrderBy(x => x).ToArray();
|
||||
var combined = string.Join(":", sorted);
|
||||
var hash = GenerateHash(combined);
|
||||
results.Add(hash);
|
||||
});
|
||||
|
||||
// Assert: All results should be identical (deterministic)
|
||||
results.Distinct().Count().Should().Be(1, "concurrent computations should be deterministic");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MerkleTree_ConcurrentBuilding_ProducesSameRoot()
|
||||
{
|
||||
// Arrange
|
||||
const int leafCount = 1000;
|
||||
const int iterations = 20;
|
||||
|
||||
var leaves = Enumerable.Range(0, leafCount)
|
||||
.Select(i => Encoding.UTF8.GetBytes($"leaf-{i:D5}"))
|
||||
.ToList();
|
||||
|
||||
var roots = new ConcurrentBag<string>();
|
||||
|
||||
// Act: Build Merkle tree concurrently
|
||||
await Parallel.ForEachAsync(Enumerable.Range(0, iterations), (_, _) =>
{
var builder = new MerkleTreeBuilder();
foreach (var leaf in leaves)
{
builder.AddLeaf(leaf);
}
var root = builder.ComputeRoot();
roots.Add(Convert.ToHexString(root));
return ValueTask.CompletedTask;
});
|
||||
|
||||
// Assert: All roots should be identical
|
||||
roots.Distinct().Count().Should().Be(1, "Merkle tree root should be deterministic");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Throughput Benchmarks
|
||||
|
||||
[Theory]
|
||||
[InlineData(10, 100)] // Light load
|
||||
[InlineData(50, 50)] // Medium load
|
||||
[InlineData(100, 20)] // Heavy load
|
||||
public async Task ThroughputBenchmark_VariousLoadProfiles(int concurrency, int opsPerClient)
|
||||
{
|
||||
// Arrange
|
||||
var totalOps = concurrency * opsPerClient;
|
||||
var successCount = 0;
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
// Act: Simulate API calls
|
||||
var tasks = Enumerable.Range(0, concurrency)
|
||||
.Select(_ => Task.Run(() =>
|
||||
{
|
||||
for (var i = 0; i < opsPerClient; i++)
|
||||
{
|
||||
// Simulate proof creation work
|
||||
var hash = GenerateHash($"proof-{Guid.NewGuid()}");
|
||||
Interlocked.Increment(ref successCount);
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
stopwatch.Stop();
|
||||
|
||||
// Assert
|
||||
var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
|
||||
successCount.Should().Be(totalOps);
|
||||
throughput.Should().BeGreaterThan(1000, $"throughput at {concurrency} concurrency should exceed 1000 ops/sec");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LatencyDistribution_UnderLoad_MeetsSloBudgets()
|
||||
{
|
||||
// Arrange: Define SLO budgets
|
||||
const double maxP50Ms = 10;
|
||||
const double maxP90Ms = 25;
|
||||
const double maxP99Ms = 100;
|
||||
const int sampleSize = 1000;
|
||||
|
||||
var latencies = new ConcurrentBag<double>();
|
||||
|
||||
// Act: Collect latency samples
|
||||
await Parallel.ForEachAsync(Enumerable.Range(0, sampleSize), async (i, ct) =>
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
// Simulate verification work
|
||||
var hash = GenerateHash($"sample-{i}");
|
||||
await Task.Delay(1, ct); // Simulate I/O
|
||||
sw.Stop();
|
||||
latencies.Add(sw.Elapsed.TotalMilliseconds);
|
||||
});
|
||||
|
||||
// Calculate percentiles
|
||||
var sorted = latencies.OrderBy(x => x).ToList();
|
||||
var p50 = CalculatePercentileFromSorted(sorted, 50);
|
||||
var p90 = CalculatePercentileFromSorted(sorted, 90);
|
||||
var p99 = CalculatePercentileFromSorted(sorted, 99);
|
||||
|
||||
// Assert: SLO compliance
|
||||
p50.Should().BeLessThan(maxP50Ms, "p50 latency should meet SLO");
|
||||
p90.Should().BeLessThan(maxP90Ms, "p90 latency should meet SLO");
|
||||
p99.Should().BeLessThan(maxP99Ms, "p99 latency should meet SLO");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Memory and Resource Tests
|
||||
|
||||
[Fact]
|
||||
public void LargeProofBatch_DoesNotCauseMemorySpike()
|
||||
{
|
||||
// Arrange
|
||||
const int batchSize = 10_000;
|
||||
var initialMemory = GC.GetTotalMemory(true);
|
||||
|
||||
// Act: Create large batch of proofs
|
||||
var proofs = new List<string>(batchSize);
|
||||
for (var i = 0; i < batchSize; i++)
|
||||
{
|
||||
var proof = GenerateSyntheticProofJson(i);
|
||||
proofs.Add(proof);
|
||||
}
|
||||
|
||||
// Force GC and measure
|
||||
var peakMemory = GC.GetTotalMemory(false);
|
||||
proofs.Clear();
|
||||
GC.Collect();
|
||||
var finalMemory = GC.GetTotalMemory(true);
|
||||
|
||||
// Assert: Memory should not grow unbounded
|
||||
var memoryGrowth = peakMemory - initialMemory;
|
||||
var memoryRetained = finalMemory - initialMemory;
|
||||
|
||||
// Each proof is ~500 bytes, so 10k proofs ≈ 5MB is reasonable
|
||||
memoryGrowth.Should().BeLessThan(50_000_000, "memory growth should be bounded (~50MB max for 10k proofs)");
|
||||
memoryRetained.Should().BeLessThan(10_000_000, "memory should be released after clearing");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static IProofSpineBuilder CreateTestProofSpineBuilder()
|
||||
{
|
||||
// Create a mock proof spine builder for load testing
|
||||
var builder = Substitute.For<IProofSpineBuilder>();
|
||||
builder.BuildAsync(
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string[]>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<CancellationToken>())
|
||||
.Returns(callInfo =>
|
||||
{
|
||||
var entryId = callInfo.ArgAt<string>(0);
|
||||
return Task.FromResult(new ProofSpine
|
||||
{
|
||||
EntryId = entryId,
|
||||
SpineId = $"sha256:{GenerateHash(entryId)}",
|
||||
PolicyVersion = callInfo.ArgAt<string>(4),
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
});
|
||||
});
|
||||
return builder;
|
||||
}
|
||||
|
||||
private static IDsseVerifier CreateMockDsseVerifier()
|
||||
{
|
||||
var verifier = Substitute.For<IDsseVerifier>();
|
||||
verifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new DsseVerificationResult { IsValid = true }));
|
||||
return verifier;
|
||||
}
|
||||
|
||||
private static IIdRecomputer CreateMockIdRecomputer()
|
||||
{
|
||||
var recomputer = Substitute.For<IIdRecomputer>();
|
||||
recomputer.VerifyAsync(Arg.Any<ProofBundle>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new IdVerificationResult { IsValid = true }));
|
||||
return recomputer;
|
||||
}
|
||||
|
||||
private static IRekorVerifier CreateMockRekorVerifier()
|
||||
{
|
||||
var verifier = Substitute.For<IRekorVerifier>();
|
||||
verifier.VerifyInclusionAsync(Arg.Any<RekorEntry>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new RekorVerificationResult { IsValid = true }));
|
||||
return verifier;
|
||||
}
|
||||
|
||||
private static string GenerateSyntheticEntryId(int clientId, int index)
|
||||
{
|
||||
var hash = GenerateHash($"entry-{clientId}-{index}");
|
||||
return $"sha256:{hash}:pkg:npm/example@1.0.{index}";
|
||||
}
|
||||
|
||||
private static string[] GenerateSyntheticEvidenceIds(int count)
|
||||
{
|
||||
return Enumerable.Range(0, count)
|
||||
.Select(i => $"sha256:{GenerateHash($"evidence-{i}")}")
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static ProofBundle GenerateSyntheticProof(int clientId, int index)
|
||||
{
|
||||
return new ProofBundle
|
||||
{
|
||||
EntryId = GenerateSyntheticEntryId(clientId, index),
|
||||
Envelope = new DsseEnvelope
|
||||
{
|
||||
PayloadType = "application/vnd.stellaops.proof+json",
|
||||
Payload = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{{\"id\":\"{clientId}-{index}\"}}")),
|
||||
Signatures = new[]
|
||||
{
|
||||
new DsseSignature
|
||||
{
|
||||
KeyId = "test-key",
|
||||
Sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature"))
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string GenerateSyntheticProofJson(int index)
|
||||
{
|
||||
return $@"{{
|
||||
""entryId"": ""sha256:{GenerateHash($"entry-{index}")}:pkg:npm/example@1.0.{index}"",
|
||||
""spineId"": ""sha256:{GenerateHash($"spine-{index}")}"",
|
||||
""evidenceIds"": [""{GenerateHash($"ev1-{index}")}"", ""{GenerateHash($"ev2-{index}")}""],
|
||||
""reasoningId"": ""sha256:{GenerateHash($"reason-{index}")}"",
|
||||
""vexVerdictId"": ""sha256:{GenerateHash($"vex-{index}")}"",
|
||||
""policyVersion"": ""v2.3.1"",
|
||||
""createdAt"": ""{DateTimeOffset.UtcNow:O}""
|
||||
}}";
|
||||
}
|
||||
|
||||
private static string GenerateHash(string input)
|
||||
{
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
|
||||
return Convert.ToHexString(bytes).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static double CalculatePercentile(ConcurrentBag<long> values, int percentile)
|
||||
{
|
||||
if (!values.Any()) return 0;
|
||||
var sorted = values.OrderBy(x => x).ToList();
|
||||
return CalculatePercentileFromSorted(sorted.Select(x => (double)x).ToList(), percentile);
|
||||
}
|
||||
|
||||
private static double CalculatePercentileFromSorted<T>(List<T> sorted, int percentile) where T : IConvertible
|
||||
{
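// Nearest-rank percentile: ceil(p/100 * N) gives the 1-based rank; clamp it to a valid index.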
|
||||
if (sorted.Count == 0) return 0;
|
||||
var index = (int)Math.Ceiling(percentile / 100.0 * sorted.Count) - 1;
|
||||
index = Math.Max(0, Math.Min(index, sorted.Count - 1));
|
||||
return sorted[index].ToDouble(null);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
#region Supporting Types for Load Tests
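// Lightweight stand-ins for the production verification contracts, kept local to the test assembly so the load tests can run without the real implementations.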
|
||||
|
||||
/// <summary>
|
||||
/// Interface for proof spine building (mock target for load tests).
|
||||
/// </summary>
|
||||
public interface IProofSpineBuilder
|
||||
{
|
||||
Task<ProofSpine> BuildAsync(
|
||||
string entryId,
|
||||
string[] evidenceIds,
|
||||
string reasoningId,
|
||||
string vexVerdictId,
|
||||
string policyVersion,
|
||||
CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a proof spine created for an SBOM entry.
|
||||
/// </summary>
|
||||
public class ProofSpine
|
||||
{
|
||||
public required string EntryId { get; init; }
|
||||
public required string SpineId { get; init; }
|
||||
public required string PolicyVersion { get; init; }
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for DSSE envelope verification.
|
||||
/// </summary>
|
||||
public interface IDsseVerifier
|
||||
{
|
||||
Task<DsseVerificationResult> VerifyAsync(DsseEnvelope envelope, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE verification result.
|
||||
/// </summary>
|
||||
public class DsseVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for ID recomputation verification.
|
||||
/// </summary>
|
||||
public interface IIdRecomputer
|
||||
{
|
||||
Task<IdVerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ID verification result.
|
||||
/// </summary>
|
||||
public class IdVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public string? ExpectedId { get; init; }
|
||||
public string? ActualId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for Rekor inclusion proof verification.
|
||||
/// </summary>
|
||||
public interface IRekorVerifier
|
||||
{
|
||||
Task<RekorVerificationResult> VerifyInclusionAsync(RekorEntry entry, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor verification result.
|
||||
/// </summary>
|
||||
public class RekorVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public long? LogIndex { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a Rekor transparency log entry.
|
||||
/// </summary>
|
||||
public class RekorEntry
|
||||
{
|
||||
public long LogIndex { get; init; }
|
||||
public string? LogId { get; init; }
|
||||
public string? Body { get; init; }
|
||||
public DateTimeOffset IntegratedTime { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope for proof bundles.
|
||||
/// </summary>
|
||||
public class DsseEnvelope
|
||||
{
|
||||
public required string PayloadType { get; init; }
|
||||
public required string Payload { get; init; }
|
||||
public required DsseSignature[] Signatures { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE signature within an envelope.
|
||||
/// </summary>
|
||||
public class DsseSignature
|
||||
{
|
||||
public required string KeyId { get; init; }
|
||||
public required string Sig { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete proof bundle for verification.
|
||||
/// </summary>
|
||||
public class ProofBundle
|
||||
{
|
||||
public required string EntryId { get; init; }
|
||||
public required DsseEnvelope Envelope { get; init; }
|
||||
public RekorEntry? RekorEntry { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete verification result from the pipeline.
|
||||
/// </summary>
|
||||
public class VerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public DsseVerificationResult? DsseResult { get; init; }
|
||||
public IdVerificationResult? IdResult { get; init; }
|
||||
public RekorVerificationResult? RekorResult { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verification pipeline that runs all verification steps.
|
||||
/// </summary>
|
||||
public class VerificationPipeline
|
||||
{
|
||||
private readonly IDsseVerifier _dsseVerifier;
|
||||
private readonly IIdRecomputer _idRecomputer;
|
||||
private readonly IRekorVerifier _rekorVerifier;
|
||||
private readonly ILogger<VerificationPipeline> _logger;
|
||||
|
||||
public VerificationPipeline(
|
||||
IDsseVerifier dsseVerifier,
|
||||
IIdRecomputer idRecomputer,
|
||||
IRekorVerifier rekorVerifier,
|
||||
ILogger<VerificationPipeline> logger)
|
||||
{
|
||||
_dsseVerifier = dsseVerifier;
|
||||
_idRecomputer = idRecomputer;
|
||||
_rekorVerifier = rekorVerifier;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task<VerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken)
|
||||
{
|
||||
// Step 1: DSSE signature verification
|
||||
var dsseResult = await _dsseVerifier.VerifyAsync(bundle.Envelope, cancellationToken);
|
||||
if (!dsseResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
Error = $"DSSE verification failed: {dsseResult.Error}"
|
||||
};
|
||||
}
|
||||
|
||||
// Step 2: ID recomputation
|
||||
var idResult = await _idRecomputer.VerifyAsync(bundle, cancellationToken);
|
||||
if (!idResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
Error = $"ID mismatch: expected {idResult.ExpectedId}, got {idResult.ActualId}"
|
||||
};
|
||||
}
|
||||
|
||||
// Step 3: Rekor inclusion (if entry present)
|
||||
RekorVerificationResult? rekorResult = null;
|
||||
if (bundle.RekorEntry != null)
|
||||
{
|
||||
rekorResult = await _rekorVerifier.VerifyInclusionAsync(bundle.RekorEntry, cancellationToken);
|
||||
if (!rekorResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
RekorResult = rekorResult,
|
||||
Error = $"Rekor verification failed: {rekorResult.Error}"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = true,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
RekorResult = rekorResult
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -13,7 +13,10 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.24407.12" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -0,0 +1,465 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerificationPipelineIntegrationTests.cs
|
||||
// Sprint: SPRINT_0501_0001_0001_proof_evidence_chain_master
|
||||
// Task: PROOF-MASTER-0002
|
||||
// Description: Integration tests for the full proof chain verification pipeline
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using NSubstitute;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the verification pipeline.
|
||||
/// Tests PROOF-MASTER-0002: Full proof chain verification flow.
|
||||
/// </summary>
|
||||
public class VerificationPipelineIntegrationTests
|
||||
{
|
||||
private readonly IProofBundleStore _proofStore;
|
||||
private readonly IDsseVerifier _dsseVerifier;
|
||||
private readonly IRekorVerifier _rekorVerifier;
|
||||
private readonly ITrustAnchorResolver _trustAnchorResolver;
|
||||
private readonly ILogger<VerificationPipeline> _logger;
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
|
||||
public VerificationPipelineIntegrationTests()
|
||||
{
|
||||
_proofStore = Substitute.For<IProofBundleStore>();
|
||||
_dsseVerifier = Substitute.For<IDsseVerifier>();
|
||||
_rekorVerifier = Substitute.For<IRekorVerifier>();
|
||||
_trustAnchorResolver = Substitute.For<ITrustAnchorResolver>();
|
||||
_logger = NullLogger<VerificationPipeline>.Instance;
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 17, 12, 0, 0, TimeSpan.Zero));
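// A fixed clock keeps time-derived values (e.g. receipt timestamps) deterministic across runs.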
|
||||
}
|
||||
|
||||
#region Full Pipeline Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_ValidProofBundle_AllStepsPass()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:valid123");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true,
|
||||
VerifierVersion = "1.0.0-test"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Pass);
|
||||
result.Steps.Should().HaveCount(4);
|
||||
result.Steps.Should().OnlyContain(s => s.Passed);
|
||||
result.FirstFailure.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_InvalidDsseSignature_FailsAtFirstStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:invalid-sig");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupInvalidDsseVerification(keyId, "Signature mismatch");
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Fail);
|
||||
result.FirstFailure.Should().NotBeNull();
|
||||
result.FirstFailure!.StepName.Should().Be("dsse_signature");
|
||||
result.Receipt.FailureReason.Should().Contain("Signature mismatch");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputation()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:wrong-id");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithWrongId(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "id_recomputation" && !s.Passed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_NoRekorEntry_FailsAtRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:no-rekor");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithoutRekor(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "rekor_inclusion" && !s.Passed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:skip-rekor");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithoutRekor(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false // Skip Rekor
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
|
||||
rekorStep.Should().NotBeNull();
|
||||
rekorStep!.Passed.Should().BeTrue();
|
||||
rekorStep.Details.Should().Contain("skipped");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchor()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:bad-key");
|
||||
var keyId = "unauthorized-key";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupTrustAnchorWithoutKey(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "trust_anchor" && !s.Passed);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Receipt Generation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_GeneratesReceipt_WithCorrectFields()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:receipt-test");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifierVersion = "2.0.0"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Receipt.Should().NotBeNull();
|
||||
result.Receipt.ReceiptId.Should().StartWith("receipt:");
|
||||
result.Receipt.VerifierVersion.Should().Be("2.0.0");
|
||||
result.Receipt.ProofBundleId.Should().Be(bundleId.Value);
|
||||
result.Receipt.StepsSummary.Should().HaveCount(4);
|
||||
result.Receipt.TotalDurationMs.Should().BeGreaterOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_FailingPipeline_ReceiptContainsFailureReason()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:fail-receipt");
|
||||
|
||||
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
|
||||
.Returns((ProofBundle?)null);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Fail);
|
||||
result.Receipt.FailureReason.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cancellation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_Cancelled_ReturnsFailure()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:cancel-test");
|
||||
var cts = new CancellationTokenSource();
|
||||
cts.Cancel();
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request, cts.Token);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.ErrorMessage?.Contains("cancelled") == true);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
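    // These helpers wire up the NSubstitute fakes used above: each Setup* method configures
    // only the members of IProofBundleStore, IDsseVerifier, IRekorVerifier, or
    // ITrustAnchorResolver that the scenario needs, while CreateTestBundle and
    // CreateTestRekorEntry provide canned proof-bundle and Rekor data.
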
    private VerificationPipeline CreatePipeline()
    {
        return VerificationPipeline.CreateDefault(
            _proofStore,
            _dsseVerifier,
            _rekorVerifier,
            _trustAnchorResolver,
            _logger,
            _timeProvider);
    }

    private void SetupValidBundle(ProofBundleId bundleId, string keyId)
    {
        var bundle = CreateTestBundle(keyId, includeRekor: true);
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    private void SetupBundleWithWrongId(ProofBundleId bundleId, string keyId)
    {
        // Create a bundle but the ID won't match when recomputed
        var bundle = new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:wrong-statement-id", // Won't match content
                    PredicateType = "evidence.stella/v1",
                    Predicate = new { test = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = "test"u8.ToArray(),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
                    }
                }
            },
            RekorLogEntry = CreateTestRekorEntry()
        };

        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    private void SetupBundleWithoutRekor(ProofBundleId bundleId, string keyId)
    {
        var bundle = CreateTestBundle(keyId, includeRekor: false);
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    private void SetupValidDsseVerification(string keyId)
    {
        _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(new DsseVerificationResult { IsValid = true, KeyId = keyId });
    }

    private void SetupInvalidDsseVerification(string keyId, string error)
    {
        _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(new DsseVerificationResult
            {
                IsValid = false,
                KeyId = keyId,
                ErrorMessage = error
            });
    }

    private void SetupValidRekorVerification()
    {
        _rekorVerifier.VerifyInclusionAsync(
                Arg.Any<string>(),
                Arg.Any<long>(),
                Arg.Any<InclusionProof>(),
                Arg.Any<SignedTreeHead>(),
                Arg.Any<CancellationToken>())
            .Returns(new RekorVerificationResult { IsValid = true });
    }

    private void SetupValidTrustAnchor(string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = Guid.NewGuid(),
            AllowedKeyIds = new List<string> { keyId },
            RevokedKeyIds = new List<string>()
        };

        _trustAnchorResolver.GetAnchorAsync(Arg.Any<Guid>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
        _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
    }

    private void SetupTrustAnchorWithoutKey(string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = Guid.NewGuid(),
            AllowedKeyIds = new List<string> { "different-key" },
            RevokedKeyIds = new List<string>()
        };

        _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
    }

    private static ProofBundle CreateTestBundle(string keyId, bool includeRekor)
    {
        return new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:test-statement",
                    PredicateType = "evidence.stella/v1",
                    Predicate = new { test = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = "test"u8.ToArray(),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
                    }
                }
            },
            RekorLogEntry = includeRekor ? CreateTestRekorEntry() : null
        };
    }

    private static RekorLogEntry CreateTestRekorEntry()
    {
        return new RekorLogEntry
        {
            LogId = "test-log",
            LogIndex = 12345,
            InclusionProof = new InclusionProof
            {
                Hashes = new List<byte[]> { new byte[] { 0x01 } },
                TreeSize = 1000,
                RootHash = new byte[] { 0x02 }
            },
            SignedTreeHead = new SignedTreeHead
            {
                TreeSize = 1000,
                RootHash = new byte[] { 0x02 },
                Signature = new byte[] { 0x03 }
            }
        };
    }

    #endregion
}

/// <summary>
/// Fake time provider for testing.
/// </summary>
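/// <remarks>
/// The reported time only changes when a test calls <see cref="Advance"/> or
/// <see cref="SetTime"/>, which keeps timestamp- and duration-based assertions deterministic.
/// </remarks>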
internal sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _now;

    public FakeTimeProvider(DateTimeOffset initialTime)
    {
        _now = initialTime;
    }

    public override DateTimeOffset GetUtcNow() => _now;

    public void Advance(TimeSpan duration) => _now = _now.Add(duration);

    public void SetTime(DateTimeOffset time) => _now = time;
}
@@ -0,0 +1,484 @@
// -----------------------------------------------------------------------------
// VerificationPipelineTests.cs
// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
// Task: PROOF-API-0011 - Integration tests for verification pipeline
// Description: Tests for the full verification pipeline including DSSE, ID
//              recomputation, Rekor inclusion, and trust anchor verification
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Receipts;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;

namespace StellaOps.Attestor.ProofChain.Tests.Verification;

/// <summary>
/// Integration tests for the verification pipeline.
/// </summary>
public class VerificationPipelineTests
{
    private readonly Mock<IProofBundleStore> _proofStoreMock;
    private readonly Mock<IDsseVerifier> _dsseVerifierMock;
    private readonly Mock<IRekorVerifier> _rekorVerifierMock;
    private readonly Mock<ITrustAnchorResolver> _trustAnchorResolverMock;
    private readonly VerificationPipeline _pipeline;

    public VerificationPipelineTests()
    {
        _proofStoreMock = new Mock<IProofBundleStore>();
        _dsseVerifierMock = new Mock<IDsseVerifier>();
        _rekorVerifierMock = new Mock<IRekorVerifier>();
        _trustAnchorResolverMock = new Mock<ITrustAnchorResolver>();

        _pipeline = VerificationPipeline.CreateDefault(
            _proofStoreMock.Object,
            _dsseVerifierMock.Object,
            _rekorVerifierMock.Object,
            _trustAnchorResolverMock.Object,
            NullLogger<VerificationPipeline>.Instance);
    }

    #region Full Pipeline Tests

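    // Step order assumed from the step names asserted in these tests (not taken from the
    // pipeline implementation): dsse_signature -> id_recomputation -> rekor_inclusion ->
    // trust_anchor, with rekor_inclusion reported as a passed "skipped" step when
    // VerifyRekor is false.
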
    [Fact]
    public async Task VerifyAsync_AllStepsPass_ReturnsValidResult()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";
        var anchorId = Guid.NewGuid();

        SetupValidProofBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidRekorVerification();
        SetupValidTrustAnchor(anchorId, keyId);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.True(result.IsValid);
        Assert.Equal(VerificationResult.Pass, result.Receipt.Result);
        Assert.All(result.Steps, step => Assert.True(step.Passed));
        Assert.Null(result.FirstFailure);
    }

    [Fact]
    public async Task VerifyAsync_DsseSignatureInvalid_FailsAtDsseStep()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "invalid-key";

        SetupValidProofBundle(bundleId, keyId);
        SetupInvalidDsseVerification("Signature verification failed");

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.False(result.IsValid);
        Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
        Assert.NotNull(result.FirstFailure);
        Assert.Equal("dsse_signature", result.FirstFailure.StepName);
        Assert.Contains("Signature verification failed", result.FirstFailure.ErrorMessage);
    }

    [Fact]
    public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputationStep()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";

        // Setup a bundle whose content does not hash to the requested bundle ID
        SetupProofBundleWithMismatchedId(bundleId, keyId);
        SetupValidDsseVerification(keyId);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.False(result.IsValid);
        var idStep = result.Steps.FirstOrDefault(s => s.StepName == "id_recomputation");
        Assert.NotNull(idStep);
        // Note: only the presence of the step is asserted here; whether this step or a later
        // one fails depends on how the pipeline recomputes the ID from the constructed bundle.
    }

    [Fact]
    public async Task VerifyAsync_RekorInclusionFails_FailsAtRekorStep()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";

        SetupValidProofBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupInvalidRekorVerification("Inclusion proof invalid");

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.False(result.IsValid);
        var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
        Assert.NotNull(rekorStep);
        Assert.False(rekorStep.Passed);
        Assert.Contains("Inclusion proof invalid", rekorStep.ErrorMessage);
    }

    [Fact]
    public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";
        var anchorId = Guid.NewGuid();

        SetupValidProofBundle(bundleId, keyId, includeRekorEntry: false);
        SetupValidDsseVerification(keyId);
        SetupValidTrustAnchor(anchorId, keyId);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.True(result.IsValid);
        var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
        Assert.NotNull(rekorStep);
        Assert.True(rekorStep.Passed);
        Assert.Contains("skipped", rekorStep.Details, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchorStep()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "unauthorized-key";
        var anchorId = Guid.NewGuid();

        SetupValidProofBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupTrustAnchorWithoutKey(anchorId, keyId);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.False(result.IsValid);
        var anchorStep = result.Steps.FirstOrDefault(s => s.StepName == "trust_anchor");
        Assert.NotNull(anchorStep);
        Assert.False(anchorStep.Passed);
        Assert.Contains("not authorized", anchorStep.ErrorMessage);
    }

    #endregion

    #region Receipt Generation Tests

    [Fact]
    public async Task VerifyAsync_GeneratesReceiptWithCorrectFields()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";
        var anchorId = Guid.NewGuid();
        var verifierVersion = "2.0.0";

        SetupValidProofBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidRekorVerification();
        SetupValidTrustAnchor(anchorId, keyId);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true,
            VerifierVersion = verifierVersion
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.NotNull(result.Receipt);
        Assert.NotEmpty(result.Receipt.ReceiptId);
        Assert.Equal(bundleId.Value, result.Receipt.ProofBundleId);
        Assert.Equal(verifierVersion, result.Receipt.VerifierVersion);
        Assert.True(result.Receipt.TotalDurationMs >= 0);
        Assert.NotEmpty(result.Receipt.StepsSummary!);
    }

    [Fact]
    public async Task VerifyAsync_FailedVerification_ReceiptContainsFailureReason()
    {
        // Arrange
        var bundleId = CreateTestBundleId();

        _proofStoreMock
            .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
            .ReturnsAsync((ProofBundle?)null);

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act
        var result = await _pipeline.VerifyAsync(request);

        // Assert
        Assert.False(result.IsValid);
        Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
        Assert.NotNull(result.Receipt.FailureReason);
        Assert.Contains("not found", result.Receipt.FailureReason);
    }

    #endregion

    #region Cancellation Tests

    [Fact]
    public async Task VerifyAsync_Cancelled_ReturnsPartialResults()
    {
        // Arrange
        var bundleId = CreateTestBundleId();
        var keyId = "test-key-id";
        var cts = new CancellationTokenSource();

        SetupValidProofBundle(bundleId, keyId);

        // Cancel the token from inside DSSE verification so later steps observe cancellation
        _dsseVerifierMock
            .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
            .Returns(async (DsseEnvelope _, CancellationToken ct) =>
            {
                await cts.CancelAsync();
                ct.ThrowIfCancellationRequested();
                return new DsseVerificationResult { IsValid = true, KeyId = keyId };
            });

        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false
        };

        // Act - the pipeline is expected to surface the cancellation as a result rather than
        // let the exception escape; the exact step outcomes depend on the implementation.
        var result = await _pipeline.VerifyAsync(request, cts.Token);

        // Assert - at minimum a result object is produced for the partially executed pipeline
        Assert.NotNull(result);
    }

    #endregion

    #region Helper Methods

    private static ProofBundleId CreateTestBundleId()
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()));
        return new ProofBundleId($"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}");
    }

    private void SetupValidProofBundle(ProofBundleId bundleId, string keyId, bool includeRekorEntry = true)
    {
        var bundle = new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:statement123",
                    PredicateType = "https://stella-ops.io/v1/evidence",
                    Predicate = new { test = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = Encoding.UTF8.GetBytes("{}"),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[64] }
                    }
                }
            },
            RekorLogEntry = includeRekorEntry ? new RekorLogEntry
            {
                LogId = "test-log",
                LogIndex = 12345,
                InclusionProof = new InclusionProof
                {
                    Hashes = new List<byte[]>(),
                    TreeSize = 100,
                    RootHash = new byte[32]
                },
                SignedTreeHead = new SignedTreeHead
                {
                    TreeSize = 100,
                    RootHash = new byte[32],
                    Signature = new byte[64]
                }
            } : null
        };

        _proofStoreMock
            .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(bundle);
    }

    private void SetupProofBundleWithMismatchedId(ProofBundleId bundleId, string keyId)
    {
        // Create a bundle that will compute to a different ID
        var bundle = new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:differentstatement",
                    PredicateType = "https://stella-ops.io/v1/evidence",
                    Predicate = new { different = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = Encoding.UTF8.GetBytes("{\"different\":\"payload\"}"),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[64] }
                    }
                }
            }
        };

        _proofStoreMock
            .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(bundle);
    }

    private void SetupValidDsseVerification(string keyId)
    {
        _dsseVerifierMock
            .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new DsseVerificationResult { IsValid = true, KeyId = keyId });
    }

    private void SetupInvalidDsseVerification(string errorMessage)
    {
        _dsseVerifierMock
            .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new DsseVerificationResult
            {
                IsValid = false,
                KeyId = "unknown",
                ErrorMessage = errorMessage
            });
    }

    private void SetupValidRekorVerification()
    {
        _rekorVerifierMock
            .Setup(x => x.VerifyInclusionAsync(
                It.IsAny<string>(),
                It.IsAny<long>(),
                It.IsAny<InclusionProof>(),
                It.IsAny<SignedTreeHead>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(new RekorVerificationResult { IsValid = true });
    }

    private void SetupInvalidRekorVerification(string errorMessage)
    {
        _rekorVerifierMock
            .Setup(x => x.VerifyInclusionAsync(
                It.IsAny<string>(),
                It.IsAny<long>(),
                It.IsAny<InclusionProof>(),
                It.IsAny<SignedTreeHead>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(new RekorVerificationResult { IsValid = false, ErrorMessage = errorMessage });
    }

    private void SetupValidTrustAnchor(Guid anchorId, string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = anchorId,
            AllowedKeyIds = new List<string> { keyId },
            RevokedKeyIds = new List<string>()
        };

        _trustAnchorResolverMock
            .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(anchor);

        _trustAnchorResolverMock
            .Setup(x => x.GetAnchorAsync(anchorId, It.IsAny<CancellationToken>()))
            .ReturnsAsync(anchor);
    }

    private void SetupTrustAnchorWithoutKey(Guid anchorId, string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = anchorId,
            AllowedKeyIds = new List<string> { "other-key-not-matching" },
            RevokedKeyIds = new List<string>()
        };

        _trustAnchorResolverMock
            .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(anchor);
    }

    #endregion
}