up the blocking tasks
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Risk Bundle CI / risk-bundle-build (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Risk Bundle CI / risk-bundle-offline-kit (push) Has been cancelled
Risk Bundle CI / publish-checksums (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Risk Bundle CI / risk-bundle-build (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Risk Bundle CI / risk-bundle-offline-kit (push) Has been cancelled
Risk Bundle CI / publish-checksums (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled
This commit is contained in:
@@ -0,0 +1,411 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.AirGap;
|
||||
|
||||
/// <summary>
/// Validates network intents declared in job payloads.
/// Per ORCH-AIRGAP-56-001: Enforce job descriptors to declare network intents.
/// </summary>
public interface INetworkIntentValidator
{
    /// <summary>
    /// Validates network intents for a job payload against the supplied
    /// configuration, taking the current sealed/unsealed state into account.
    /// </summary>
    /// <param name="jobType">The job type.</param>
    /// <param name="payload">The job payload JSON.</param>
    /// <param name="config">Network intent configuration.</param>
    /// <param name="isSealed">Whether the environment is in sealed mode.</param>
    /// <returns>Validation result describing any violations found.</returns>
    NetworkIntentValidationResult ValidateForJob(
        string jobType,
        string payload,
        NetworkIntentConfig config,
        bool isSealed);

    /// <summary>
    /// Extracts network endpoints from a job payload.
    /// </summary>
    /// <param name="payload">The job payload JSON. Payloads that are not valid JSON yield an empty list.</param>
    /// <returns>List of detected network endpoints.</returns>
    IReadOnlyList<string> ExtractNetworkEndpoints(string payload);

    /// <summary>
    /// Extracts declared network intents from a job payload.
    /// </summary>
    /// <param name="payload">The job payload JSON. Payloads that are not valid JSON yield an empty list.</param>
    /// <returns>List of declared network intents.</returns>
    IReadOnlyList<NetworkIntent> ExtractDeclaredIntents(string payload);
}
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="INetworkIntentValidator"/>.
/// Scans job payload JSON for network endpoints (URL-like fields and values),
/// compares them to declared intents, and in sealed mode checks declared
/// intents against the configured allowlist and blocked-protocol list.
/// </summary>
public sealed partial class NetworkIntentValidator : INetworkIntentValidator
{
    private readonly ILogger<NetworkIntentValidator> _logger;

    // Common URL/endpoint field names in payloads; matched case-insensitively
    // as substrings of property names (see IsUrlFieldName).
    private static readonly string[] UrlFieldNames =
    [
        "destinationUri",
        "callbackUrl",
        "webhookUrl",
        "endpoint",
        "url",
        "uri",
        "host",
        "server",
        "apiUrl",
        "serviceUrl",
        "notifyUrl",
        "targetUrl",
        "registryUrl",
        "collectorEndpoint"
    ];

    public NetworkIntentValidator(ILogger<NetworkIntentValidator> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc/>
    public NetworkIntentValidationResult ValidateForJob(
        string jobType,
        string payload,
        NetworkIntentConfig config,
        bool isSealed)
    {
        ArgumentException.ThrowIfNullOrEmpty(jobType);
        ArgumentException.ThrowIfNullOrEmpty(payload);
        ArgumentNullException.ThrowIfNull(config);

        // If enforcement is disabled, always pass.
        if (config.EnforcementMode == EnforcementMode.Disabled)
        {
            _logger.LogDebug("Network intent enforcement disabled for job type {JobType}", jobType);
            return NetworkIntentValidationResult.Success();
        }

        // If not in sealed mode and not requiring explicit intents, pass.
        if (!isSealed && !config.RequireExplicitIntents)
        {
            return NetworkIntentValidationResult.Success();
        }

        var detectedEndpoints = ExtractNetworkEndpoints(payload);
        var declaredIntents = ExtractDeclaredIntents(payload);

        // If no network endpoints detected, pass.
        if (detectedEndpoints.Count == 0)
        {
            return NetworkIntentValidationResult.Success();
        }

        var violations = new List<NetworkIntentViolation>();
        // Only block (rather than warn) under strict enforcement in sealed mode.
        var shouldBlock = config.EnforcementMode == EnforcementMode.Strict && isSealed;

        // Check for undeclared endpoints (if requiring explicit intents).
        if (config.RequireExplicitIntents)
        {
            // Fast path: exact host matches, case-insensitive.
            var declaredHosts = declaredIntents
                .Select(i => i.Host)
                .ToHashSet(StringComparer.OrdinalIgnoreCase);

            foreach (var endpoint in detectedEndpoints)
            {
                var host = ExtractHostFromEndpoint(endpoint);
                if (host is null || declaredHosts.Contains(host))
                {
                    continue;
                }

                // Slow path: wildcard patterns such as "*.example.com".
                var hasMatchingIntent = declaredIntents.Any(i => HostMatchesPattern(host, i.Host));
                if (!hasMatchingIntent)
                {
                    violations.Add(new NetworkIntentViolation(
                        endpoint,
                        NetworkViolationType.MissingIntent,
                        null));
                }
            }
        }

        // In sealed mode, validate declared intents against the allowlist.
        if (isSealed && config.Allowlist is { Count: > 0 })
        {
            foreach (var intent in declaredIntents)
            {
                var isAllowed = config.Allowlist.Any(intent.MatchesAllowlistEntry);
                if (!isAllowed)
                {
                    violations.Add(new NetworkIntentViolation(
                        $"{intent.Protocol}://{intent.Host}:{intent.Port ?? 443}",
                        NetworkViolationType.NotInAllowlist,
                        intent));
                }
            }
        }
        else if (isSealed && (config.Allowlist is null || config.Allowlist.Count == 0))
        {
            // Sealed mode with no allowlist - all external network access is blocked.
            foreach (var intent in declaredIntents)
            {
                violations.Add(new NetworkIntentViolation(
                    $"{intent.Protocol}://{intent.Host}:{intent.Port ?? 443}",
                    NetworkViolationType.NotInAllowlist,
                    intent));
            }
        }

        // Check for blocked protocols.
        if (config.BlockedProtocols is { Count: > 0 })
        {
            foreach (var intent in declaredIntents)
            {
                if (config.BlockedProtocols.Contains(intent.Protocol, StringComparer.OrdinalIgnoreCase))
                {
                    violations.Add(new NetworkIntentViolation(
                        $"{intent.Protocol}://{intent.Host}",
                        NetworkViolationType.BlockedProtocol,
                        intent));
                }
            }
        }

        if (violations.Count == 0)
        {
            return NetworkIntentValidationResult.Success();
        }

        // Log violations at warning level when blocking, informational otherwise.
        foreach (var violation in violations)
        {
            if (shouldBlock)
            {
                _logger.LogWarning(
                    "Network intent violation for job type {JobType}: {ViolationType} - {Endpoint}",
                    jobType, violation.ViolationType, violation.Endpoint);
            }
            else
            {
                _logger.LogInformation(
                    "Network intent warning for job type {JobType}: {ViolationType} - {Endpoint}",
                    jobType, violation.ViolationType, violation.Endpoint);
            }
        }

        // Build result based on violation types: a pure "missing intent" failure
        // gets the more actionable MissingIntents result.
        var hasMissingIntents = violations.Any(v => v.ViolationType == NetworkViolationType.MissingIntent);
        var hasDisallowed = violations.Any(v => v.ViolationType == NetworkViolationType.NotInAllowlist);

        if (hasMissingIntents && !hasDisallowed)
        {
            var missingEndpoints = violations
                .Where(v => v.ViolationType == NetworkViolationType.MissingIntent)
                .Select(v => v.Endpoint)
                .ToList();
            return NetworkIntentValidationResult.MissingIntents(missingEndpoints, shouldBlock);
        }

        return NetworkIntentValidationResult.DisallowedIntents(violations, shouldBlock);
    }

    /// <inheritdoc/>
    public IReadOnlyList<string> ExtractNetworkEndpoints(string payload)
    {
        var endpoints = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        try
        {
            using var doc = JsonDocument.Parse(payload);
            ExtractEndpointsFromElement(doc.RootElement, endpoints);
        }
        catch (JsonException ex)
        {
            // Non-JSON payloads simply produce no endpoints.
            _logger.LogDebug(ex, "Failed to parse payload as JSON for endpoint extraction");
        }

        return [.. endpoints];
    }

    /// <inheritdoc/>
    public IReadOnlyList<NetworkIntent> ExtractDeclaredIntents(string payload)
    {
        try
        {
            using var doc = JsonDocument.Parse(payload);
            var root = doc.RootElement;

            // Accept both the camelCase "networkIntents" and the snake_case
            // "network_intents" property names.
            if (root.TryGetProperty("networkIntents", out var camelCaseIntents) &&
                camelCaseIntents.ValueKind == JsonValueKind.Array)
            {
                return ParseIntentsArray(camelCaseIntents);
            }

            if (root.TryGetProperty("network_intents", out var snakeCaseIntents) &&
                snakeCaseIntents.ValueKind == JsonValueKind.Array)
            {
                return ParseIntentsArray(snakeCaseIntents);
            }
        }
        catch (JsonException ex)
        {
            // Non-JSON payloads simply produce no intents.
            _logger.LogDebug(ex, "Failed to parse payload as JSON for intent extraction");
        }

        return [];
    }

    /// <summary>
    /// Parses each element of a JSON intents array, skipping entries that are
    /// not objects or lack a host.
    /// </summary>
    private static List<NetworkIntent> ParseIntentsArray(JsonElement intentsArray)
    {
        var intents = new List<NetworkIntent>();
        foreach (var intentElement in intentsArray.EnumerateArray())
        {
            var intent = ParseNetworkIntent(intentElement);
            if (intent is not null)
            {
                intents.Add(intent);
            }
        }

        return intents;
    }

    /// <summary>
    /// Parses a single intent object. Returns null when the element is not an
    /// object or has no "host" property. Protocol defaults to "https",
    /// purpose to "unspecified", direction to egress.
    /// </summary>
    private static NetworkIntent? ParseNetworkIntent(JsonElement element)
    {
        if (element.ValueKind != JsonValueKind.Object)
            return null;

        string? host = null;
        int? port = null;
        string protocol = "https";
        string purpose = "unspecified";
        var direction = NetworkDirection.Egress;

        if (element.TryGetProperty("host", out var hostProp))
            host = hostProp.GetString();

        if (element.TryGetProperty("port", out var portProp) && portProp.TryGetInt32(out var portValue))
            port = portValue;

        if (element.TryGetProperty("protocol", out var protocolProp))
            protocol = protocolProp.GetString() ?? "https";

        if (element.TryGetProperty("purpose", out var purposeProp))
            purpose = purposeProp.GetString() ?? "unspecified";

        if (element.TryGetProperty("direction", out var directionProp))
        {
            var dirStr = directionProp.GetString();
            if (string.Equals(dirStr, "ingress", StringComparison.OrdinalIgnoreCase))
                direction = NetworkDirection.Ingress;
        }

        return host is not null
            ? new NetworkIntent(host, port, protocol, purpose, direction)
            : null;
    }

    /// <summary>
    /// Recursively walks a JSON element collecting endpoint-like string values:
    /// values of URL-named properties, plus any string that parses as a network
    /// endpoint regardless of its property name.
    /// </summary>
    private void ExtractEndpointsFromElement(JsonElement element, HashSet<string> endpoints)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                foreach (var property in element.EnumerateObject())
                {
                    // Check if this is a URL field.
                    if (IsUrlFieldName(property.Name) &&
                        property.Value.ValueKind == JsonValueKind.String)
                    {
                        var value = property.Value.GetString();
                        if (!string.IsNullOrEmpty(value) && IsNetworkEndpoint(value))
                        {
                            endpoints.Add(value);
                        }
                    }
                    else
                    {
                        ExtractEndpointsFromElement(property.Value, endpoints);
                    }
                }
                break;

            case JsonValueKind.Array:
                foreach (var item in element.EnumerateArray())
                {
                    ExtractEndpointsFromElement(item, endpoints);
                }
                break;

            case JsonValueKind.String:
                var stringValue = element.GetString();
                if (!string.IsNullOrEmpty(stringValue) && IsNetworkEndpoint(stringValue))
                {
                    endpoints.Add(stringValue);
                }
                break;
        }
    }

    /// <summary>
    /// True when the property name contains any known URL field name
    /// (case-insensitive substring match, so e.g. "sourceRegistryUrl" matches).
    /// </summary>
    private static bool IsUrlFieldName(string fieldName)
    {
        return UrlFieldNames.Any(name =>
            fieldName.Contains(name, StringComparison.OrdinalIgnoreCase));
    }

    /// <summary>
    /// True when the value is an absolute http/https/grpc/grpcs URI, or a
    /// dotted host with optional ":port" (single-label hosts like "localhost"
    /// are intentionally not matched by the regex).
    /// </summary>
    private static bool IsNetworkEndpoint(string value)
    {
        // Check for URL patterns.
        if (Uri.TryCreate(value, UriKind.Absolute, out var uri))
        {
            return uri.Scheme is "http" or "https" or "grpc" or "grpcs";
        }

        // Check for host:port patterns.
        return HostPortRegex().IsMatch(value);
    }

    /// <summary>
    /// Extracts the host component from an absolute URI or a "host:port"
    /// string; returns null when neither form applies.
    /// </summary>
    private static string? ExtractHostFromEndpoint(string endpoint)
    {
        if (Uri.TryCreate(endpoint, UriKind.Absolute, out var uri))
        {
            return uri.Host;
        }

        // Try host:port format. Group 1 captures the final host label prefix;
        // the full match is the host, so strip any ":port" suffix instead.
        var match = HostPortRegex().Match(endpoint);
        if (match.Success)
        {
            return match.Groups[1].Value;
        }

        return null;
    }

    /// <summary>
    /// Matches a host against a declared pattern: "*" matches everything,
    /// "*.example.com" matches subdomains and the apex "example.com",
    /// anything else requires a case-insensitive exact match.
    /// </summary>
    private static bool HostMatchesPattern(string host, string pattern)
    {
        if (string.Equals(pattern, "*", StringComparison.Ordinal))
            return true;

        if (pattern.StartsWith("*.", StringComparison.Ordinal))
        {
            var suffix = pattern[1..];
            return host.EndsWith(suffix, StringComparison.OrdinalIgnoreCase) ||
                   string.Equals(host, pattern[2..], StringComparison.OrdinalIgnoreCase);
        }

        return string.Equals(host, pattern, StringComparison.OrdinalIgnoreCase);
    }

    // Dotted host name with optional ":port"; requires at least two labels
    // and a 2+-letter TLD.
    [GeneratedRegex(@"^([a-zA-Z0-9][-a-zA-Z0-9]*\.)+[a-zA-Z]{2,}(:\d+)?$")]
    private static partial Regex HostPortRegex();
}
|
||||
@@ -0,0 +1,258 @@
|
||||
namespace StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
|
||||
/// <summary>
/// Enforcement mode for air-gap policies.
/// </summary>
public enum EnforcementMode
{
    /// <summary>Enforcement is disabled; validation always passes.</summary>
    Disabled,

    /// <summary>Violations are logged as warnings but not blocked.</summary>
    Warn,

    /// <summary>Violations are blocked strictly.</summary>
    Strict
}
|
||||
|
||||
/// <summary>
/// Declares a network intent for a job descriptor.
/// Per ORCH-AIRGAP-56-001: Enforce job descriptors to declare network intents.
/// </summary>
public sealed record NetworkIntent(
    /// <summary>Target host or hostname pattern.</summary>
    string Host,

    /// <summary>Target port (null for any port).</summary>
    int? Port,

    /// <summary>Protocol (http, https, grpc, etc.).</summary>
    string Protocol,

    /// <summary>Purpose description for audit trail.</summary>
    string Purpose,

    /// <summary>Whether this is an egress (outbound) or ingress (inbound) intent.</summary>
    NetworkDirection Direction = NetworkDirection.Egress)
{
    /// <summary>
    /// Creates an egress intent over HTTPS for the given host.
    /// </summary>
    public static NetworkIntent HttpsEgress(string host, string purpose, int? port = 443)
        => new(host, port, "https", purpose, NetworkDirection.Egress);

    /// <summary>
    /// Creates an egress intent over plain HTTP for the given host.
    /// </summary>
    public static NetworkIntent HttpEgress(string host, string purpose, int? port = 80)
        => new(host, port, "http", purpose, NetworkDirection.Egress);

    /// <summary>
    /// Creates an egress intent over gRPC for the given host.
    /// </summary>
    public static NetworkIntent GrpcEgress(string host, string purpose, int? port = 443)
        => new(host, port, "grpc", purpose, NetworkDirection.Egress);

    /// <summary>
    /// Checks whether this intent is permitted by a single allowlist entry.
    /// The host must match the entry's pattern; port and protocol constraints
    /// only apply when both sides specify a value.
    /// </summary>
    public bool MatchesAllowlistEntry(NetworkAllowlistEntry entry)
    {
        var portOk = entry.Port is null || Port is null || entry.Port == Port;
        var protocolOk = string.IsNullOrEmpty(entry.Protocol) ||
                         string.Equals(entry.Protocol, Protocol, StringComparison.OrdinalIgnoreCase);

        return HostMatches(entry.HostPattern) && portOk && protocolOk;
    }

    /// <summary>
    /// Host pattern matching: "*" matches everything; "*.example.com" matches
    /// subdomains and the apex host; otherwise a case-insensitive exact match.
    /// </summary>
    private bool HostMatches(string pattern)
    {
        if (string.Equals(pattern, "*", StringComparison.Ordinal))
            return true;

        if (!pattern.StartsWith("*.", StringComparison.Ordinal))
            return string.Equals(Host, pattern, StringComparison.OrdinalIgnoreCase);

        var dottedSuffix = pattern[1..]; // e.g., ".example.com"
        var apexHost = pattern[2..];     // e.g., "example.com"
        return Host.EndsWith(dottedSuffix, StringComparison.OrdinalIgnoreCase) ||
               string.Equals(Host, apexHost, StringComparison.OrdinalIgnoreCase);
    }
}
|
||||
|
||||
/// <summary>
/// Network traffic direction.
/// </summary>
public enum NetworkDirection
{
    /// <summary>Outbound traffic from the job.</summary>
    Egress,

    /// <summary>Inbound traffic to the job (e.g., callbacks).</summary>
    Ingress
}
|
||||
|
||||
/// <summary>
/// Entry in the network allowlist for sealed mode.
/// Null port/protocol means "any"; see NetworkIntent.MatchesAllowlistEntry.
/// </summary>
public sealed record NetworkAllowlistEntry(
    /// <summary>Host pattern (exact match or wildcard like "*.example.com").</summary>
    string HostPattern,

    /// <summary>Allowed port (null for any port).</summary>
    int? Port = null,

    /// <summary>Allowed protocol (null for any protocol).</summary>
    string? Protocol = null,

    /// <summary>Description of why this entry is allowed.</summary>
    string? Description = null);
|
||||
|
||||
/// <summary>
/// Result of network intent validation.
/// </summary>
public sealed record NetworkIntentValidationResult(
    /// <summary>Whether the validation passed.</summary>
    bool IsValid,

    /// <summary>Whether the job should be blocked from scheduling.</summary>
    bool ShouldBlock,

    /// <summary>Error code if validation failed.</summary>
    string? ErrorCode,

    /// <summary>Human-readable error message.</summary>
    string? ErrorMessage,

    /// <summary>Detailed violations found.</summary>
    IReadOnlyList<NetworkIntentViolation> Violations,

    /// <summary>Recommendations for resolving violations.</summary>
    IReadOnlyList<string> Recommendations)
{
    /// <summary>
    /// A passing result: valid, non-blocking, no violations or recommendations.
    /// </summary>
    public static NetworkIntentValidationResult Success()
        => new(true, false, null, null, [], []);

    /// <summary>
    /// Builds a failure result for endpoints that were detected in the payload
    /// but have no corresponding declared intent.
    /// </summary>
    public static NetworkIntentValidationResult MissingIntents(
        IReadOnlyList<string> detectedEndpoints,
        bool shouldBlock)
    {
        var violations = new List<NetworkIntentViolation>(detectedEndpoints.Count);
        foreach (var endpoint in detectedEndpoints)
        {
            violations.Add(new NetworkIntentViolation(endpoint, NetworkViolationType.MissingIntent, null));
        }

        // Cap the endpoint listing at five entries to keep messages readable.
        var sampleEndpoints = string.Join(", ", detectedEndpoints.Take(5));

        return new(
            IsValid: false,
            ShouldBlock: shouldBlock,
            ErrorCode: "NETWORK_INTENT_MISSING",
            ErrorMessage: $"Job accesses {detectedEndpoints.Count} network endpoint(s) without declared intents",
            Violations: violations,
            Recommendations: [
                "Add 'networkIntents' to the job payload declaring all external endpoints",
                "Use NetworkIntent.HttpsEgress() for HTTPS endpoints",
                $"Endpoints detected: {sampleEndpoints}"
            ]);
    }

    /// <summary>
    /// Builds a failure result for declared intents that violate sealed-mode
    /// policy (allowlist misses, blocked protocols, etc.).
    /// </summary>
    public static NetworkIntentValidationResult DisallowedIntents(
        IReadOnlyList<NetworkIntentViolation> violations,
        bool shouldBlock)
    {
        // Summarize only the allowlist misses; other violation types are still
        // carried in Violations.
        var disallowedHosts = violations
            .Where(v => v.ViolationType == NetworkViolationType.NotInAllowlist)
            .Select(v => v.Endpoint)
            .Distinct()
            .ToList();
        var sampleHosts = string.Join(", ", disallowedHosts.Take(5));

        return new(
            IsValid: false,
            ShouldBlock: shouldBlock,
            ErrorCode: "NETWORK_INTENT_DISALLOWED",
            ErrorMessage: $"Job declares {violations.Count} network intent(s) not in sealed-mode allowlist",
            Violations: violations,
            Recommendations: [
                "Add the required hosts to the air-gap egress allowlist",
                "Or disable network intent enforcement in the staleness configuration",
                $"Disallowed hosts: {sampleHosts}"
            ]);
    }
}
|
||||
|
||||
/// <summary>
/// A specific network intent violation.
/// </summary>
public sealed record NetworkIntentViolation(
    /// <summary>The endpoint that violated the policy.</summary>
    string Endpoint,

    /// <summary>Type of violation.</summary>
    NetworkViolationType ViolationType,

    /// <summary>The intent that caused the violation (null for MissingIntent violations).</summary>
    NetworkIntent? Intent);
|
||||
|
||||
/// <summary>
/// Type of network intent violation.
/// </summary>
public enum NetworkViolationType
{
    /// <summary>Network endpoint accessed without a declared intent.</summary>
    MissingIntent,

    /// <summary>Declared intent is not in the sealed-mode allowlist.</summary>
    NotInAllowlist,

    /// <summary>Intent declared for blocked protocol.</summary>
    BlockedProtocol,

    /// <summary>Intent declared for blocked port.</summary>
    BlockedPort
}
|
||||
|
||||
/// <summary>
/// Configuration for network intent enforcement.
/// </summary>
public sealed record NetworkIntentConfig(
    /// <summary>Enforcement mode for network intent validation.</summary>
    EnforcementMode EnforcementMode = EnforcementMode.Warn,

    /// <summary>Allowlist of permitted network endpoints in sealed mode. Null or empty means all external access is blocked when sealed.</summary>
    IReadOnlyList<NetworkAllowlistEntry>? Allowlist = null,

    /// <summary>Whether to require explicit intent declarations.</summary>
    bool RequireExplicitIntents = true,

    /// <summary>Protocols that are always blocked (compared case-insensitively).</summary>
    IReadOnlyList<string>? BlockedProtocols = null)
{
    /// <summary>
    /// Default configuration with warning mode.
    /// </summary>
    public static NetworkIntentConfig Default => new();

    /// <summary>
    /// Strict enforcement configuration.
    /// </summary>
    public static NetworkIntentConfig Strict => new(EnforcementMode.Strict);

    /// <summary>
    /// Disabled enforcement configuration.
    /// </summary>
    public static NetworkIntentConfig Disabled => new(EnforcementMode.Disabled);
}
|
||||
@@ -0,0 +1,426 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
|
||||
/// <summary>
/// Mirror bundle job payload containing bundle-specific parameters.
/// Serialized to JSON (camelCase, compact) and stored in Job.Payload.
/// Per ORCH-AIRGAP-57-001.
/// </summary>
public sealed record MirrorBundlePayload(
    /// <summary>Domains to include in the bundle (vex-advisories, vulnerability-feeds, etc.).</summary>
    IReadOnlyList<string> Domains,

    /// <summary>Start of time range to include (inclusive).</summary>
    DateTimeOffset? StartTime,

    /// <summary>End of time range to include (exclusive).</summary>
    DateTimeOffset? EndTime,

    /// <summary>Target environment identifier for the bundle.</summary>
    string? TargetEnvironment,

    /// <summary>Maximum staleness allowed in bundle data (seconds).</summary>
    int? MaxStalenessSeconds,

    /// <summary>Whether to include full provenance chain.</summary>
    bool IncludeProvenance,

    /// <summary>Whether to include audit trail entries.</summary>
    bool IncludeAuditTrail,

    /// <summary>Whether to sign the bundle with DSSE.</summary>
    bool SignBundle,

    /// <summary>Signing key identifier.</summary>
    string? SigningKeyId,

    /// <summary>Compression format (null = none, "gzip", "zstd").</summary>
    string? Compression,

    /// <summary>Destination URI for the bundle output.</summary>
    string? DestinationUri,

    /// <summary>Whether to include time anchor for staleness validation.</summary>
    bool IncludeTimeAnchor,

    /// <summary>Additional bundle-specific options.</summary>
    IReadOnlyDictionary<string, string>? Options)
{
    // Shared serializer settings: camelCase property names, compact output.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    /// <summary>
    /// Default bundle payload: full time range, provenance/audit/time-anchor
    /// included, signed, gzip-compressed.
    /// </summary>
    public static MirrorBundlePayload Default(IReadOnlyList<string> domains) => new(
        Domains: domains,
        StartTime: null,
        EndTime: null,
        TargetEnvironment: null,
        MaxStalenessSeconds: null,
        IncludeProvenance: true,
        IncludeAuditTrail: true,
        SignBundle: true,
        SigningKeyId: null,
        Compression: "gzip",
        DestinationUri: null,
        IncludeTimeAnchor: true,
        Options: null);

    /// <summary>Serializes the payload to compact camelCase JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>
    /// Computes the SHA-256 digest of the serialized payload, formatted as
    /// "sha256:&lt;lowercase-hex&gt;".
    /// </summary>
    public string ComputeDigest()
    {
        var json = ToJson();
        var bytes = Encoding.UTF8.GetBytes(json);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    /// <summary>
    /// Deserializes a payload from JSON. Returns null for null, empty, or
    /// otherwise invalid JSON (previously a null/empty input threw instead of
    /// returning null as documented).
    /// </summary>
    public static MirrorBundlePayload? FromJson(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            return null;
        }

        try
        {
            return JsonSerializer.Deserialize<MirrorBundlePayload>(json, JsonOptions);
        }
        catch (JsonException)
        {
            return null;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Mirror bundle job result containing output metadata and provenance.
/// Per ORCH-AIRGAP-57-001.
/// </summary>
public sealed record MirrorBundleResult(
    /// <summary>Output URI where bundle is stored.</summary>
    string OutputUri,

    /// <summary>SHA-256 digest of the bundle.</summary>
    string BundleDigest,

    /// <summary>SHA-256 digest of the bundle manifest.</summary>
    string ManifestDigest,

    /// <summary>Bundle size in bytes.</summary>
    long BundleSizeBytes,

    /// <summary>Domains included in the bundle.</summary>
    IReadOnlyList<string> IncludedDomains,

    /// <summary>Per-domain export records.</summary>
    IReadOnlyList<ExportRecord> Exports,

    /// <summary>Provenance attestation URI (if signed).</summary>
    string? ProvenanceUri,

    /// <summary>Audit trail URI (if included).</summary>
    string? AuditTrailUri,

    /// <summary>Audit trail entry count.</summary>
    int? AuditEntryCount,

    /// <summary>Time anchor included in bundle.</summary>
    TimeAnchor? TimeAnchor,

    /// <summary>Compression applied.</summary>
    string? Compression,

    /// <summary>Source environment identifier.</summary>
    string SourceEnvironment,

    /// <summary>Target environment identifier (if specified).</summary>
    string? TargetEnvironment,

    /// <summary>Bundle generation timestamp.</summary>
    DateTimeOffset GeneratedAt,

    /// <summary>Duration of bundle creation in seconds.</summary>
    double DurationSeconds,

    /// <summary>DSSE signature (if signed).</summary>
    MirrorBundleSignature? Signature)
{
    // Shared serializer settings: camelCase property names, compact output.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    /// <summary>Serializes the result to compact camelCase JSON.</summary>
    public string ToJson()
    {
        return JsonSerializer.Serialize(this, JsonOptions);
    }

    /// <summary>Deserializes a result from JSON.</summary>
    public static MirrorBundleResult? FromJson(string json)
    {
        return JsonSerializer.Deserialize<MirrorBundleResult>(json, JsonOptions);
    }
}
|
||||
|
||||
/// <summary>
/// DSSE signature for a mirror bundle.
/// </summary>
public sealed record MirrorBundleSignature(
    /// <summary>Signature algorithm (e.g., "ECDSA-P256-SHA256").</summary>
    string Algorithm,

    /// <summary>Signing key identifier.</summary>
    string KeyId,

    /// <summary>Signature value (base64).</summary>
    string SignatureValue,

    /// <summary>Signed timestamp.</summary>
    DateTimeOffset SignedAt,

    /// <summary>DSSE payload type.</summary>
    string PayloadType,

    /// <summary>URI to full DSSE envelope.</summary>
    string? EnvelopeUri);
|
||||
|
||||
/// <summary>
/// Audit trail record included in a mirror bundle.
/// Per ORCH-AIRGAP-57-001.
/// </summary>
/// <param name="EntryId">Unique audit entry identifier.</param>
/// <param name="EventType">Event type name.</param>
/// <param name="Timestamp">When the event occurred.</param>
/// <param name="Actor">Actor who triggered the event, if known.</param>
/// <param name="DomainId">Affected domain, if any.</param>
/// <param name="EntityId">Affected entity, if any.</param>
/// <param name="Details">Free-form event details.</param>
/// <param name="ContentHash">Stored content hash used for integrity verification.</param>
/// <param name="CorrelationId">Correlation identifier linking related events.</param>
public sealed record MirrorAuditEntry(
    Guid EntryId,
    string EventType,
    DateTimeOffset Timestamp,
    string? Actor,
    string? DomainId,
    Guid? EntityId,
    string? Details,
    string ContentHash,
    string? CorrelationId)
{
    /// <summary>
    /// Computes a "sha256:&lt;lowercase hex&gt;" digest over the entry's fields.
    /// ContentHash itself is not part of the hashed data, so the digest can be
    /// compared against the stored hash independently.
    /// </summary>
    public string ComputeDigest()
    {
        // Pipe-join the fields in a fixed order; the round-trip "o" format keeps
        // the timestamp representation canonical and culture-independent.
        string[] parts =
        [
            EntryId.ToString(),
            EventType,
            Timestamp.ToString("o"),
            Actor ?? string.Empty,
            DomainId ?? string.Empty,
            EntityId?.ToString() ?? string.Empty,
            Details ?? string.Empty,
            CorrelationId ?? string.Empty,
        ];

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(string.Join('|', parts)));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
|
||||
/// <summary>
/// Progress snapshot for a mirror bundle job.
/// </summary>
/// <param name="Phase">Current phase of bundle creation.</param>
/// <param name="DomainsProcessed">Number of domains processed so far.</param>
/// <param name="TotalDomains">Total number of domains to process.</param>
/// <param name="RecordsProcessed">Number of records processed so far.</param>
/// <param name="BytesWritten">Number of bytes written so far.</param>
/// <param name="AuditEntriesCollected">Number of audit entries collected so far.</param>
/// <param name="Message">Optional human-readable progress message.</param>
public sealed record MirrorBundleProgress(
    MirrorPhase Phase,
    int DomainsProcessed,
    int TotalDomains,
    int RecordsProcessed,
    long BytesWritten,
    int AuditEntriesCollected,
    string? Message)
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    /// <summary>
    /// Domain-based completion percentage capped at 100, or null when the
    /// total domain count is not yet known (zero or negative).
    /// </summary>
    public double? ProgressPercent
    {
        get
        {
            if (TotalDomains <= 0)
            {
                return null;
            }

            var percent = 100.0 * DomainsProcessed / TotalDomains;
            return percent > 100.0 ? 100.0 : percent;
        }
    }

    /// <summary>Serializes this snapshot to compact camelCase JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes a snapshot from JSON produced by <see cref="ToJson"/>.</summary>
    public static MirrorBundleProgress? FromJson(string json) =>
        JsonSerializer.Deserialize<MirrorBundleProgress>(json, JsonOptions);
}
|
||||
|
||||
/// <summary>
/// Ordered phases of a mirror bundle job, from initialization (0) through
/// completion (10). Values are explicit so serialized progress records stay
/// stable if members are reordered.
/// </summary>
public enum MirrorPhase
{
    /// <summary>Bundle creation is being initialized.</summary>
    Initializing = 0,

    /// <summary>Staleness requirements are being validated.</summary>
    ValidatingStaleness = 1,

    /// <summary>Domain data is being collected.</summary>
    CollectingDomainData = 2,

    /// <summary>The audit trail is being collected.</summary>
    CollectingAuditTrail = 3,

    /// <summary>Provenance is being generated.</summary>
    GeneratingProvenance = 4,

    /// <summary>The time anchor is being created.</summary>
    CreatingTimeAnchor = 5,

    /// <summary>The bundle is being compressed.</summary>
    Compressing = 6,

    /// <summary>The bundle is being signed with DSSE.</summary>
    Signing = 7,

    /// <summary>The bundle is being uploaded to its destination.</summary>
    Uploading = 8,

    /// <summary>The bundle is being finalized.</summary>
    Finalizing = 9,

    /// <summary>Bundle creation has completed.</summary>
    Completed = 10
}
|
||||
|
||||
/// <summary>
/// Manifest for a mirror bundle describing its contents.
/// </summary>
/// <param name="BundleId">Bundle identifier.</param>
/// <param name="SchemaVersion">Manifest schema version.</param>
/// <param name="SourceEnvironment">Source environment identifier.</param>
/// <param name="TargetEnvironment">Target environment identifier, if specified.</param>
/// <param name="CreatedAt">Bundle creation timestamp.</param>
/// <param name="Domains">Domains included in the bundle.</param>
/// <param name="TimeAnchor">Time anchor for staleness validation.</param>
/// <param name="Provenance">Provenance record.</param>
/// <param name="AuditSummary">Audit trail summary, if an audit trail is included.</param>
/// <param name="Metadata">Optional free-form bundle metadata.</param>
public sealed record MirrorBundleManifest(
    Guid BundleId,
    string SchemaVersion,
    string SourceEnvironment,
    string? TargetEnvironment,
    DateTimeOffset CreatedAt,
    IReadOnlyList<MirrorDomainEntry> Domains,
    TimeAnchor? TimeAnchor,
    BundleProvenance Provenance,
    MirrorAuditSummary? AuditSummary,
    IReadOnlyDictionary<string, string>? Metadata)
{
    /// <summary>Current manifest schema version.</summary>
    public const string CurrentSchemaVersion = "1.0.0";

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    /// <summary>Serializes the manifest to compact camelCase JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes a manifest from JSON produced by <see cref="ToJson"/>.</summary>
    public static MirrorBundleManifest? FromJson(string json) =>
        JsonSerializer.Deserialize<MirrorBundleManifest>(json, JsonOptions);

    /// <summary>
    /// Computes a "sha256:&lt;lowercase hex&gt;" digest over the JSON serialization
    /// of this manifest.
    /// </summary>
    public string ComputeDigest()
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(ToJson()));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
|
||||
/// <summary>
/// Domain entry in a mirror bundle manifest. Describes one exported domain
/// file: where it lives inside the bundle, its digest, size, and freshness.
/// </summary>
public sealed record MirrorDomainEntry(
    /// <summary>Domain identifier.</summary>
    string DomainId,

    /// <summary>Export format of the domain data.</summary>
    ExportFormat Format,

    /// <summary>Export file path within the bundle.</summary>
    string FilePath,

    /// <summary>Export digest (presumably "sha256:&lt;hex&gt;" like sibling types — confirm producer).</summary>
    string Digest,

    /// <summary>Export size in bytes.</summary>
    long SizeBytes,

    /// <summary>Record count in the export.</summary>
    int RecordCount,

    /// <summary>Source timestamp of the data.</summary>
    DateTimeOffset SourceTimestamp,

    /// <summary>Staleness at bundle creation time, in seconds.</summary>
    int StalenessSeconds);
|
||||
|
||||
/// <summary>
/// Summary of the audit trail included in a mirror bundle.
/// </summary>
/// <param name="TotalEntries">Total number of audit entries in the bundle.</param>
/// <param name="FilePath">Audit trail file path within the bundle.</param>
/// <param name="Digest">Digest of the audit trail file.</param>
/// <param name="SizeBytes">Audit trail size in bytes.</param>
/// <param name="EarliestEntry">Timestamp of the earliest audit entry.</param>
/// <param name="LatestEntry">Timestamp of the latest audit entry.</param>
/// <param name="EventTypeCounts">Entry counts keyed by event type.</param>
public sealed record MirrorAuditSummary(
    int TotalEntries,
    string FilePath,
    string Digest,
    long SizeBytes,
    DateTimeOffset EarliestEntry,
    DateTimeOffset LatestEntry,
    IReadOnlyDictionary<string, int> EventTypeCounts);
|
||||
// ---------------------------------------------------------------------------
// (file boundary — original diff hunk header removed: next file is MirrorJobTypes)
// ---------------------------------------------------------------------------
|
||||
namespace StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
|
||||
/// <summary>
/// Standard mirror job type identifiers for air-gap bundle operations.
/// Mirror jobs follow the "mirror.{operation}" naming pattern.
/// Per ORCH-AIRGAP-57-001.
/// </summary>
public static class MirrorJobTypes
{
    /// <summary>Common prefix shared by every mirror job type.</summary>
    public const string Prefix = "mirror.";

    /// <summary>Creates a portable air-gap export bundle with provenance.</summary>
    public const string Bundle = "mirror.bundle";

    /// <summary>Validates and imports a portable bundle from an external source.</summary>
    public const string Import = "mirror.import";

    /// <summary>Validates bundle integrity without importing.</summary>
    public const string Verify = "mirror.verify";

    /// <summary>Synchronizes bundles between environments.</summary>
    public const string Sync = "mirror.sync";

    /// <summary>Compares bundles to identify their delta.</summary>
    public const string Diff = "mirror.diff";

    /// <summary>Every known mirror job type.</summary>
    public static readonly IReadOnlyList<string> All =
        new[] { Bundle, Import, Verify, Sync, Diff };

    /// <summary>
    /// Returns true when <paramref name="jobType"/> carries the mirror prefix
    /// (case-insensitive ordinal comparison).
    /// </summary>
    public static bool IsMirrorJob(string? jobType) =>
        jobType?.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase) is true;

    /// <summary>
    /// Extracts the operation suffix (e.g., "bundle" from "mirror.bundle").
    /// Returns null for non-mirror job types and for the bare prefix.
    /// </summary>
    public static string? GetMirrorOperation(string? jobType)
    {
        if (!IsMirrorJob(jobType) || jobType!.Length <= Prefix.Length)
        {
            return null;
        }

        return jobType[Prefix.Length..];
    }
}
|
||||
// ---------------------------------------------------------------------------
// (file boundary — original diff hunk header removed: next file is the mirror
// operation recorder and its event/record types)
// ---------------------------------------------------------------------------
|
||||
using System.Globalization;
using Microsoft.Extensions.Logging;
using StellaOps.Orchestrator.Core.Domain.AirGap;
using StellaOps.Orchestrator.Core.Domain.Events;
using StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
|
||||
/// <summary>
/// Well-known timeline event type identifiers for mirror operations.
/// Per ORCH-AIRGAP-58-001.
/// </summary>
public static class MirrorEventTypes
{
    /// <summary>Common prefix shared by every mirror event type.</summary>
    public const string Prefix = "mirror.";

    /// <summary>Bundle creation started.</summary>
    public const string BundleStarted = "mirror.bundle.started";

    /// <summary>Bundle creation progress update.</summary>
    public const string BundleProgress = "mirror.bundle.progress";

    /// <summary>Bundle creation completed successfully.</summary>
    public const string BundleCompleted = "mirror.bundle.completed";

    /// <summary>Bundle creation failed.</summary>
    public const string BundleFailed = "mirror.bundle.failed";

    /// <summary>Bundle import started.</summary>
    public const string ImportStarted = "mirror.import.started";

    /// <summary>Bundle import passed validation.</summary>
    public const string ImportValidated = "mirror.import.validated";

    /// <summary>Bundle import completed successfully.</summary>
    public const string ImportCompleted = "mirror.import.completed";

    /// <summary>Bundle import failed.</summary>
    public const string ImportFailed = "mirror.import.failed";

    /// <summary>Bundle verification started.</summary>
    public const string VerifyStarted = "mirror.verify.started";

    /// <summary>Bundle verification completed successfully.</summary>
    public const string VerifyCompleted = "mirror.verify.completed";

    /// <summary>Bundle verification failed.</summary>
    public const string VerifyFailed = "mirror.verify.failed";

    /// <summary>Bundle sync started.</summary>
    public const string SyncStarted = "mirror.sync.started";

    /// <summary>Bundle sync progress update.</summary>
    public const string SyncProgress = "mirror.sync.progress";

    /// <summary>Bundle sync completed successfully.</summary>
    public const string SyncCompleted = "mirror.sync.completed";

    /// <summary>Bundle sync failed.</summary>
    public const string SyncFailed = "mirror.sync.failed";

    /// <summary>Evidence for a mirror operation was captured.</summary>
    public const string EvidenceCaptured = "mirror.evidence.captured";

    /// <summary>Provenance for a mirror operation was recorded.</summary>
    public const string ProvenanceRecorded = "mirror.provenance.recorded";
}
|
||||
|
||||
/// <summary>
/// Service for recording mirror import/export operations as timeline events and
/// evidence entries. Implementations are expected to report failures through the
/// returned <see cref="MirrorOperationRecordResult"/> rather than by throwing
/// (the visible implementation below follows that convention).
/// Per ORCH-AIRGAP-58-001.
/// </summary>
public interface IMirrorOperationRecorder
{
    /// <summary>Records the start of a bundle creation operation.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordBundleStartedAsync(
        MirrorOperationContext context,
        MirrorBundlePayload payload,
        CancellationToken cancellationToken = default);

    /// <summary>Records bundle creation progress.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordBundleProgressAsync(
        MirrorOperationContext context,
        MirrorBundleProgress progress,
        CancellationToken cancellationToken = default);

    /// <summary>Records successful bundle completion with evidence.</summary>
    /// <returns>Recording outcome; may include an evidence capsule id and pointer.</returns>
    Task<MirrorOperationRecordResult> RecordBundleCompletedAsync(
        MirrorOperationContext context,
        MirrorBundleResult result,
        CancellationToken cancellationToken = default);

    /// <summary>Records bundle creation failure.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordBundleFailedAsync(
        MirrorOperationContext context,
        string errorCode,
        string errorMessage,
        CancellationToken cancellationToken = default);

    /// <summary>Records the start of an import operation.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordImportStartedAsync(
        MirrorOperationContext context,
        MirrorImportRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>Records successful import validation.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordImportValidatedAsync(
        MirrorOperationContext context,
        MirrorImportValidation validation,
        CancellationToken cancellationToken = default);

    /// <summary>Records successful import completion.</summary>
    /// <returns>Recording outcome; may include an evidence capsule id and pointer.</returns>
    Task<MirrorOperationRecordResult> RecordImportCompletedAsync(
        MirrorOperationContext context,
        MirrorImportResult result,
        CancellationToken cancellationToken = default);

    /// <summary>Records import failure.</summary>
    /// <returns>Recording outcome, including the timeline event id when emitted.</returns>
    Task<MirrorOperationRecordResult> RecordImportFailedAsync(
        MirrorOperationContext context,
        string errorCode,
        string errorMessage,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Identity and correlation context shared by all mirror operation recordings.
/// </summary>
/// <param name="TenantId">Tenant scope.</param>
/// <param name="ProjectId">Optional project scope.</param>
/// <param name="JobId">Job identifier.</param>
/// <param name="OperationId">Operation identifier (also used as the event correlation id).</param>
/// <param name="JobType">Job type (e.g., "mirror.bundle").</param>
/// <param name="Actor">Actor triggering the operation, if known.</param>
/// <param name="TraceId">Trace id for distributed-tracing correlation.</param>
/// <param name="SpanId">Span id for distributed-tracing correlation.</param>
/// <param name="SourceEnvironment">Source environment identifier.</param>
/// <param name="TargetEnvironment">Target environment identifier, if specified.</param>
public sealed record MirrorOperationContext(
    string TenantId,
    string? ProjectId,
    Guid JobId,
    Guid OperationId,
    string JobType,
    string? Actor,
    string? TraceId,
    string? SpanId,
    string SourceEnvironment,
    string? TargetEnvironment);
|
||||
|
||||
/// <summary>
/// Result of recording a mirror operation. The recorder reports failures through
/// this type instead of throwing (see the catch blocks in MirrorOperationRecorder).
/// </summary>
public sealed record MirrorOperationRecordResult(
    /// <summary>Whether recording was successful.</summary>
    bool Success,

    /// <summary>Timeline event id; null when the event could not be emitted.</summary>
    Guid? EventId,

    /// <summary>Evidence capsule id; only set by operations that create a capsule (e.g., bundle completion).</summary>
    Guid? CapsuleId,

    /// <summary>Evidence pointer for downstream consumers; only set alongside a capsule.</summary>
    EvidencePointer? EvidencePointer,

    /// <summary>Error message when recording failed; null on success.</summary>
    string? Error);
|
||||
|
||||
/// <summary>
/// Request parameters for importing a mirror bundle.
/// </summary>
/// <param name="BundleUri">URI of the bundle to import.</param>
/// <param name="ExpectedDigest">Expected bundle digest, if known in advance.</param>
/// <param name="ValidateSignatures">Whether to validate bundle signatures.</param>
/// <param name="VerifyProvenance">Whether to verify the provenance chain.</param>
/// <param name="MaxStalenessSeconds">Maximum staleness allowed, in seconds; null for no limit.</param>
public sealed record MirrorImportRequest(
    string BundleUri,
    string? ExpectedDigest,
    bool ValidateSignatures,
    bool VerifyProvenance,
    int? MaxStalenessSeconds);
|
||||
|
||||
/// <summary>
/// Outcome of validating a mirror bundle prior to import.
/// </summary>
/// <param name="IsValid">Whether the bundle passed validation.</param>
/// <param name="BundleDigest">Verified bundle digest.</param>
/// <param name="ManifestDigest">Verified manifest digest.</param>
/// <param name="SignatureVerified">Whether the signature was verified.</param>
/// <param name="ProvenanceVerified">Whether the provenance chain was verified.</param>
/// <param name="StalenessSeconds">Staleness at validation time, in seconds, if measured.</param>
/// <param name="Warnings">Non-fatal validation warnings, if any.</param>
public sealed record MirrorImportValidation(
    bool IsValid,
    string BundleDigest,
    string ManifestDigest,
    bool SignatureVerified,
    bool ProvenanceVerified,
    int? StalenessSeconds,
    IReadOnlyList<string>? Warnings);
|
||||
|
||||
/// <summary>
/// Summary of a completed mirror bundle import.
/// </summary>
public sealed record MirrorImportResult(
    /// <summary>Number of domains imported.</summary>
    int DomainsImported,

    /// <summary>Number of records imported.</summary>
    int RecordsImported,

    /// <summary>Import duration in seconds.</summary>
    double DurationSeconds,

    /// <summary>Time anchor carried by the bundle, if present.</summary>
    TimeAnchor? TimeAnchor,

    /// <summary>Provenance record linking the import back to the original bundle.</summary>
    MirrorImportProvenance Provenance);
|
||||
|
||||
/// <summary>
/// Provenance record tying an imported bundle back to its origin.
/// </summary>
/// <param name="BundleId">Original bundle identifier.</param>
/// <param name="SourceEnvironment">Environment the bundle was produced in.</param>
/// <param name="OriginalCreatedAt">Timestamp of the bundle's original creation.</param>
/// <param name="BundleDigest">Digest of the imported bundle.</param>
/// <param name="SigningKeyId">Identifier of the signing key, if the bundle was signed.</param>
/// <param name="ImportedAt">Timestamp at which the import took place.</param>
public sealed record MirrorImportProvenance(
    Guid BundleId,
    string SourceEnvironment,
    DateTimeOffset OriginalCreatedAt,
    string BundleDigest,
    string? SigningKeyId,
    DateTimeOffset ImportedAt);
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of mirror operation recorder.
|
||||
/// </summary>
|
||||
public sealed class MirrorOperationRecorder : IMirrorOperationRecorder
|
||||
{
|
||||
// Event source identifier for this recorder.
// NOTE(review): Source is not referenced by the methods visible in this chunk —
// presumably consumed by CreateBaseAttributes (defined later in the file); confirm.
private const string Source = "orchestrator-mirror";

private readonly ITimelineEventEmitter _timelineEmitter;      // emits timeline events for each operation stage
private readonly IJobCapsuleGenerator _capsuleGenerator;      // builds Evidence Locker capsules on completion
private readonly IMirrorEvidenceStore _evidenceStore;         // persists durable mirror evidence records
private readonly ILogger<MirrorOperationRecorder> _logger;
|
||||
|
||||
/// <summary>
/// Creates a recorder wired to the timeline emitter, capsule generator, and
/// evidence store sinks.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public MirrorOperationRecorder(
    ITimelineEventEmitter timelineEmitter,
    IJobCapsuleGenerator capsuleGenerator,
    IMirrorEvidenceStore evidenceStore,
    ILogger<MirrorOperationRecorder> logger)
{
    _timelineEmitter = timelineEmitter ?? throw new ArgumentNullException(nameof(timelineEmitter));
    _capsuleGenerator = capsuleGenerator ?? throw new ArgumentNullException(nameof(capsuleGenerator));
    _evidenceStore = evidenceStore ?? throw new ArgumentNullException(nameof(evidenceStore));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
|
||||
|
||||
/// <summary>
/// Records the start of a bundle creation operation as a timeline event.
/// Never throws: failures are logged and surfaced via the returned result.
/// </summary>
public async Task<MirrorOperationRecordResult> RecordBundleStartedAsync(
    MirrorOperationContext context,
    MirrorBundlePayload payload,
    CancellationToken cancellationToken = default)
{
    try
    {
        // Flatten key request parameters into searchable event attributes.
        var attributes = CreateBaseAttributes(context);
        attributes["domainsCount"] = payload.Domains.Count.ToString();
        attributes["includeProvenance"] = payload.IncludeProvenance.ToString();
        attributes["includeAuditTrail"] = payload.IncludeAuditTrail.ToString();

        // Structured payload carried on the timeline event itself.
        var eventPayload = new
        {
            operationId = context.OperationId,
            domains = payload.Domains,
            targetEnvironment = payload.TargetEnvironment,
            compression = payload.Compression,
            signBundle = payload.SignBundle
        };

        var emitResult = await _timelineEmitter.EmitJobEventAsync(
            context.TenantId,
            context.JobId,
            MirrorEventTypes.BundleStarted,
            payload: eventPayload,
            actor: context.Actor,
            // The operation id doubles as the correlation id across all events of this operation.
            correlationId: context.OperationId.ToString(),
            traceId: context.TraceId,
            projectId: context.ProjectId,
            attributes: attributes,
            cancellationToken: cancellationToken);

        _logger.LogInformation(
            "Recorded bundle started for job {JobId} operation {OperationId}",
            context.JobId, context.OperationId);

        return new MirrorOperationRecordResult(
            Success: emitResult.Success,
            // NOTE(review): assumes emitResult.Event is non-null even when Success is false —
            // confirm the ITimelineEventEmitter contract, otherwise this can throw inside the try.
            EventId: emitResult.Event.EventId,
            CapsuleId: null,
            EvidencePointer: null,
            Error: emitResult.Error);
    }
    catch (Exception ex)
    {
        // Recording must not crash the mirror operation itself; report via the result.
        _logger.LogError(ex, "Failed to record bundle started for job {JobId}", context.JobId);
        return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Records bundle creation progress as a timeline event. Intermediate progress
/// does not create evidence capsules. Never throws: failures are logged and
/// surfaced via the returned result.
/// </summary>
public async Task<MirrorOperationRecordResult> RecordBundleProgressAsync(
    MirrorOperationContext context,
    MirrorBundleProgress progress,
    CancellationToken cancellationToken = default)
{
    try
    {
        // Expose the key progress dimensions as searchable event attributes.
        var attributes = CreateBaseAttributes(context);
        attributes["phase"] = progress.Phase.ToString();
        attributes["domainsProcessed"] = progress.DomainsProcessed.ToString();
        attributes["totalDomains"] = progress.TotalDomains.ToString();

        var emitResult = await _timelineEmitter.EmitJobEventAsync(
            context.TenantId,
            context.JobId,
            MirrorEventTypes.BundleProgress,
            // The full progress record rides along as the event payload.
            payload: progress,
            actor: context.Actor,
            correlationId: context.OperationId.ToString(),
            traceId: context.TraceId,
            projectId: context.ProjectId,
            attributes: attributes,
            cancellationToken: cancellationToken);

        return new MirrorOperationRecordResult(
            Success: emitResult.Success,
            // NOTE(review): assumes emitResult.Event is non-null even on failure — confirm emitter contract.
            EventId: emitResult.Event.EventId,
            CapsuleId: null,
            EvidencePointer: null,
            Error: emitResult.Error);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Failed to record bundle progress for job {JobId}", context.JobId);
        return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Records successful bundle completion: persists a durable evidence record,
/// generates an Evidence Locker job capsule, and emits a completion timeline
/// event. Never throws: failures are logged and surfaced via the returned result.
/// </summary>
public async Task<MirrorOperationRecordResult> RecordBundleCompletedAsync(
    MirrorOperationContext context,
    MirrorBundleResult result,
    CancellationToken cancellationToken = default)
{
    try
    {
        // 1) Durable evidence record of the completed export.
        var evidence = new MirrorOperationEvidence(
            OperationId: context.OperationId,
            OperationType: MirrorOperationType.BundleExport,
            TenantId: context.TenantId,
            ProjectId: context.ProjectId,
            JobId: context.JobId,
            Status: MirrorOperationStatus.Completed,
            // Approximated from the reported duration; the recorder does not know the true start time.
            StartedAt: DateTimeOffset.UtcNow.AddSeconds(-result.DurationSeconds),
            CompletedAt: DateTimeOffset.UtcNow,
            SourceEnvironment: context.SourceEnvironment,
            TargetEnvironment: context.TargetEnvironment,
            BundleDigest: result.BundleDigest,
            ManifestDigest: result.ManifestDigest,
            ProvenanceUri: result.ProvenanceUri,
            AuditTrailUri: result.AuditTrailUri,
            DomainsCount: result.IncludedDomains.Count,
            RecordsCount: result.Exports.Sum(e => e.RecordCount ?? 0),
            SizeBytes: result.BundleSizeBytes,
            DurationSeconds: result.DurationSeconds,
            Error: null);

        await _evidenceStore.StoreAsync(evidence, cancellationToken);

        // 2) Job capsule for the Evidence Locker, carrying the full result JSON.
        var capsuleRequest = new JobCapsuleRequest(
            TenantId: context.TenantId,
            JobId: context.JobId,
            JobType: context.JobType,
            PayloadJson: result.ToJson(),
            ProjectId: context.ProjectId,
            SourceRef: new JobCapsuleSourceRef("mirror.bundle", context.OperationId.ToString(), context.Actor, context.TraceId),
            Environment: new JobCapsuleEnvironment(null, null, null, false, null),
            Metadata: new Dictionary<string, string>
            {
                ["operationId"] = context.OperationId.ToString(),
                ["bundleDigest"] = result.BundleDigest,
                ["sourceEnvironment"] = result.SourceEnvironment
            });

        var outputs = new JobCapsuleOutputs(
            Status: "completed",
            ExitCode: 0,
            ResultSummary: $"Bundle created with {result.IncludedDomains.Count} domains",
            ResultHash: result.BundleDigest,
            DurationSeconds: result.DurationSeconds,
            RetryCount: 0,
            Error: null);

        // One capsule artifact per exported domain.
        // NOTE(review): SizeBytes is hard-coded to 0 — the per-export size does not appear
        // to be available here; confirm whether ExportRecord can supply it.
        var artifacts = result.Exports.Select(e => new JobCapsuleArtifact(
            Name: e.Key,
            Digest: e.ArtifactDigest,
            SizeBytes: 0,
            MediaType: "application/json",
            StorageUri: null,
            Attributes: new Dictionary<string, string> { ["format"] = e.Format.ToString() })).ToList();

        var capsuleResult = await _capsuleGenerator.GenerateJobCompletionCapsuleAsync(
            capsuleRequest, outputs, artifacts, cancellationToken);

        // 3) Completion timeline event with searchable summary attributes.
        var attributes = CreateBaseAttributes(context);
        attributes["bundleDigest"] = result.BundleDigest;
        attributes["domainsCount"] = result.IncludedDomains.Count.ToString();
        attributes["sizeBytes"] = result.BundleSizeBytes.ToString();
        // Invariant culture: attribute text must not vary with the host locale
        // (e.g. "12,34" vs "12.34" for the decimal separator).
        attributes["durationSeconds"] = result.DurationSeconds.ToString("F2", CultureInfo.InvariantCulture);

        var emitResult = await _timelineEmitter.EmitJobEventAsync(
            context.TenantId,
            context.JobId,
            MirrorEventTypes.BundleCompleted,
            payload: new
            {
                operationId = context.OperationId,
                bundleDigest = result.BundleDigest,
                manifestDigest = result.ManifestDigest,
                includedDomains = result.IncludedDomains,
                sizeBytes = result.BundleSizeBytes,
                durationSeconds = result.DurationSeconds,
                provenanceUri = result.ProvenanceUri,
                auditTrailUri = result.AuditTrailUri
            },
            actor: context.Actor,
            correlationId: context.OperationId.ToString(),
            traceId: context.TraceId,
            projectId: context.ProjectId,
            attributes: attributes,
            cancellationToken: cancellationToken);

        _logger.LogInformation(
            "Recorded bundle completed for job {JobId} operation {OperationId}, digest {BundleDigest}",
            context.JobId, context.OperationId, result.BundleDigest);

        return new MirrorOperationRecordResult(
            Success: emitResult.Success,
            EventId: emitResult.Event.EventId,
            CapsuleId: capsuleResult.Capsule?.CapsuleId,
            EvidencePointer: capsuleResult.EvidencePointer,
            Error: emitResult.Error);
    }
    catch (Exception ex)
    {
        // Recording must not crash the mirror operation itself; report via the result.
        _logger.LogError(ex, "Failed to record bundle completed for job {JobId}", context.JobId);
        return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Records a bundle creation failure: persists an evidence record capturing the
/// error and emits a failure timeline event. Never throws: failures are logged
/// and surfaced via the returned result.
/// </summary>
public async Task<MirrorOperationRecordResult> RecordBundleFailedAsync(
    MirrorOperationContext context,
    string errorCode,
    string errorMessage,
    CancellationToken cancellationToken = default)
{
    try
    {
        // Evidence record for the failed export; counts/digests are unknown on
        // failure, so they are recorded as zero/null with only the error populated.
        var evidence = new MirrorOperationEvidence(
            OperationId: context.OperationId,
            OperationType: MirrorOperationType.BundleExport,
            TenantId: context.TenantId,
            ProjectId: context.ProjectId,
            JobId: context.JobId,
            Status: MirrorOperationStatus.Failed,
            // The true start time is unknown here; both timestamps are "now".
            StartedAt: DateTimeOffset.UtcNow,
            CompletedAt: DateTimeOffset.UtcNow,
            SourceEnvironment: context.SourceEnvironment,
            TargetEnvironment: context.TargetEnvironment,
            BundleDigest: null,
            ManifestDigest: null,
            ProvenanceUri: null,
            AuditTrailUri: null,
            DomainsCount: 0,
            RecordsCount: 0,
            SizeBytes: 0,
            DurationSeconds: 0,
            Error: new MirrorOperationError(errorCode, errorMessage));

        await _evidenceStore.StoreAsync(evidence, cancellationToken);

        var attributes = CreateBaseAttributes(context);
        attributes["errorCode"] = errorCode;

        var emitResult = await _timelineEmitter.EmitJobEventAsync(
            context.TenantId,
            context.JobId,
            MirrorEventTypes.BundleFailed,
            payload: new { operationId = context.OperationId, errorCode, errorMessage },
            actor: context.Actor,
            correlationId: context.OperationId.ToString(),
            traceId: context.TraceId,
            projectId: context.ProjectId,
            attributes: attributes,
            cancellationToken: cancellationToken);

        _logger.LogWarning(
            "Recorded bundle failed for job {JobId} operation {OperationId}: {ErrorCode} - {ErrorMessage}",
            context.JobId, context.OperationId, errorCode, errorMessage);

        return new MirrorOperationRecordResult(
            Success: emitResult.Success,
            // NOTE(review): assumes emitResult.Event is non-null even on failure — confirm emitter contract.
            EventId: emitResult.Event.EventId,
            CapsuleId: null,
            EvidencePointer: null,
            Error: emitResult.Error);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Failed to record bundle failed for job {JobId}", context.JobId);
        return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
    }
}
|
||||
|
||||
/// <summary>
/// Records the start of a bundle import operation as a timeline event.
/// Never throws: failures are logged and surfaced via the returned result.
/// </summary>
public async Task<MirrorOperationRecordResult> RecordImportStartedAsync(
    MirrorOperationContext context,
    MirrorImportRequest request,
    CancellationToken cancellationToken = default)
{
    try
    {
        // Surface the validation switches as searchable attributes.
        var attributes = CreateBaseAttributes(context);
        attributes["validateSignatures"] = request.ValidateSignatures.ToString();
        attributes["verifyProvenance"] = request.VerifyProvenance.ToString();

        var emitResult = await _timelineEmitter.EmitJobEventAsync(
            context.TenantId,
            context.JobId,
            MirrorEventTypes.ImportStarted,
            payload: new
            {
                operationId = context.OperationId,
                bundleUri = request.BundleUri,
                expectedDigest = request.ExpectedDigest,
                validateSignatures = request.ValidateSignatures,
                verifyProvenance = request.VerifyProvenance
            },
            actor: context.Actor,
            correlationId: context.OperationId.ToString(),
            traceId: context.TraceId,
            projectId: context.ProjectId,
            attributes: attributes,
            cancellationToken: cancellationToken);

        _logger.LogInformation(
            "Recorded import started for job {JobId} operation {OperationId}",
            context.JobId, context.OperationId);

        return new MirrorOperationRecordResult(
            Success: emitResult.Success,
            // NOTE(review): assumes emitResult.Event is non-null even on failure — confirm emitter contract.
            EventId: emitResult.Event.EventId,
            CapsuleId: null,
            EvidencePointer: null,
            Error: emitResult.Error);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Failed to record import started for job {JobId}", context.JobId);
        return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
    }
}
|
||||
|
||||
    /// <summary>
    /// Records the validation outcome of an imported mirror bundle as a timeline job event.
    /// The <paramref name="validation"/> object itself is emitted as the event payload.
    /// </summary>
    /// <param name="context">Operation context (tenant, job, operation, actor, trace).</param>
    /// <param name="validation">Signature/provenance validation results.</param>
    /// <param name="cancellationToken">Token used to cancel the emit operation.</param>
    /// <returns>Result indicating whether the timeline event was emitted.</returns>
    public async Task<MirrorOperationRecordResult> RecordImportValidatedAsync(
        MirrorOperationContext context,
        MirrorImportValidation validation,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var attributes = CreateBaseAttributes(context);
            attributes["isValid"] = validation.IsValid.ToString();
            attributes["signatureVerified"] = validation.SignatureVerified.ToString();
            attributes["provenanceVerified"] = validation.ProvenanceVerified.ToString();

            var emitResult = await _timelineEmitter.EmitJobEventAsync(
                context.TenantId,
                context.JobId,
                MirrorEventTypes.ImportValidated,
                payload: validation,
                actor: context.Actor,
                correlationId: context.OperationId.ToString(),
                traceId: context.TraceId,
                projectId: context.ProjectId,
                attributes: attributes,
                cancellationToken: cancellationToken);

            // NOTE(review): unlike the sibling Record* methods this one does not log on success — confirm intended.
            return new MirrorOperationRecordResult(
                Success: emitResult.Success,
                EventId: emitResult.Event.EventId,
                CapsuleId: null,
                EvidencePointer: null,
                Error: emitResult.Error);
        }
        catch (Exception ex)
        {
            // Best-effort recording: report the failure through the result rather than throwing.
            _logger.LogError(ex, "Failed to record import validated for job {JobId}", context.JobId);
            return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
        }
    }
|
||||
|
||||
    /// <summary>
    /// Records a successful mirror bundle import: persists a completed
    /// <see cref="MirrorOperationEvidence"/> record, then emits an ImportCompleted timeline event.
    /// </summary>
    /// <param name="context">Operation context (tenant, job, operation, actor, trace).</param>
    /// <param name="result">Import outcome (counts, duration, provenance).</param>
    /// <param name="cancellationToken">Token used to cancel store and emit operations.</param>
    /// <returns>Result indicating whether the timeline event was emitted.</returns>
    public async Task<MirrorOperationRecordResult> RecordImportCompletedAsync(
        MirrorOperationContext context,
        MirrorImportResult result,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var evidence = new MirrorOperationEvidence(
                OperationId: context.OperationId,
                OperationType: MirrorOperationType.BundleImport,
                TenantId: context.TenantId,
                ProjectId: context.ProjectId,
                JobId: context.JobId,
                Status: MirrorOperationStatus.Completed,
                // StartedAt is reconstructed by subtracting the reported duration from "now";
                // it is approximate, not the actual start wall-clock time.
                StartedAt: DateTimeOffset.UtcNow.AddSeconds(-result.DurationSeconds),
                CompletedAt: DateTimeOffset.UtcNow,
                SourceEnvironment: result.Provenance.SourceEnvironment,
                TargetEnvironment: context.TargetEnvironment,
                BundleDigest: result.Provenance.BundleDigest,
                ManifestDigest: null,
                ProvenanceUri: null,
                AuditTrailUri: null,
                DomainsCount: result.DomainsImported,
                RecordsCount: result.RecordsImported,
                // Bundle size is not available on the import result; recorded as 0.
                SizeBytes: 0,
                DurationSeconds: result.DurationSeconds,
                Error: null);

            await _evidenceStore.StoreAsync(evidence, cancellationToken);

            var attributes = CreateBaseAttributes(context);
            attributes["domainsImported"] = result.DomainsImported.ToString();
            attributes["recordsImported"] = result.RecordsImported.ToString();
            attributes["durationSeconds"] = result.DurationSeconds.ToString("F2");

            var emitResult = await _timelineEmitter.EmitJobEventAsync(
                context.TenantId,
                context.JobId,
                MirrorEventTypes.ImportCompleted,
                payload: new
                {
                    operationId = context.OperationId,
                    domainsImported = result.DomainsImported,
                    recordsImported = result.RecordsImported,
                    durationSeconds = result.DurationSeconds,
                    provenance = result.Provenance
                },
                actor: context.Actor,
                correlationId: context.OperationId.ToString(),
                traceId: context.TraceId,
                projectId: context.ProjectId,
                attributes: attributes,
                cancellationToken: cancellationToken);

            _logger.LogInformation(
                "Recorded import completed for job {JobId} operation {OperationId}, {DomainsImported} domains",
                context.JobId, context.OperationId, result.DomainsImported);

            // NOTE(review): assumes emitResult.Event is non-null even on emit failure — verify the emitter's contract.
            return new MirrorOperationRecordResult(
                Success: emitResult.Success,
                EventId: emitResult.Event.EventId,
                CapsuleId: null,
                EvidencePointer: null,
                Error: emitResult.Error);
        }
        catch (Exception ex)
        {
            // Best-effort recording: report the failure through the result rather than throwing.
            _logger.LogError(ex, "Failed to record import completed for job {JobId}", context.JobId);
            return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
        }
    }
|
||||
|
||||
    /// <summary>
    /// Records a failed mirror bundle import: persists a failed
    /// <see cref="MirrorOperationEvidence"/> record, then emits an ImportFailed timeline event.
    /// </summary>
    /// <param name="context">Operation context (tenant, job, operation, actor, trace).</param>
    /// <param name="errorCode">Machine-readable error code.</param>
    /// <param name="errorMessage">Human-readable error description.</param>
    /// <param name="cancellationToken">Token used to cancel store and emit operations.</param>
    /// <returns>Result indicating whether the timeline event was emitted.</returns>
    public async Task<MirrorOperationRecordResult> RecordImportFailedAsync(
        MirrorOperationContext context,
        string errorCode,
        string errorMessage,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var evidence = new MirrorOperationEvidence(
                OperationId: context.OperationId,
                OperationType: MirrorOperationType.BundleImport,
                TenantId: context.TenantId,
                ProjectId: context.ProjectId,
                JobId: context.JobId,
                Status: MirrorOperationStatus.Failed,
                // The true start time is unknown at this point; both timestamps are "now",
                // so the recorded duration for failures is always 0.
                StartedAt: DateTimeOffset.UtcNow,
                CompletedAt: DateTimeOffset.UtcNow,
                SourceEnvironment: context.SourceEnvironment,
                TargetEnvironment: context.TargetEnvironment,
                BundleDigest: null,
                ManifestDigest: null,
                ProvenanceUri: null,
                AuditTrailUri: null,
                DomainsCount: 0,
                RecordsCount: 0,
                SizeBytes: 0,
                DurationSeconds: 0,
                Error: new MirrorOperationError(errorCode, errorMessage));

            await _evidenceStore.StoreAsync(evidence, cancellationToken);

            var attributes = CreateBaseAttributes(context);
            attributes["errorCode"] = errorCode;

            var emitResult = await _timelineEmitter.EmitJobEventAsync(
                context.TenantId,
                context.JobId,
                MirrorEventTypes.ImportFailed,
                payload: new { operationId = context.OperationId, errorCode, errorMessage },
                actor: context.Actor,
                correlationId: context.OperationId.ToString(),
                traceId: context.TraceId,
                projectId: context.ProjectId,
                attributes: attributes,
                cancellationToken: cancellationToken);

            _logger.LogWarning(
                "Recorded import failed for job {JobId} operation {OperationId}: {ErrorCode} - {ErrorMessage}",
                context.JobId, context.OperationId, errorCode, errorMessage);

            // NOTE(review): assumes emitResult.Event is non-null even on emit failure — verify the emitter's contract.
            return new MirrorOperationRecordResult(
                Success: emitResult.Success,
                EventId: emitResult.Event.EventId,
                CapsuleId: null,
                EvidencePointer: null,
                Error: emitResult.Error);
        }
        catch (Exception ex)
        {
            // Best-effort recording: report the failure through the result rather than throwing.
            _logger.LogError(ex, "Failed to record import failed for job {JobId}", context.JobId);
            return new MirrorOperationRecordResult(false, null, null, null, ex.Message);
        }
    }
|
||||
|
||||
private static Dictionary<string, string> CreateBaseAttributes(MirrorOperationContext context) =>
|
||||
new()
|
||||
{
|
||||
["operationId"] = context.OperationId.ToString(),
|
||||
["jobType"] = context.JobType,
|
||||
["sourceEnvironment"] = context.SourceEnvironment
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
/// Evidence record for mirror operations. Persisted via <see cref="IMirrorEvidenceStore"/>
/// and keyed by <see cref="OperationId"/>. Counts/sizes are recorded as 0 when unknown,
/// and <see cref="Error"/> is null for successful operations.
/// </summary>
public sealed record MirrorOperationEvidence(
    Guid OperationId,
    MirrorOperationType OperationType,
    string TenantId,
    string? ProjectId,
    Guid JobId,
    MirrorOperationStatus Status,
    DateTimeOffset StartedAt,
    DateTimeOffset CompletedAt,
    string SourceEnvironment,
    string? TargetEnvironment,
    string? BundleDigest,
    string? ManifestDigest,
    string? ProvenanceUri,
    string? AuditTrailUri,
    int DomainsCount,
    int RecordsCount,
    long SizeBytes,
    double DurationSeconds,
    MirrorOperationError? Error);
|
||||
|
||||
/// <summary>
/// Error details for mirror operations: a machine-readable code plus a human-readable message.
/// </summary>
public sealed record MirrorOperationError(string Code, string Message);
|
||||
|
||||
/// <summary>
/// Types of mirror operations.
/// </summary>
public enum MirrorOperationType
{
    /// <summary>Exporting a mirror bundle.</summary>
    BundleExport,

    /// <summary>Importing a mirror bundle.</summary>
    BundleImport,

    /// <summary>Verifying a mirror bundle.</summary>
    BundleVerify,

    /// <summary>Synchronizing mirror bundles.</summary>
    BundleSync,

    /// <summary>Diffing mirror bundles.</summary>
    BundleDiff
}
|
||||
|
||||
/// <summary>
/// Status of mirror operations.
/// </summary>
public enum MirrorOperationStatus
{
    /// <summary>Operation has been started.</summary>
    Started,

    /// <summary>Operation is in progress.</summary>
    InProgress,

    /// <summary>Operation completed successfully.</summary>
    Completed,

    /// <summary>Operation failed.</summary>
    Failed,

    /// <summary>Operation was cancelled.</summary>
    Cancelled
}
|
||||
|
||||
/// <summary>
/// Store for mirror operation evidence.
/// </summary>
public interface IMirrorEvidenceStore
{
    /// <summary>Stores (or overwrites) evidence keyed by its operation id.</summary>
    Task StoreAsync(MirrorOperationEvidence evidence, CancellationToken cancellationToken = default);

    /// <summary>Retrieves evidence for an operation, or null when not found.</summary>
    Task<MirrorOperationEvidence?> GetAsync(Guid operationId, CancellationToken cancellationToken = default);

    /// <summary>Lists all evidence records associated with a job.</summary>
    Task<IReadOnlyList<MirrorOperationEvidence>> ListForJobAsync(Guid jobId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// In-memory mirror evidence store for testing. Thread-safe via a single private gate;
/// StoreAsync overwrites any existing record with the same operation id.
/// </summary>
public sealed class InMemoryMirrorEvidenceStore : IMirrorEvidenceStore
{
    private readonly object _gate = new();
    private readonly Dictionary<Guid, MirrorOperationEvidence> _records = new();

    /// <summary>Stores (or overwrites) evidence keyed by its operation id.</summary>
    public Task StoreAsync(MirrorOperationEvidence evidence, CancellationToken cancellationToken = default)
    {
        lock (_gate)
        {
            _records[evidence.OperationId] = evidence;
        }

        return Task.CompletedTask;
    }

    /// <summary>Retrieves evidence for an operation, or null when not found.</summary>
    public Task<MirrorOperationEvidence?> GetAsync(Guid operationId, CancellationToken cancellationToken = default)
    {
        MirrorOperationEvidence? found;
        lock (_gate)
        {
            _records.TryGetValue(operationId, out found);
        }

        return Task.FromResult(found);
    }

    /// <summary>Lists all evidence records associated with a job.</summary>
    public Task<IReadOnlyList<MirrorOperationEvidence>> ListForJobAsync(Guid jobId, CancellationToken cancellationToken = default)
    {
        IReadOnlyList<MirrorOperationEvidence> matches;
        lock (_gate)
        {
            matches = _records.Values.Where(e => e.JobId == jobId).ToList();
        }

        return Task.FromResult(matches);
    }

    /// <summary>Removes all stored evidence.</summary>
    public void Clear()
    {
        lock (_gate)
        {
            _records.Clear();
        }
    }

    /// <summary>Number of stored evidence records.</summary>
    public int Count
    {
        get
        {
            lock (_gate)
            {
                return _records.Count;
            }
        }
    }
}
|
||||
// ===== file boundary (residual diff hunk header: @@ -0,0 +1,362 @@) — split into its own file =====
|
||||
namespace StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
/// <summary>
/// Represents a pack in the registry with tenant/project scoping.
/// Per 150.B-PacksRegistry: Pack versioning and lifecycle management.
/// Lifecycle: Draft -> Published -> Deprecated -> Archived (Archived is terminal).
/// </summary>
public sealed record Pack(
    Guid PackId,
    string TenantId,
    string? ProjectId,
    string Name,
    string DisplayName,
    string? Description,
    PackStatus Status,
    string CreatedBy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? UpdatedBy,
    string? Metadata,
    string? Tags,
    string? IconUri,
    int VersionCount,
    string? LatestVersion,
    DateTimeOffset? PublishedAt,
    string? PublishedBy)
{
    /// <summary>
    /// Creates a new pack in <see cref="PackStatus.Draft"/> status with zero versions.
    /// </summary>
    /// <param name="packId">Identifier for the new pack.</param>
    /// <param name="tenantId">Owning tenant; must be non-empty.</param>
    /// <param name="projectId">Optional project scope.</param>
    /// <param name="name">Registry name; normalized to lowercase (invariant) on creation.</param>
    /// <param name="displayName">Human-readable name; must be non-empty.</param>
    /// <param name="description">Optional description.</param>
    /// <param name="createdBy">Creating actor; must be non-empty.</param>
    /// <param name="metadata">Optional metadata blob.</param>
    /// <param name="tags">Optional tags blob.</param>
    /// <param name="iconUri">Optional icon URI.</param>
    /// <param name="createdAt">Creation timestamp; defaults to UtcNow (inject for determinism in tests).</param>
    /// <exception cref="ArgumentException">When a required string argument is null or whitespace.</exception>
    public static Pack Create(
        Guid packId,
        string tenantId,
        string? projectId,
        string name,
        string displayName,
        string? description,
        string createdBy,
        string? metadata = null,
        string? tags = null,
        string? iconUri = null,
        DateTimeOffset? createdAt = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(name);
        ArgumentException.ThrowIfNullOrWhiteSpace(displayName);
        ArgumentException.ThrowIfNullOrWhiteSpace(createdBy);

        var now = createdAt ?? DateTimeOffset.UtcNow;

        return new Pack(
            PackId: packId,
            TenantId: tenantId,
            ProjectId: projectId,
            // Names are stored lowercase for case-insensitive registry lookups.
            Name: name.ToLowerInvariant(),
            DisplayName: displayName,
            Description: description,
            Status: PackStatus.Draft,
            CreatedBy: createdBy,
            CreatedAt: now,
            UpdatedAt: now,
            UpdatedBy: null,
            Metadata: metadata,
            Tags: tags,
            IconUri: iconUri,
            VersionCount: 0,
            LatestVersion: null,
            PublishedAt: null,
            PublishedBy: null);
    }

    /// <summary>
    /// Whether the pack is in a terminal state (only Archived is terminal).
    /// </summary>
    public bool IsTerminal => Status is PackStatus.Archived;

    /// <summary>
    /// Whether the pack can accept new versions (Draft or Published).
    /// </summary>
    public bool CanAddVersion => Status is PackStatus.Draft or PackStatus.Published;

    /// <summary>
    /// Whether the pack can be published (Draft with at least one version).
    /// </summary>
    public bool CanPublish => Status == PackStatus.Draft && VersionCount > 0;

    /// <summary>
    /// Whether the pack can be deprecated (Published only).
    /// </summary>
    public bool CanDeprecate => Status == PackStatus.Published;

    /// <summary>
    /// Whether the pack can be archived (Draft or Deprecated).
    /// </summary>
    public bool CanArchive => Status is PackStatus.Draft or PackStatus.Deprecated;

    /// <summary>
    /// Creates a copy with updated status. When transitioning to Published,
    /// also stamps PublishedAt/PublishedBy; otherwise those fields are preserved.
    /// Note: this method does not enforce the Can* guards — callers should check them.
    /// </summary>
    public Pack WithStatus(PackStatus newStatus, string updatedBy, DateTimeOffset? updatedAt = null)
    {
        var now = updatedAt ?? DateTimeOffset.UtcNow;
        return this with
        {
            Status = newStatus,
            UpdatedAt = now,
            UpdatedBy = updatedBy,
            PublishedAt = newStatus == PackStatus.Published ? now : PublishedAt,
            PublishedBy = newStatus == PackStatus.Published ? updatedBy : PublishedBy
        };
    }

    /// <summary>
    /// Creates a copy with incremented version count and the given version recorded as latest.
    /// </summary>
    public Pack WithVersionAdded(string version, string updatedBy, DateTimeOffset? updatedAt = null)
    {
        var now = updatedAt ?? DateTimeOffset.UtcNow;
        return this with
        {
            VersionCount = VersionCount + 1,
            LatestVersion = version,
            UpdatedAt = now,
            UpdatedBy = updatedBy
        };
    }
}
|
||||
|
||||
/// <summary>
/// Pack lifecycle status. Values are explicit and stable for persistence.
/// </summary>
public enum PackStatus
{
    /// <summary>
    /// Pack is in draft mode, not yet published.
    /// </summary>
    Draft = 0,

    /// <summary>
    /// Pack is published and available for use.
    /// </summary>
    Published = 1,

    /// <summary>
    /// Pack is deprecated but still usable.
    /// </summary>
    Deprecated = 2,

    /// <summary>
    /// Pack is archived and no longer usable (terminal state).
    /// </summary>
    Archived = 3
}
|
||||
|
||||
/// <summary>
/// Represents a version of a pack with artifact provenance.
/// Per 150.B-PacksRegistry: Pack artifact storage with provenance metadata.
/// Lifecycle mirrors <see cref="PackVersionStatus"/>: Draft -> Published -> Deprecated -> Archived.
/// </summary>
public sealed record PackVersion(
    Guid PackVersionId,
    string TenantId,
    Guid PackId,
    string Version,
    string? SemVer,
    PackVersionStatus Status,
    string ArtifactUri,
    string ArtifactDigest,
    string? ArtifactMimeType,
    long? ArtifactSizeBytes,
    string? ManifestJson,
    string? ManifestDigest,
    string? ReleaseNotes,
    string? MinEngineVersion,
    string? Dependencies,
    string CreatedBy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? UpdatedBy,
    DateTimeOffset? PublishedAt,
    string? PublishedBy,
    DateTimeOffset? DeprecatedAt,
    string? DeprecatedBy,
    string? DeprecationReason,
    string? SignatureUri,
    string? SignatureAlgorithm,
    string? SignedBy,
    DateTimeOffset? SignedAt,
    string? Metadata,
    int DownloadCount)
{
    /// <summary>
    /// Creates a new pack version in <see cref="PackVersionStatus.Draft"/> status,
    /// unsigned, with a zero download count.
    /// </summary>
    /// <param name="createdAt">Creation timestamp; defaults to UtcNow (inject for determinism in tests).</param>
    /// <exception cref="ArgumentException">When a required string argument is null or whitespace.</exception>
    public static PackVersion Create(
        Guid packVersionId,
        string tenantId,
        Guid packId,
        string version,
        string? semVer,
        string artifactUri,
        string artifactDigest,
        string? artifactMimeType,
        long? artifactSizeBytes,
        string? manifestJson,
        string? manifestDigest,
        string? releaseNotes,
        string? minEngineVersion,
        string? dependencies,
        string createdBy,
        string? metadata = null,
        DateTimeOffset? createdAt = null)
    {
        // Required fields: tenant, version label, artifact location + digest, and actor.
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(version);
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactUri);
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(createdBy);

        var now = createdAt ?? DateTimeOffset.UtcNow;

        return new PackVersion(
            PackVersionId: packVersionId,
            TenantId: tenantId,
            PackId: packId,
            Version: version,
            SemVer: semVer,
            Status: PackVersionStatus.Draft,
            ArtifactUri: artifactUri,
            ArtifactDigest: artifactDigest,
            ArtifactMimeType: artifactMimeType,
            ArtifactSizeBytes: artifactSizeBytes,
            ManifestJson: manifestJson,
            ManifestDigest: manifestDigest,
            ReleaseNotes: releaseNotes,
            MinEngineVersion: minEngineVersion,
            Dependencies: dependencies,
            CreatedBy: createdBy,
            CreatedAt: now,
            UpdatedAt: now,
            UpdatedBy: null,
            PublishedAt: null,
            PublishedBy: null,
            DeprecatedAt: null,
            DeprecatedBy: null,
            DeprecationReason: null,
            SignatureUri: null,
            SignatureAlgorithm: null,
            SignedBy: null,
            SignedAt: null,
            Metadata: metadata,
            DownloadCount: 0);
    }

    /// <summary>
    /// Whether the version is in a terminal state (only Archived is terminal).
    /// </summary>
    public bool IsTerminal => Status == PackVersionStatus.Archived;

    /// <summary>
    /// Whether the version can be published (Draft only).
    /// </summary>
    public bool CanPublish => Status == PackVersionStatus.Draft;

    /// <summary>
    /// Whether the version can be deprecated (Published only).
    /// </summary>
    public bool CanDeprecate => Status == PackVersionStatus.Published;

    /// <summary>
    /// Whether the version can be archived (Draft or Deprecated).
    /// </summary>
    public bool CanArchive => Status is PackVersionStatus.Draft or PackVersionStatus.Deprecated;

    /// <summary>
    /// Whether the version is signed (determined solely by the presence of a signature URI).
    /// </summary>
    public bool IsSigned => !string.IsNullOrEmpty(SignatureUri);

    /// <summary>
    /// Creates a copy with updated status. When transitioning to Published,
    /// also stamps PublishedAt/PublishedBy; otherwise those fields are preserved.
    /// Note: this method does not enforce the Can* guards — callers should check them.
    /// </summary>
    public PackVersion WithStatus(PackVersionStatus newStatus, string updatedBy, DateTimeOffset? updatedAt = null)
    {
        var now = updatedAt ?? DateTimeOffset.UtcNow;
        return this with
        {
            Status = newStatus,
            UpdatedAt = now,
            UpdatedBy = updatedBy,
            PublishedAt = newStatus == PackVersionStatus.Published ? now : PublishedAt,
            PublishedBy = newStatus == PackVersionStatus.Published ? updatedBy : PublishedBy
        };
    }

    /// <summary>
    /// Creates a copy marked Deprecated with actor, timestamp, and optional reason recorded.
    /// </summary>
    public PackVersion WithDeprecation(string deprecatedBy, string? reason, DateTimeOffset? deprecatedAt = null)
    {
        var now = deprecatedAt ?? DateTimeOffset.UtcNow;
        return this with
        {
            Status = PackVersionStatus.Deprecated,
            UpdatedAt = now,
            UpdatedBy = deprecatedBy,
            DeprecatedAt = now,
            DeprecatedBy = deprecatedBy,
            DeprecationReason = reason
        };
    }

    /// <summary>
    /// Creates a copy with signature info attached; also updates the audit fields.
    /// </summary>
    public PackVersion WithSignature(
        string signatureUri,
        string signatureAlgorithm,
        string signedBy,
        DateTimeOffset? signedAt = null)
    {
        var now = signedAt ?? DateTimeOffset.UtcNow;
        return this with
        {
            SignatureUri = signatureUri,
            SignatureAlgorithm = signatureAlgorithm,
            SignedBy = signedBy,
            SignedAt = now,
            UpdatedAt = now,
            UpdatedBy = signedBy
        };
    }

    /// <summary>
    /// Creates a copy with incremented download count (UpdatedAt is intentionally untouched).
    /// </summary>
    public PackVersion WithDownload() => this with { DownloadCount = DownloadCount + 1 };
}
|
||||
|
||||
/// <summary>
/// Pack version lifecycle status. Values are explicit and stable for persistence.
/// </summary>
public enum PackVersionStatus
{
    /// <summary>
    /// Version is in draft mode.
    /// </summary>
    Draft = 0,

    /// <summary>
    /// Version is published and available.
    /// </summary>
    Published = 1,

    /// <summary>
    /// Version is deprecated but still available.
    /// </summary>
    Deprecated = 2,

    /// <summary>
    /// Version is archived and no longer available (terminal state).
    /// </summary>
    Archived = 3
}
|
||||
// ===== file boundary (residual diff hunk header: @@ -0,0 +1,301 @@) — split into its own file =====
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
/// DSSE attestation for orchestrator-scheduled jobs.
/// Per ORCH-OBS-54-001.
/// </summary>
public sealed record JobAttestation(
    /// <summary>Attestation identifier.</summary>
    Guid AttestationId,

    /// <summary>Tenant scope.</summary>
    string TenantId,

    /// <summary>Job being attested.</summary>
    Guid JobId,

    /// <summary>Optional run identifier.</summary>
    Guid? RunId,

    /// <summary>Optional project identifier.</summary>
    string? ProjectId,

    /// <summary>In-toto statement type.</summary>
    string StatementType,

    /// <summary>Predicate type URI.</summary>
    string PredicateType,

    /// <summary>Attestation subjects.</summary>
    IReadOnlyList<AttestationSubject> Subjects,

    /// <summary>DSSE envelope containing the signed statement.</summary>
    DsseEnvelope Envelope,

    /// <summary>When attestation was created.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>Digest of the attestation payload.</summary>
    string PayloadDigest,

    /// <summary>Optional evidence pointer for Evidence Locker.</summary>
    EvidencePointer? EvidencePointer)
{
    /// <summary>Current schema version.</summary>
    public const string CurrentSchemaVersion = "1.0.0";

    /// <summary>In-toto statement type v1.</summary>
    public const string InTotoStatementV1 = "https://in-toto.io/Statement/v1";

    /// <summary>In-toto statement type v0.1.</summary>
    public const string InTotoStatementV01 = "https://in-toto.io/Statement/v0.1";

    // Cached serializer options (CA1869): JsonSerializerOptions caches reflection
    // metadata per instance and is thread-safe for reuse. The previous code built
    // a fresh instance on every ToJson/FromJson call, defeating that cache.
    // WriteIndented defaults to false, so one shared instance serves both directions.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Serializes the attestation to compact camelCase JSON, omitting null properties.
    /// </summary>
    public string ToJson() => JsonSerializer.Serialize(this, SerializerOptions);

    /// <summary>
    /// Deserializes an attestation from JSON produced by <see cref="ToJson"/>.
    /// </summary>
    /// <exception cref="JsonException">When <paramref name="json"/> is not valid JSON for this shape.</exception>
    public static JobAttestation? FromJson(string json) =>
        JsonSerializer.Deserialize<JobAttestation>(json, SerializerOptions);
}
|
||||
|
||||
/// <summary>
/// Subject of an attestation: an artifact name/URI with its digests.
/// </summary>
public sealed record AttestationSubject(
    /// <summary>Subject name/URI.</summary>
    string Name,

    /// <summary>Subject digests keyed by algorithm (e.g. "sha256").</summary>
    IReadOnlyDictionary<string, string> Digest);
|
||||
|
||||
/// <summary>
/// DSSE envelope containing signed attestation.
/// </summary>
public sealed record DsseEnvelope(
    /// <summary>Base64-encoded payload.</summary>
    string Payload,

    /// <summary>Payload MIME type.</summary>
    string PayloadType,

    /// <summary>Signatures over the envelope.</summary>
    IReadOnlyList<DsseSignature> Signatures)
{
    /// <summary>Standard payload type for in-toto statements.</summary>
    public const string InTotoPayloadType = "application/vnd.in-toto+json";

    /// <summary>
    /// Decodes the payload from base64.
    /// </summary>
    /// <exception cref="FormatException">When <see cref="Payload"/> is not valid base64.</exception>
    public byte[] DecodePayload() => Convert.FromBase64String(Payload);

    /// <summary>
    /// Computes the SHA-256 digest of the decoded payload,
    /// formatted as "sha256:" followed by lowercase hex.
    /// </summary>
    public string ComputePayloadDigest()
    {
        var payloadBytes = DecodePayload();
        var hash = SHA256.HashData(payloadBytes);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
/// DSSE signature: an optional key identifier plus the signature bytes in base64.
/// </summary>
public sealed record DsseSignature(
    /// <summary>Key identifier.</summary>
    string? KeyId,

    /// <summary>Base64-encoded signature.</summary>
    string Sig);
|
||||
|
||||
/// <summary>
/// In-toto statement for job attestation.
/// </summary>
public sealed record InTotoStatement(
    /// <summary>Statement type.</summary>
    [property: JsonPropertyName("_type")]
    string Type,

    /// <summary>Subjects being attested.</summary>
    IReadOnlyList<InTotoSubject> Subject,

    /// <summary>Predicate type URI.</summary>
    string PredicateType,

    /// <summary>Predicate content.</summary>
    JsonElement Predicate)
{
    /// <summary>
    /// Serializes to canonical JSON (deterministic): object keys produced by this
    /// type are emitted in ordinal-lexicographic order at every level it controls.
    /// (The predicate's own internal key order is whatever its JsonElement carries.)
    /// </summary>
    public byte[] ToCanonicalJson()
    {
        var options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
        };

        // Fix: the previous implementation built a plain Dictionary and appended
        // "predicate" last, so key order depended on insertion order and the output
        // was NOT key-sorted despite claiming to be canonical. SortedDictionary with
        // an ordinal comparer guarantees lexicographic order regardless of insertion:
        // _type, predicate, predicateType, subject.
        var canonical = new SortedDictionary<string, object>(StringComparer.Ordinal)
        {
            ["_type"] = Type,
            ["predicate"] = Predicate,
            ["predicateType"] = PredicateType,
            ["subject"] = Subject.Select(s => new SortedDictionary<string, object>(StringComparer.Ordinal)
            {
                // Digest keys are ordinal-sorted too, so equal statements always
                // produce byte-identical output.
                ["digest"] = s.Digest.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
                                     .ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
                ["name"] = s.Name
            }).ToList()
        };

        return JsonSerializer.SerializeToUtf8Bytes(canonical, options);
    }
}
|
||||
|
||||
/// <summary>
/// In-toto subject: an artifact name with its digests.
/// </summary>
public sealed record InTotoSubject(
    /// <summary>Subject name.</summary>
    string Name,

    /// <summary>Subject digests keyed by algorithm (e.g. "sha256").</summary>
    IReadOnlyDictionary<string, string> Digest);
|
||||
|
||||
/// <summary>
/// Predicate for job completion attestation. Serialized into the in-toto
/// statement's predicate; all nullable members are optional context.
/// </summary>
public sealed record JobCompletionPredicate(
    /// <summary>Job identifier.</summary>
    Guid JobId,

    /// <summary>Run identifier.</summary>
    Guid? RunId,

    /// <summary>Job type.</summary>
    string JobType,

    /// <summary>Tenant identifier.</summary>
    string TenantId,

    /// <summary>Project identifier.</summary>
    string? ProjectId,

    /// <summary>Job status.</summary>
    string Status,

    /// <summary>Exit code if applicable.</summary>
    int? ExitCode,

    /// <summary>When job started.</summary>
    DateTimeOffset? StartedAt,

    /// <summary>When job completed.</summary>
    DateTimeOffset CompletedAt,

    /// <summary>Duration in seconds.</summary>
    double DurationSeconds,

    /// <summary>Input hash.</summary>
    string? InputHash,

    /// <summary>Output hash.</summary>
    string? OutputHash,

    /// <summary>Artifact digests.</summary>
    IReadOnlyList<ArtifactDigest>? Artifacts,

    /// <summary>Environment information.</summary>
    JobEnvironmentInfo? Environment,

    /// <summary>Evidence capsule reference.</summary>
    string? CapsuleId,

    /// <summary>Evidence capsule digest.</summary>
    string? CapsuleDigest);
|
||||
|
||||
/// <summary>
/// Artifact digest record: name, digest, and size for one produced artifact.
/// </summary>
public sealed record ArtifactDigest(
    /// <summary>Artifact name.</summary>
    string Name,

    /// <summary>Artifact digest.</summary>
    string Digest,

    /// <summary>Artifact size in bytes.</summary>
    long SizeBytes);
|
||||
|
||||
/// <summary>
/// Job environment information for attestation; every field is optional.
/// </summary>
public sealed record JobEnvironmentInfo(
    /// <summary>Worker node identifier.</summary>
    string? WorkerNode,

    /// <summary>Runtime version.</summary>
    string? RuntimeVersion,

    /// <summary>Container image digest.</summary>
    string? ImageDigest);
|
||||
|
||||
/// <summary>
/// Well-known predicate types for StellaOps. All StellaOps predicate type URIs
/// share the "stella.ops/" prefix checked by <see cref="IsStellaOpsType"/>.
/// </summary>
public static class JobPredicateTypes
{
    // Shared namespace prefix for all StellaOps predicate types.
    private const string StellaOpsPrefix = "stella.ops/";

    /// <summary>Job completion attestation.</summary>
    public const string JobCompletion = "stella.ops/job-completion@v1";

    /// <summary>Job scheduling attestation.</summary>
    public const string JobScheduling = "stella.ops/job-scheduling@v1";

    /// <summary>Run completion attestation.</summary>
    public const string RunCompletion = "stella.ops/run-completion@v1";

    /// <summary>Evidence bundle attestation.</summary>
    public const string Evidence = "stella.ops/evidence@v1";

    /// <summary>Mirror bundle attestation.</summary>
    public const string MirrorBundle = "stella.ops/mirror-bundle@v1";

    /// <summary>
    /// Checks if a predicate type is a StellaOps type (ordinal prefix match).
    /// </summary>
    public static bool IsStellaOpsType(string predicateType)
    {
        return predicateType.StartsWith(StellaOpsPrefix, StringComparison.Ordinal);
    }
}
|
||||
// ===== file boundary (residual diff hunk header: @@ -0,0 +1,819 @@) — split into its own file =====
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
|
||||
/// Service for generating DSSE attestations for orchestrator jobs.
|
||||
/// Per ORCH-OBS-54-001.
|
||||
/// </summary>
|
||||
public interface IJobAttestationService
|
||||
{
|
||||
/// <summary>
|
||||
/// Generates a job completion attestation.
|
||||
/// </summary>
|
||||
Task<JobAttestationResult> GenerateJobCompletionAttestationAsync(
|
||||
JobAttestationRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Generates a job scheduling attestation.
|
||||
/// </summary>
|
||||
Task<JobAttestationResult> GenerateJobSchedulingAttestationAsync(
|
||||
JobAttestationRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Generates a run completion attestation.
|
||||
/// </summary>
|
||||
Task<JobAttestationResult> GenerateRunCompletionAttestationAsync(
|
||||
string tenantId,
|
||||
Guid runId,
|
||||
string? projectId,
|
||||
IReadOnlyList<JobAttestation> jobAttestations,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves an attestation by job ID.
|
||||
/// </summary>
|
||||
Task<JobAttestation?> GetJobAttestationAsync(
|
||||
Guid jobId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Verifies an attestation's signatures.
|
||||
/// </summary>
|
||||
Task<AttestationVerificationResult> VerifyAttestationAsync(
|
||||
JobAttestation attestation,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
/// Request for generating a job attestation.
/// Raw payload JSON is never embedded in attestations; the service hashes
/// <see cref="InputPayloadJson"/> and <see cref="OutputPayloadJson"/> (sha256) instead.
/// </summary>
public sealed record JobAttestationRequest(
    /// <summary>Tenant identifier.</summary>
    string TenantId,

    /// <summary>Job identifier.</summary>
    Guid JobId,

    /// <summary>Optional run identifier.</summary>
    Guid? RunId,

    /// <summary>Job type.</summary>
    string JobType,

    /// <summary>Optional project identifier.</summary>
    string? ProjectId,

    /// <summary>Job status.</summary>
    string Status,

    /// <summary>Exit code if applicable.</summary>
    int? ExitCode,

    /// <summary>When job started.</summary>
    DateTimeOffset? StartedAt,

    /// <summary>When job completed; the service substitutes the current UTC time when null.</summary>
    DateTimeOffset? CompletedAt,

    /// <summary>Duration in seconds.</summary>
    double DurationSeconds,

    /// <summary>Input payload JSON; hashed, not embedded.</summary>
    string? InputPayloadJson,

    /// <summary>Output payload JSON; hashed, not embedded.</summary>
    string? OutputPayloadJson,

    /// <summary>Artifact information.</summary>
    IReadOnlyList<JobCapsuleArtifact>? Artifacts,

    /// <summary>Environment information.</summary>
    JobCapsuleEnvironment? Environment,

    /// <summary>Evidence capsule if available; when present it is added as an extra attestation subject.</summary>
    JobCapsule? Capsule);
|
||||
|
||||
/// <summary>
/// Result of generating a job attestation.
/// On failure, <see cref="Attestation"/> and <see cref="EvidencePointer"/> are null
/// and <see cref="Error"/> carries the reason.
/// </summary>
public sealed record JobAttestationResult(
    /// <summary>Whether generation succeeded.</summary>
    bool Success,

    /// <summary>Generated attestation; null on failure.</summary>
    JobAttestation? Attestation,

    /// <summary>Evidence pointer for timeline; null on failure.</summary>
    EvidencePointer? EvidencePointer,

    /// <summary>Error message if failed; null on success.</summary>
    string? Error);
|
||||
|
||||
/// <summary>
/// Result of verifying an attestation.
/// A result can be valid yet still carry warnings (e.g. age-related notices).
/// </summary>
public sealed record AttestationVerificationResult(
    /// <summary>Whether verification succeeded.</summary>
    bool Valid,

    /// <summary>Key ID that signed the attestation; null when verification failed.</summary>
    string? SigningKeyId,

    /// <summary>When attestation was created.</summary>
    DateTimeOffset? CreatedAt,

    /// <summary>Verification warnings; null when there are none.</summary>
    IReadOnlyList<string>? Warnings,

    /// <summary>Error message if verification failed; null on success.</summary>
    string? Error);
|
||||
|
||||
/// <summary>
/// Signer interface for DSSE attestations.
/// </summary>
public interface IJobAttestationSigner
{
    /// <summary>
    /// Signs a payload and returns a DSSE envelope.
    /// </summary>
    /// <param name="payload">Raw payload bytes to wrap and sign.</param>
    /// <param name="payloadType">DSSE payload type identifier carried in the envelope.</param>
    /// <param name="cancellationToken">Token used to cancel the operation.</param>
    /// <returns>A DSSE envelope containing the base64 payload and at least one signature.</returns>
    Task<DsseEnvelope> SignAsync(
        byte[] payload,
        string payloadType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a DSSE envelope signature.
    /// </summary>
    /// <param name="envelope">Envelope whose signatures should be checked.</param>
    /// <param name="cancellationToken">Token used to cancel the operation.</param>
    /// <returns>True when at least one signature verifies against a known key.</returns>
    Task<bool> VerifyAsync(
        DsseEnvelope envelope,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the current signing key ID.
    /// </summary>
    string GetCurrentKeyId();
}
|
||||
|
||||
/// <summary>
/// Store for job attestations.
/// </summary>
public interface IJobAttestationStore
{
    /// <summary>
    /// Stores an attestation. Keyed by <c>AttestationId</c>; storing the same ID twice is an upsert
    /// in the in-memory implementation — TODO confirm for persistent implementations.
    /// </summary>
    Task StoreAsync(JobAttestation attestation, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves an attestation by ID, or null when not found.
    /// </summary>
    Task<JobAttestation?> GetAsync(Guid attestationId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves attestations for a job.
    /// </summary>
    Task<IReadOnlyList<JobAttestation>> GetForJobAsync(Guid jobId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves attestations for a run.
    /// </summary>
    Task<IReadOnlyList<JobAttestation>> GetForRunAsync(Guid runId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="IJobAttestationService"/>.
/// Builds in-toto statements for orchestrator jobs/runs, signs them as DSSE
/// envelopes via <see cref="IJobAttestationSigner"/>, persists them through
/// <see cref="IJobAttestationStore"/>, and emits timeline events.
/// Generation failures are reported via <see cref="JobAttestationResult.Success"/>;
/// cancellation propagates as <see cref="OperationCanceledException"/>.
/// </summary>
public sealed class JobAttestationService : IJobAttestationService
{
    // Cached: JsonSerializerOptions is expensive to construct and safe to share (CA1869).
    private static readonly JsonSerializerOptions PredicateSerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    private readonly IJobAttestationSigner _signer;
    private readonly IJobAttestationStore _store;
    private readonly ITimelineEventEmitter _timelineEmitter;
    private readonly ILogger<JobAttestationService> _logger;

    public JobAttestationService(
        IJobAttestationSigner signer,
        IJobAttestationStore store,
        ITimelineEventEmitter timelineEmitter,
        ILogger<JobAttestationService> logger)
    {
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _timelineEmitter = timelineEmitter ?? throw new ArgumentNullException(nameof(timelineEmitter));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<JobAttestationResult> GenerateJobCompletionAttestationAsync(
        JobAttestationRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            _logger.LogDebug(
                "Generating job completion attestation for job {JobId} tenant {TenantId}",
                request.JobId, request.TenantId);

            // Predicate embeds only hashes of the payloads, never the raw JSON.
            var predicate = new JobCompletionPredicate(
                JobId: request.JobId,
                RunId: request.RunId,
                JobType: request.JobType,
                TenantId: request.TenantId,
                ProjectId: request.ProjectId,
                Status: request.Status,
                ExitCode: request.ExitCode,
                StartedAt: request.StartedAt,
                CompletedAt: request.CompletedAt ?? DateTimeOffset.UtcNow,
                DurationSeconds: request.DurationSeconds,
                InputHash: ComputePayloadHash(request.InputPayloadJson),
                OutputHash: ComputePayloadHash(request.OutputPayloadJson),
                Artifacts: request.Artifacts?.Select(a =>
                    new ArtifactDigest(a.Name, a.Digest, a.SizeBytes)).ToList(),
                Environment: request.Environment is not null
                    ? new JobEnvironmentInfo(
                        request.Environment.WorkerNode,
                        request.Environment.RuntimeVersion,
                        ImageDigest: null)
                    : null,
                CapsuleId: request.Capsule?.CapsuleId.ToString(),
                CapsuleDigest: request.Capsule?.RootHash);

            var jobSubject = $"job:{request.TenantId}/{request.JobId}";
            var subjects = new List<InTotoSubject>
            {
                new(jobSubject, new Dictionary<string, string>
                {
                    ["sha256"] = ComputeSubjectDigest(request.JobId, request.TenantId, request.JobType)
                })
            };

            // When an evidence capsule exists, it becomes an additional subject
            // so the attestation also covers the capsule's root hash.
            if (request.Capsule is not null)
            {
                subjects.Add(new($"capsule:{request.Capsule.CapsuleId}", new Dictionary<string, string>
                {
                    ["sha256"] = StripSha256Prefix(request.Capsule.RootHash)
                }));
            }

            var attestation = await CreateAttestationAsync(
                request.TenantId,
                request.JobId,
                request.RunId,
                request.ProjectId,
                JobPredicateTypes.JobCompletion,
                subjects,
                predicate,
                cancellationToken);

            await _store.StoreAsync(attestation, cancellationToken);
            await EmitAttestationEventAsync(attestation, "job.attestation.created", cancellationToken);

            var evidencePointer = BuildEvidencePointer(
                attestation,
                jobSubject,
                $"attestations/{attestation.TenantId}/{attestation.AttestationId}.dsse");

            _logger.LogInformation(
                "Generated job completion attestation {AttestationId} for job {JobId}",
                attestation.AttestationId, request.JobId);

            return new JobAttestationResult(true, attestation, evidencePointer, null);
        }
        catch (OperationCanceledException)
        {
            // Cancellation is not a generation failure; let it propagate.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate job completion attestation for job {JobId}", request.JobId);
            return new JobAttestationResult(false, null, null, ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<JobAttestationResult> GenerateJobSchedulingAttestationAsync(
        JobAttestationRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            _logger.LogDebug(
                "Generating job scheduling attestation for job {JobId} tenant {TenantId}",
                request.JobId, request.TenantId);

            // Scheduling predicate is a lightweight anonymous shape: identity plus input hash.
            var predicate = new
            {
                jobId = request.JobId,
                runId = request.RunId,
                jobType = request.JobType,
                tenantId = request.TenantId,
                projectId = request.ProjectId,
                scheduledAt = DateTimeOffset.UtcNow,
                inputHash = ComputePayloadHash(request.InputPayloadJson)
            };

            var jobSubject = $"job:{request.TenantId}/{request.JobId}";
            var subjects = new List<InTotoSubject>
            {
                new(jobSubject, new Dictionary<string, string>
                {
                    ["sha256"] = ComputeSubjectDigest(request.JobId, request.TenantId, request.JobType)
                })
            };

            var attestation = await CreateAttestationAsync(
                request.TenantId,
                request.JobId,
                request.RunId,
                request.ProjectId,
                JobPredicateTypes.JobScheduling,
                subjects,
                predicate,
                cancellationToken);

            await _store.StoreAsync(attestation, cancellationToken);
            await EmitAttestationEventAsync(attestation, "job.attestation.scheduled", cancellationToken);

            var evidencePointer = BuildEvidencePointer(
                attestation,
                jobSubject,
                $"attestations/{attestation.TenantId}/{attestation.AttestationId}.dsse");

            return new JobAttestationResult(true, attestation, evidencePointer, null);
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate job scheduling attestation for job {JobId}", request.JobId);
            return new JobAttestationResult(false, null, null, ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<JobAttestationResult> GenerateRunCompletionAttestationAsync(
        string tenantId,
        Guid runId,
        string? projectId,
        IReadOnlyList<JobAttestation> jobAttestations,
        CancellationToken cancellationToken = default)
    {
        try
        {
            _logger.LogDebug(
                "Generating run completion attestation for run {RunId} with {JobCount} jobs",
                runId, jobAttestations.Count);

            var predicate = new
            {
                runId,
                tenantId,
                projectId,
                completedAt = DateTimeOffset.UtcNow,
                jobCount = jobAttestations.Count,
                jobs = jobAttestations.Select(a => new
                {
                    jobId = a.JobId,
                    attestationId = a.AttestationId,
                    payloadDigest = a.PayloadDigest
                }).ToList()
            };

            var runSubject = $"run:{tenantId}/{runId}";
            var subjects = new List<InTotoSubject>
            {
                new(runSubject, new Dictionary<string, string>
                {
                    ["sha256"] = ComputeRunDigest(runId, tenantId, jobAttestations)
                })
            };

            // Each constituent job attestation is also a subject so the run
            // attestation covers its children by digest.
            foreach (var jobAttestation in jobAttestations)
            {
                subjects.Add(new($"job-attestation:{jobAttestation.AttestationId}", new Dictionary<string, string>
                {
                    ["sha256"] = StripSha256Prefix(jobAttestation.PayloadDigest)
                }));
            }

            var attestation = await CreateAttestationAsync(
                tenantId,
                Guid.Empty, // Run-level attestation: no single job.
                runId,
                projectId,
                JobPredicateTypes.RunCompletion,
                subjects,
                predicate,
                cancellationToken);

            await _store.StoreAsync(attestation, cancellationToken);
            await EmitAttestationEventAsync(attestation, "run.attestation.created", cancellationToken);

            var evidencePointer = BuildEvidencePointer(
                attestation,
                runSubject,
                $"attestations/{tenantId}/runs/{runId}.dsse");

            return new JobAttestationResult(true, attestation, evidencePointer, null);
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate run completion attestation for run {RunId}", runId);
            return new JobAttestationResult(false, null, null, ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<JobAttestation?> GetJobAttestationAsync(
        Guid jobId,
        CancellationToken cancellationToken = default)
    {
        var attestations = await _store.GetForJobAsync(jobId, cancellationToken);

        // Return the most recent completion attestation; scheduling attestations are ignored.
        return attestations
            .Where(a => a.PredicateType == JobPredicateTypes.JobCompletion)
            .OrderByDescending(a => a.CreatedAt)
            .FirstOrDefault();
    }

    /// <inheritdoc />
    public async Task<AttestationVerificationResult> VerifyAttestationAsync(
        JobAttestation attestation,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var warnings = new List<string>();

            // 1. Cryptographic check of the envelope signatures.
            var signatureValid = await _signer.VerifyAsync(attestation.Envelope, cancellationToken);
            if (!signatureValid)
            {
                return Invalid(attestation, "Signature verification failed");
            }

            // 2. The stored digest must match the digest recomputed from the envelope payload.
            var computedDigest = attestation.Envelope.ComputePayloadDigest();
            if (computedDigest != attestation.PayloadDigest)
            {
                return Invalid(
                    attestation,
                    $"Payload digest mismatch: expected {attestation.PayloadDigest}, got {computedDigest}");
            }

            var keyId = attestation.Envelope.Signatures.FirstOrDefault()?.KeyId;

            // 3. Age check: old attestations stay valid but are flagged.
            var age = DateTimeOffset.UtcNow - attestation.CreatedAt;
            if (age > TimeSpan.FromDays(365))
            {
                warnings.Add($"Attestation is older than 1 year ({age.Days} days)");
            }

            return new AttestationVerificationResult(
                Valid: true,
                SigningKeyId: keyId,
                CreatedAt: attestation.CreatedAt,
                Warnings: warnings.Count > 0 ? warnings : null,
                Error: null);
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to verify attestation {AttestationId}", attestation.AttestationId);
            return Invalid(attestation, ex.Message);
        }
    }

    // Serializes the predicate, wraps it in an in-toto statement, signs it as
    // a DSSE envelope, and materializes the JobAttestation record.
    private async Task<JobAttestation> CreateAttestationAsync<TPredicate>(
        string tenantId,
        Guid jobId,
        Guid? runId,
        string? projectId,
        string predicateType,
        IReadOnlyList<InTotoSubject> subjects,
        TPredicate predicate,
        CancellationToken cancellationToken)
    {
        var predicateJson = JsonSerializer.SerializeToElement(predicate, PredicateSerializerOptions);

        var statement = new InTotoStatement(
            Type: JobAttestation.InTotoStatementV1,
            Subject: subjects,
            PredicateType: predicateType,
            Predicate: predicateJson);

        // Canonical JSON so the payload digest is reproducible.
        var payloadBytes = statement.ToCanonicalJson();

        var envelope = await _signer.SignAsync(
            payloadBytes,
            DsseEnvelope.InTotoPayloadType,
            cancellationToken);

        var payloadDigest = "sha256:" + Convert.ToHexString(SHA256.HashData(payloadBytes)).ToLowerInvariant();

        return new JobAttestation(
            AttestationId: Guid.NewGuid(),
            TenantId: tenantId,
            JobId: jobId,
            RunId: runId,
            ProjectId: projectId,
            StatementType: JobAttestation.InTotoStatementV1,
            PredicateType: predicateType,
            Subjects: subjects.Select(s => new AttestationSubject(s.Name, s.Digest)).ToList(),
            Envelope: envelope,
            CreatedAt: DateTimeOffset.UtcNow,
            PayloadDigest: payloadDigest,
            EvidencePointer: null);
    }

    // Emits a timeline event for the attestation: job-scoped when a job ID is
    // present, otherwise run-scoped; silently no-ops for neither.
    private async Task EmitAttestationEventAsync(
        JobAttestation attestation,
        string eventType,
        CancellationToken cancellationToken)
    {
        var payload = new
        {
            attestationId = attestation.AttestationId,
            predicateType = attestation.PredicateType,
            payloadDigest = attestation.PayloadDigest,
            subjectCount = attestation.Subjects.Count
        };

        if (attestation.JobId != Guid.Empty)
        {
            await _timelineEmitter.EmitJobEventAsync(
                attestation.TenantId,
                attestation.JobId,
                eventType,
                payload,
                projectId: attestation.ProjectId,
                cancellationToken: cancellationToken);
        }
        else if (attestation.RunId.HasValue)
        {
            await _timelineEmitter.EmitRunEventAsync(
                attestation.TenantId,
                attestation.RunId.Value,
                eventType,
                payload,
                projectId: attestation.ProjectId,
                cancellationToken: cancellationToken);
        }
    }

    // Builds the evidence pointer shape shared by all three generation paths.
    private static EvidencePointer BuildEvidencePointer(
        JobAttestation attestation,
        string subject,
        string lockerPath)
        => new(
            Type: EvidencePointerType.Attestation,
            BundleId: attestation.AttestationId,
            BundleDigest: attestation.PayloadDigest,
            AttestationSubject: subject,
            AttestationDigest: attestation.PayloadDigest,
            ManifestUri: null,
            LockerPath: lockerPath);

    // Shorthand for a failed verification result.
    private static AttestationVerificationResult Invalid(JobAttestation attestation, string error)
        => new(
            Valid: false,
            SigningKeyId: null,
            CreatedAt: attestation.CreatedAt,
            Warnings: null,
            Error: error);

    // Removes only a leading "sha256:" prefix. (string.Replace would strip
    // interior occurrences too, corrupting adversarial digests.)
    private static string StripSha256Prefix(string digest)
        => digest.StartsWith("sha256:", StringComparison.Ordinal)
            ? digest["sha256:".Length..]
            : digest;

    // Hashes a payload string; empty string for null/empty input (no payload).
    private static string ComputePayloadHash(string? payload)
    {
        if (string.IsNullOrEmpty(payload))
            return string.Empty;

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Deterministic digest over the job's identity (no "sha256:" prefix —
    // used as the value of an in-toto subject digest map).
    private static string ComputeSubjectDigest(Guid jobId, string tenantId, string jobType)
    {
        var data = $"{tenantId}:{jobId}:{jobType}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(data));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Deterministic digest over the run identity and its job attestations,
    // ordered by job ID so input ordering does not affect the result.
    private static string ComputeRunDigest(
        Guid runId,
        string tenantId,
        IReadOnlyList<JobAttestation> jobAttestations)
    {
        var builder = new StringBuilder();
        builder.Append($"{tenantId}:{runId}:");
        foreach (var att in jobAttestations.OrderBy(a => a.JobId))
        {
            builder.Append($"{att.JobId}:{att.PayloadDigest};");
        }
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of job attestation store for testing.
/// All access is synchronized on a private gate object.
/// </summary>
public sealed class InMemoryJobAttestationStore : IJobAttestationStore
{
    private readonly Dictionary<Guid, JobAttestation> _byId = new();
    private readonly object _gate = new();

    /// <summary>Number of attestations currently stored.</summary>
    public int Count
    {
        get
        {
            lock (_gate)
            {
                return _byId.Count;
            }
        }
    }

    /// <inheritdoc />
    public Task StoreAsync(JobAttestation attestation, CancellationToken cancellationToken = default)
    {
        lock (_gate)
        {
            // Upsert semantics: a repeated AttestationId replaces the prior entry.
            _byId[attestation.AttestationId] = attestation;
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<JobAttestation?> GetAsync(Guid attestationId, CancellationToken cancellationToken = default)
    {
        JobAttestation? found;
        lock (_gate)
        {
            found = _byId.GetValueOrDefault(attestationId);
        }

        return Task.FromResult(found);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<JobAttestation>> GetForJobAsync(Guid jobId, CancellationToken cancellationToken = default)
        => Task.FromResult(Snapshot(a => a.JobId == jobId));

    /// <inheritdoc />
    public Task<IReadOnlyList<JobAttestation>> GetForRunAsync(Guid runId, CancellationToken cancellationToken = default)
        => Task.FromResult(Snapshot(a => a.RunId == runId));

    /// <summary>Removes all stored attestations.</summary>
    public void Clear()
    {
        lock (_gate)
        {
            _byId.Clear();
        }
    }

    // Takes a consistent, newest-first snapshot of matching attestations under the gate.
    private IReadOnlyList<JobAttestation> Snapshot(Func<JobAttestation, bool> predicate)
    {
        lock (_gate)
        {
            return _byId.Values
                .Where(predicate)
                .OrderByDescending(a => a.CreatedAt)
                .ToList();
        }
    }
}
|
||||
|
||||
/// <summary>
/// Test implementation of job attestation signer using HMAC-SHA256.
/// Signer and verifier must share the same key; intended for tests, not
/// production DSSE signing (which uses asymmetric keys).
/// </summary>
public sealed class HmacJobAttestationSigner : IJobAttestationSigner
{
    private readonly byte[] _key;
    private readonly string _keyId;

    /// <summary>
    /// Creates a signer. When <paramref name="key"/> is omitted a random
    /// 32-byte key is generated; when <paramref name="keyId"/> is omitted it
    /// is derived from the first four key bytes.
    /// </summary>
    public HmacJobAttestationSigner(byte[]? key = null, string? keyId = null)
    {
        _key = key ?? RandomNumberGenerator.GetBytes(32);
        _keyId = keyId ?? "hmac-key-" + Convert.ToHexString(_key[..4]).ToLowerInvariant();
    }

    /// <inheritdoc />
    public string GetCurrentKeyId() => _keyId;

    /// <inheritdoc />
    public Task<DsseEnvelope> SignAsync(
        byte[] payload,
        string payloadType,
        CancellationToken cancellationToken = default)
    {
        // DSSE signs the PAE (Pre-Authentication Encoding), not the raw payload.
        var pae = ComputePae(payloadType, payload);

        using var hmac = new HMACSHA256(_key);
        var signature = hmac.ComputeHash(pae);

        var envelope = new DsseEnvelope(
            Payload: Convert.ToBase64String(payload),
            PayloadType: payloadType,
            Signatures: new[] { new DsseSignature(_keyId, Convert.ToBase64String(signature)) });

        return Task.FromResult(envelope);
    }

    /// <inheritdoc />
    public Task<bool> VerifyAsync(
        DsseEnvelope envelope,
        CancellationToken cancellationToken = default)
    {
        var payload = envelope.DecodePayload();
        var pae = ComputePae(envelope.PayloadType, payload);

        using var hmac = new HMACSHA256(_key);
        var expectedSignature = hmac.ComputeHash(pae);

        foreach (var sig in envelope.Signatures)
        {
            if (sig.KeyId != _keyId)
            {
                continue;
            }

            byte[] actualSignature;
            try
            {
                actualSignature = Convert.FromBase64String(sig.Sig);
            }
            catch (FormatException)
            {
                // A malformed signature is a verification failure,
                // not an exception surfaced to the caller.
                continue;
            }

            if (CryptographicOperations.FixedTimeEquals(expectedSignature, actualSignature))
            {
                return Task.FromResult(true);
            }
        }

        return Task.FromResult(false);
    }

    /// <summary>
    /// Computes PAE (Pre-Authentication Encoding) per the DSSE spec:
    /// "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload,
    /// where LEN is the UTF-8 byte length rendered as ASCII decimal.
    /// </summary>
    private static byte[] ComputePae(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var prefix = Encoding.UTF8.GetBytes($"DSSEv1 {typeBytes.Length} ");
        var middle = Encoding.UTF8.GetBytes($" {payload.Length} ");

        var pae = new byte[prefix.Length + typeBytes.Length + middle.Length + payload.Length];
        var offset = 0;
        Buffer.BlockCopy(prefix, 0, pae, offset, prefix.Length);
        offset += prefix.Length;
        Buffer.BlockCopy(typeBytes, 0, pae, offset, typeBytes.Length);
        offset += typeBytes.Length;
        Buffer.BlockCopy(middle, 0, pae, offset, middle.Length);
        offset += middle.Length;
        Buffer.BlockCopy(payload, 0, pae, offset, payload.Length);
        return pae;
    }
}
|
||||
|
||||
/// <summary>
/// No-op attestation signer for testing without signing.
/// Produces envelopes with a fixed all-zero placeholder signature and
/// accepts every envelope on verification.
/// </summary>
public sealed class NoOpJobAttestationSigner : IJobAttestationSigner
{
    private const string KeyId = "no-op";

    private NoOpJobAttestationSigner()
    {
    }

    /// <summary>Shared singleton instance.</summary>
    public static NoOpJobAttestationSigner Instance { get; } = new();

    /// <inheritdoc />
    public string GetCurrentKeyId() => KeyId;

    /// <inheritdoc />
    public Task<DsseEnvelope> SignAsync(
        byte[] payload,
        string payloadType,
        CancellationToken cancellationToken = default)
        => Task.FromResult(new DsseEnvelope(
            Payload: Convert.ToBase64String(payload),
            PayloadType: payloadType,
            Signatures: new[] { new DsseSignature(KeyId, Convert.ToBase64String(new byte[32])) }));

    /// <inheritdoc />
    public Task<bool> VerifyAsync(
        DsseEnvelope envelope,
        CancellationToken cancellationToken = default)
        => Task.FromResult(true); // Always valid: this signer performs no cryptography.
}
|
||||
@@ -0,0 +1,425 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
/// Evidence capsule for orchestrator-scheduled jobs containing all materials for Evidence Locker.
/// Per ORCH-OBS-53-001.
/// </summary>
public sealed record JobCapsule(
    /// <summary>Unique capsule identifier.</summary>
    Guid CapsuleId,

    /// <summary>Tenant scope.</summary>
    string TenantId,

    /// <summary>Project scope within tenant.</summary>
    string? ProjectId,

    /// <summary>Job identifier.</summary>
    Guid JobId,

    /// <summary>Run identifier if associated with a run.</summary>
    Guid? RunId,

    /// <summary>Job type identifier.</summary>
    string JobType,

    /// <summary>Capsule kind.</summary>
    JobCapsuleKind Kind,

    /// <summary>When the capsule was created.</summary>
    DateTimeOffset CreatedAt,

    /// <summary>Schema version for forward compatibility.</summary>
    string SchemaVersion,

    /// <summary>Job input parameters (redacted).</summary>
    JobCapsuleInputs Inputs,

    /// <summary>Job outputs and results.</summary>
    JobCapsuleOutputs? Outputs,

    /// <summary>Artifacts produced by the job.</summary>
    IReadOnlyList<JobCapsuleArtifact>? Artifacts,

    /// <summary>Timeline events associated with the job.</summary>
    IReadOnlyList<JobCapsuleTimelineEntry>? TimelineEntries,

    /// <summary>Policy evaluations applied to the job.</summary>
    IReadOnlyList<JobCapsulePolicyResult>? PolicyResults,

    /// <summary>Root hash of all materials (Merkle root).</summary>
    string RootHash,

    /// <summary>Additional metadata.</summary>
    IReadOnlyDictionary<string, string>? Metadata)
{
    /// <summary>Current schema version.</summary>
    public const string CurrentSchemaVersion = "1.0.0";

    // Cached serializer options: camelCase, compact output.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    /// <summary>
    /// Creates a new job capsule, stamping the current UTC time and computing
    /// the root hash over all supplied materials.
    /// </summary>
    public static JobCapsule Create(
        string tenantId,
        Guid jobId,
        string jobType,
        JobCapsuleKind kind,
        JobCapsuleInputs inputs,
        JobCapsuleOutputs? outputs = null,
        IReadOnlyList<JobCapsuleArtifact>? artifacts = null,
        IReadOnlyList<JobCapsuleTimelineEntry>? timelineEntries = null,
        IReadOnlyList<JobCapsulePolicyResult>? policyResults = null,
        string? projectId = null,
        Guid? runId = null,
        IReadOnlyDictionary<string, string>? metadata = null)
    {
        var capsuleId = Guid.NewGuid();
        var createdAt = DateTimeOffset.UtcNow;

        // Compute root hash from all materials
        var rootHash = ComputeRootHash(
            capsuleId, tenantId, jobId, jobType, kind, inputs, outputs, artifacts, timelineEntries, policyResults);

        return new JobCapsule(
            CapsuleId: capsuleId,
            TenantId: tenantId,
            ProjectId: projectId,
            JobId: jobId,
            RunId: runId,
            JobType: jobType,
            Kind: kind,
            CreatedAt: createdAt,
            SchemaVersion: CurrentSchemaVersion,
            Inputs: inputs,
            Outputs: outputs,
            Artifacts: artifacts,
            TimelineEntries: timelineEntries,
            PolicyResults: policyResults,
            RootHash: rootHash,
            Metadata: metadata);
    }

    /// <summary>Serializes the capsule to compact camelCase JSON.</summary>
    public string ToJson() => JsonSerializer.Serialize(this, JsonOptions);

    /// <summary>Deserializes a capsule from JSON.</summary>
    public static JobCapsule? FromJson(string json)
        => JsonSerializer.Deserialize<JobCapsule>(json, JsonOptions);

    /// <summary>Creates an evidence pointer for this capsule.</summary>
    public EvidencePointer ToEvidencePointer()
        => EvidencePointer.Bundle(CapsuleId, RootHash);

    // Builds a deterministic sha256 digest over the capsule's identifying
    // fields and the hashes/ids of its materials. Collections are sorted with
    // ordinal comparisons so the result does not depend on input order or on
    // the current culture.
    private static string ComputeRootHash(
        Guid capsuleId,
        string tenantId,
        Guid jobId,
        string jobType,
        JobCapsuleKind kind,
        JobCapsuleInputs inputs,
        JobCapsuleOutputs? outputs,
        IReadOnlyList<JobCapsuleArtifact>? artifacts,
        IReadOnlyList<JobCapsuleTimelineEntry>? timelineEntries,
        IReadOnlyList<JobCapsulePolicyResult>? policyResults)
    {
        var hashBuilder = new StringBuilder();
        hashBuilder.Append(capsuleId);
        hashBuilder.Append('|');
        hashBuilder.Append(tenantId);
        hashBuilder.Append('|');
        hashBuilder.Append(jobId);
        hashBuilder.Append('|');
        hashBuilder.Append(jobType);
        hashBuilder.Append('|');
        hashBuilder.Append(kind);
        hashBuilder.Append('|');
        hashBuilder.Append(inputs.PayloadHash);

        if (outputs is not null)
        {
            hashBuilder.Append('|');
            hashBuilder.Append(outputs.ResultHash);
        }

        if (artifacts is not null)
        {
            // Ordinal ordering: culture-sensitive sorting would make the hash
            // machine-dependent.
            foreach (var artifact in artifacts.OrderBy(a => a.Name, StringComparer.Ordinal))
            {
                hashBuilder.Append('|');
                hashBuilder.Append(artifact.Digest);
            }
        }

        if (timelineEntries is not null)
        {
            foreach (var entry in timelineEntries.OrderBy(e => e.OccurredAt))
            {
                hashBuilder.Append('|');
                hashBuilder.Append(entry.EventId);
            }
        }

        if (policyResults is not null)
        {
            foreach (var result in policyResults.OrderBy(r => r.PolicyName, StringComparer.Ordinal))
            {
                hashBuilder.Append('|');
                hashBuilder.Append(result.EvaluationHash);
            }
        }

        var bytes = Encoding.UTF8.GetBytes(hashBuilder.ToString());
        var hash = SHA256.HashData(bytes);
        // Consistent with the rest of the file (and portable below .NET 9,
        // which introduced Convert.ToHexStringLower).
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
|
||||
/// <summary>
/// Kind of job capsule.
/// NOTE: member names feed the capsule root hash via ToString(); renaming a member changes hashes.
/// </summary>
public enum JobCapsuleKind
{
    /// <summary>Capsule recording a job being scheduled.</summary>
    JobScheduling,

    /// <summary>Capsule recording successful job completion.</summary>
    JobCompletion,

    /// <summary>Capsule recording a job failure.</summary>
    JobFailure,

    /// <summary>Capsule recording a job cancellation.</summary>
    JobCancellation,

    /// <summary>Summary capsule recording completion of an entire run.</summary>
    RunCompletion
}
|
||||
|
||||
/// <summary>
/// Job input parameters captured in a capsule. The raw payload is never stored — only its
/// SHA-256 digest — so capsules are safe to persist in the Evidence Locker.
/// </summary>
public sealed record JobCapsuleInputs(
    /// <summary>SHA-256 digest ("sha256:&lt;hex&gt;") of the (redacted) job payload.</summary>
    string PayloadHash,

    /// <summary>Scheduling parameters, when the job specified any.</summary>
    JobCapsuleSchedulingParams? SchedulingParams,

    /// <summary>Reference to what triggered the job (schedule, trigger, API, ...).</summary>
    JobCapsuleSourceRef? SourceRef,

    /// <summary>Dependencies the job required.</summary>
    IReadOnlyList<JobCapsuleDependency>? Dependencies,

    /// <summary>Redacted environment context.</summary>
    JobCapsuleEnvironment? Environment)
{
    /// <summary>
    /// Builds an inputs record whose <see cref="PayloadHash"/> is the SHA-256 digest of
    /// <paramref name="payloadJson"/>; the payload text itself is not retained.
    /// </summary>
    public static JobCapsuleInputs FromPayload(
        string payloadJson,
        JobCapsuleSchedulingParams? schedulingParams = null,
        JobCapsuleSourceRef? sourceRef = null,
        IReadOnlyList<JobCapsuleDependency>? dependencies = null,
        JobCapsuleEnvironment? environment = null)
        => new(ComputeHash(payloadJson), schedulingParams, sourceRef, dependencies, environment);

    /// <summary>Hashes UTF-8 text and formats the digest as "sha256:&lt;lowercase hex&gt;".</summary>
    private static string ComputeHash(string content)
        => $"sha256:{Convert.ToHexStringLower(SHA256.HashData(Encoding.UTF8.GetBytes(content)))}";
}
|
||||
|
||||
/// <summary>
/// Scheduling parameters captured in a job capsule.
/// Every member is optional; null means the job did not specify that setting.
/// </summary>
public sealed record JobCapsuleSchedulingParams(
    /// <summary>Requested priority.</summary>
    int? Priority,

    /// <summary>Requested completion deadline.</summary>
    DateTimeOffset? Deadline,

    /// <summary>Name of the retry policy applied.</summary>
    string? RetryPolicy,

    /// <summary>Maximum retry attempts.</summary>
    int? MaxRetries,

    /// <summary>Execution timeout in seconds.</summary>
    int? TimeoutSeconds,

    /// <summary>Queue the job was placed on.</summary>
    string? QueueName);
|
||||
|
||||
/// <summary>
/// Reference to whatever triggered the job (schedule, trigger, API call, ...).
/// </summary>
public sealed record JobCapsuleSourceRef(
    /// <summary>Source type (schedule, trigger, api, etc.).</summary>
    string SourceType,

    /// <summary>Identifier of the source, when known.</summary>
    string? SourceId,

    /// <summary>Actor that triggered the job.</summary>
    string? Actor,

    /// <summary>Trace ID of the originating request.</summary>
    string? TraceId);
|
||||
|
||||
/// <summary>
/// A single dependency the job required, recorded for evidence purposes.
/// </summary>
public sealed record JobCapsuleDependency(
    /// <summary>Kind of dependency.</summary>
    string DependencyType,

    /// <summary>Identifier of the dependency.</summary>
    string DependencyId,

    /// <summary>Version or digest of the dependency, when known.</summary>
    string? Version,

    /// <summary>True when the dependency was satisfied at execution time.</summary>
    bool Satisfied);
|
||||
|
||||
/// <summary>
/// Redacted environment context for a job capsule — only hashes and coarse flags,
/// never raw environment variables.
/// </summary>
public sealed record JobCapsuleEnvironment(
    /// <summary>Hash of the environment variables (raw values redacted).</summary>
    string? EnvHash,

    /// <summary>Identifier of the worker node that ran the job.</summary>
    string? WorkerNode,

    /// <summary>Runtime version string.</summary>
    string? RuntimeVersion,

    /// <summary>True when running in air-gap sealed mode.</summary>
    bool IsSealed,

    /// <summary>Data staleness at execution time, in seconds.</summary>
    int? StalenessSeconds);
|
||||
|
||||
/// <summary>
/// Outputs recorded when a job finishes (successfully or not).
/// </summary>
public sealed record JobCapsuleOutputs(
    /// <summary>Job status at completion.</summary>
    string Status,

    /// <summary>Process exit code, when applicable.</summary>
    int? ExitCode,

    /// <summary>Short, human-readable result summary.</summary>
    string? ResultSummary,

    /// <summary>Hash of the result payload (raw result redacted).</summary>
    string? ResultHash,

    /// <summary>Wall-clock duration in seconds.</summary>
    double DurationSeconds,

    /// <summary>Number of retries performed.</summary>
    int RetryCount,

    /// <summary>Error details when the job failed; null on success.</summary>
    JobCapsuleError? Error);
|
||||
|
||||
/// <summary>
/// Error details embedded in a job capsule. The message is expected to have been
/// passed through the redaction guard before it lands here.
/// </summary>
public sealed record JobCapsuleError(
    /// <summary>Machine-readable error code.</summary>
    string Code,

    /// <summary>Error message (redacted).</summary>
    string Message,

    /// <summary>Error category, when classified.</summary>
    string? Category,

    /// <summary>True when the error is considered retryable.</summary>
    bool Retryable);
|
||||
|
||||
/// <summary>
/// One artifact produced by (or referenced from) a job, recorded by digest.
/// </summary>
public sealed record JobCapsuleArtifact(
    /// <summary>Artifact name.</summary>
    string Name,

    /// <summary>Content digest of the artifact.</summary>
    string Digest,

    /// <summary>Artifact size in bytes.</summary>
    long SizeBytes,

    /// <summary>Media type, when known.</summary>
    string? MediaType,

    /// <summary>Storage location URI, when the artifact is stored externally.</summary>
    string? StorageUri,

    /// <summary>Additional free-form attributes.</summary>
    IReadOnlyDictionary<string, string>? Attributes);
|
||||
|
||||
/// <summary>
/// One timeline event recorded inside a job capsule.
/// </summary>
public sealed record JobCapsuleTimelineEntry(
    /// <summary>Event identifier.</summary>
    Guid EventId,

    /// <summary>Event type string.</summary>
    string EventType,

    /// <summary>Timestamp at which the event occurred.</summary>
    DateTimeOffset OccurredAt,

    /// <summary>Event severity label.</summary>
    string Severity,

    /// <summary>Short event summary, when available.</summary>
    string? Summary,

    /// <summary>Hash of the event payload (raw payload redacted).</summary>
    string? PayloadHash);
|
||||
|
||||
/// <summary>
/// Result of one policy evaluation, recorded for evidence purposes.
/// </summary>
public sealed record JobCapsulePolicyResult(
    /// <summary>Name of the evaluated policy.</summary>
    string PolicyName,

    /// <summary>Policy version, when known.</summary>
    string? PolicyVersion,

    /// <summary>Evaluation outcome (allow, deny, warn).</summary>
    string Result,

    /// <summary>Hash over the evaluation inputs and outputs.</summary>
    string EvaluationHash,

    /// <summary>Violations reported by the policy, if any.</summary>
    IReadOnlyList<string>? Violations);
|
||||
@@ -0,0 +1,551 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
/// Service for generating job capsules for Evidence Locker.
/// Per ORCH-OBS-53-001.
/// </summary>
public interface IJobCapsuleGenerator
{
    /// <summary>
    /// Generates a job scheduling capsule from the given request.
    /// </summary>
    Task<JobCapsuleResult> GenerateJobSchedulingCapsuleAsync(
        JobCapsuleRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Generates a job completion capsule, including the job's outputs and any produced artifacts.
    /// </summary>
    Task<JobCapsuleResult> GenerateJobCompletionCapsuleAsync(
        JobCapsuleRequest request,
        JobCapsuleOutputs outputs,
        IReadOnlyList<JobCapsuleArtifact>? artifacts = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Generates a job failure capsule; the error is redacted before being embedded.
    /// </summary>
    Task<JobCapsuleResult> GenerateJobFailureCapsuleAsync(
        JobCapsuleRequest request,
        JobCapsuleError error,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Generates a run completion capsule summarizing all job capsules that belong to the run.
    /// </summary>
    Task<JobCapsuleResult> GenerateRunCompletionCapsuleAsync(
        string tenantId,
        Guid runId,
        string? projectId,
        IReadOnlyList<JobCapsule> jobCapsules,
        IReadOnlyDictionary<string, string>? metadata = null,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Request for generating a job capsule. Only tenant, job id, job type and payload are
/// required; everything else defaults to "not provided".
/// </summary>
public sealed record JobCapsuleRequest(
    /// <summary>Tenant scope.</summary>
    string TenantId,

    /// <summary>Job identifier.</summary>
    Guid JobId,

    /// <summary>Job type.</summary>
    string JobType,

    /// <summary>Raw job payload JSON (redacted during capsule generation).</summary>
    string PayloadJson,

    /// <summary>Optional project scope.</summary>
    string? ProjectId = null,

    /// <summary>Run identifier, when the job belongs to a run.</summary>
    Guid? RunId = null,

    /// <summary>Scheduling parameters.</summary>
    JobCapsuleSchedulingParams? SchedulingParams = null,

    /// <summary>Source reference.</summary>
    JobCapsuleSourceRef? SourceRef = null,

    /// <summary>Dependencies the job required.</summary>
    IReadOnlyList<JobCapsuleDependency>? Dependencies = null,

    /// <summary>Environment context (redacted during capsule generation).</summary>
    JobCapsuleEnvironment? Environment = null,

    /// <summary>Timeline events to include in the capsule.</summary>
    IReadOnlyList<TimelineEvent>? TimelineEvents = null,

    /// <summary>Policy evaluation results to include.</summary>
    IReadOnlyList<JobCapsulePolicyResult>? PolicyResults = null,

    /// <summary>Additional metadata.</summary>
    IReadOnlyDictionary<string, string>? Metadata = null);
|
||||
|
||||
/// <summary>
/// Outcome of a capsule generation attempt. On success <see cref="Capsule"/> and
/// <see cref="EvidencePointer"/> are populated; on failure only <see cref="Error"/> is set.
/// </summary>
public sealed record JobCapsuleResult(
    /// <summary>True when generation succeeded.</summary>
    bool Success,

    /// <summary>The generated capsule, when successful.</summary>
    JobCapsule? Capsule,

    /// <summary>Evidence pointer usable in timeline events, when successful.</summary>
    EvidencePointer? EvidencePointer,

    /// <summary>Error message when generation failed.</summary>
    string? Error);
|
||||
|
||||
/// <summary>
/// Default implementation of job capsule generator.
/// Redacts payloads, builds and stores capsules, and — when enabled via
/// <see cref="JobCapsuleGeneratorOptions"/> — invokes snapshot hooks and emits timeline events.
/// </summary>
public sealed class JobCapsuleGenerator : IJobCapsuleGenerator
{
    private readonly IJobRedactionGuard _redactionGuard;
    private readonly IJobCapsuleStore _store;
    private readonly ITimelineEventEmitter? _timelineEmitter;
    private readonly ISnapshotHookInvoker? _snapshotHooks;
    private readonly ILogger<JobCapsuleGenerator> _logger;
    private readonly JobCapsuleGeneratorOptions _options;

    public JobCapsuleGenerator(
        IJobRedactionGuard redactionGuard,
        IJobCapsuleStore store,
        ILogger<JobCapsuleGenerator> logger,
        ITimelineEventEmitter? timelineEmitter = null,
        ISnapshotHookInvoker? snapshotHooks = null,
        JobCapsuleGeneratorOptions? options = null)
    {
        _redactionGuard = redactionGuard ?? throw new ArgumentNullException(nameof(redactionGuard));
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timelineEmitter = timelineEmitter;
        _snapshotHooks = snapshotHooks;
        _options = options ?? JobCapsuleGeneratorOptions.Default;
    }

    /// <inheritdoc/>
    public async Task<JobCapsuleResult> GenerateJobSchedulingCapsuleAsync(
        JobCapsuleRequest request,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var inputs = BuildRedactedInputs(request);
            var timelineEntries = ConvertTimelineEvents(request.TimelineEvents);

            await InvokePreSnapshotAsync(request.TenantId, request.JobId, JobCapsuleKind.JobScheduling, cancellationToken);

            var capsule = JobCapsule.Create(
                tenantId: request.TenantId,
                jobId: request.JobId,
                jobType: request.JobType,
                kind: JobCapsuleKind.JobScheduling,
                inputs: inputs,
                timelineEntries: timelineEntries,
                policyResults: request.PolicyResults,
                projectId: request.ProjectId,
                runId: request.RunId,
                metadata: request.Metadata);

            await _store.StoreAsync(capsule, cancellationToken);
            await InvokePostSnapshotAsync(request.TenantId, request.JobId, capsule, cancellationToken);
            await EmitJobCapturedEventAsync(request, capsule, "scheduling", cancellationToken);

            _logger.LogInformation(
                "Generated job scheduling capsule {CapsuleId} for job {JobId}, root hash {RootHash}",
                capsule.CapsuleId, request.JobId, capsule.RootHash);

            return Succeeded(capsule);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate job scheduling capsule for job {JobId}", request.JobId);
            return Failed(ex);
        }
    }

    /// <inheritdoc/>
    public async Task<JobCapsuleResult> GenerateJobCompletionCapsuleAsync(
        JobCapsuleRequest request,
        JobCapsuleOutputs outputs,
        IReadOnlyList<JobCapsuleArtifact>? artifacts = null,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var inputs = BuildRedactedInputs(request);
            var timelineEntries = ConvertTimelineEvents(request.TimelineEvents);

            await InvokePreSnapshotAsync(request.TenantId, request.JobId, JobCapsuleKind.JobCompletion, cancellationToken);

            var capsule = JobCapsule.Create(
                tenantId: request.TenantId,
                jobId: request.JobId,
                jobType: request.JobType,
                kind: JobCapsuleKind.JobCompletion,
                inputs: inputs,
                outputs: outputs,
                artifacts: artifacts,
                timelineEntries: timelineEntries,
                policyResults: request.PolicyResults,
                projectId: request.ProjectId,
                runId: request.RunId,
                metadata: request.Metadata);

            await _store.StoreAsync(capsule, cancellationToken);
            await InvokePostSnapshotAsync(request.TenantId, request.JobId, capsule, cancellationToken);
            await EmitJobCapturedEventAsync(request, capsule, "completion", cancellationToken);

            _logger.LogInformation(
                "Generated job completion capsule {CapsuleId} for job {JobId}, root hash {RootHash}",
                capsule.CapsuleId, request.JobId, capsule.RootHash);

            return Succeeded(capsule);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate job completion capsule for job {JobId}", request.JobId);
            return Failed(ex);
        }
    }

    /// <inheritdoc/>
    public async Task<JobCapsuleResult> GenerateJobFailureCapsuleAsync(
        JobCapsuleRequest request,
        JobCapsuleError error,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var inputs = BuildRedactedInputs(request);

            // Redact the error before it is embedded anywhere in the capsule.
            var redactedError = _redactionGuard.RedactError(error);

            var outputs = new JobCapsuleOutputs(
                Status: "failed",
                ExitCode: null,
                ResultSummary: redactedError.Message,
                ResultHash: null,
                DurationSeconds: 0,
                RetryCount: 0,
                Error: redactedError);

            var timelineEntries = ConvertTimelineEvents(request.TimelineEvents);

            await InvokePreSnapshotAsync(request.TenantId, request.JobId, JobCapsuleKind.JobFailure, cancellationToken);

            var capsule = JobCapsule.Create(
                tenantId: request.TenantId,
                jobId: request.JobId,
                jobType: request.JobType,
                kind: JobCapsuleKind.JobFailure,
                inputs: inputs,
                outputs: outputs,
                timelineEntries: timelineEntries,
                policyResults: request.PolicyResults,
                projectId: request.ProjectId,
                runId: request.RunId,
                metadata: request.Metadata);

            await _store.StoreAsync(capsule, cancellationToken);
            await InvokePostSnapshotAsync(request.TenantId, request.JobId, capsule, cancellationToken);
            await EmitJobCapturedEventAsync(request, capsule, "failure", cancellationToken);

            _logger.LogInformation(
                "Generated job failure capsule {CapsuleId} for job {JobId}, root hash {RootHash}",
                capsule.CapsuleId, request.JobId, capsule.RootHash);

            return Succeeded(capsule);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate job failure capsule for job {JobId}", request.JobId);
            return Failed(ex);
        }
    }

    /// <inheritdoc/>
    public async Task<JobCapsuleResult> GenerateRunCompletionCapsuleAsync(
        string tenantId,
        Guid runId,
        string? projectId,
        IReadOnlyList<JobCapsule> jobCapsules,
        IReadOnlyDictionary<string, string>? metadata = null,
        CancellationToken cancellationToken = default)
    {
        try
        {
            // Summarize the run by referencing every job capsule as an artifact, keyed by its root hash.
            var jobRefs = jobCapsules.Select(c => new JobCapsuleArtifact(
                Name: $"job-{c.JobId}",
                Digest: c.RootHash,
                SizeBytes: 0,
                MediaType: "application/vnd.stellaops.capsule+json",
                StorageUri: null,
                Attributes: new Dictionary<string, string>
                {
                    ["capsuleId"] = c.CapsuleId.ToString(),
                    ["jobType"] = c.JobType,
                    ["kind"] = c.Kind.ToString()
                })).ToList();

            var inputs = new JobCapsuleInputs(
                PayloadHash: $"run:{runId}",
                SchedulingParams: null,
                SourceRef: new JobCapsuleSourceRef("run", runId.ToString(), null, null),
                Dependencies: null,
                Environment: null);

            await InvokePreSnapshotAsync(tenantId, runId, JobCapsuleKind.RunCompletion, cancellationToken);

            var capsule = JobCapsule.Create(
                tenantId: tenantId,
                jobId: runId, // Use runId as the "job" ID for run capsules
                jobType: "run.completion",
                kind: JobCapsuleKind.RunCompletion,
                inputs: inputs,
                artifacts: jobRefs,
                projectId: projectId,
                runId: runId,
                metadata: metadata);

            await _store.StoreAsync(capsule, cancellationToken);
            await InvokePostSnapshotAsync(tenantId, runId, capsule, cancellationToken);

            if (_timelineEmitter is not null && _options.EmitTimelineEvents)
            {
                await _timelineEmitter.EmitRunEventAsync(
                    tenantId,
                    runId,
                    "run.evidence.captured",
                    payload: new { capsuleId = capsule.CapsuleId, kind = "completion", rootHash = capsule.RootHash, jobCount = jobCapsules.Count },
                    projectId: projectId,
                    cancellationToken: cancellationToken);
            }

            _logger.LogInformation(
                "Generated run completion capsule {CapsuleId} for run {RunId} with {JobCount} jobs, root hash {RootHash}",
                capsule.CapsuleId, runId, jobCapsules.Count, capsule.RootHash);

            return Succeeded(capsule);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to generate run completion capsule for run {RunId}", runId);
            return Failed(ex);
        }
    }

    /// <summary>Redacts the request payload and environment and packages them as capsule inputs.</summary>
    private JobCapsuleInputs BuildRedactedInputs(JobCapsuleRequest request)
    {
        var redactedPayload = _redactionGuard.RedactPayload(request.PayloadJson);
        return JobCapsuleInputs.FromPayload(
            redactedPayload,
            request.SchedulingParams,
            request.SourceRef,
            request.Dependencies,
            _redactionGuard.RedactEnvironment(request.Environment));
    }

    /// <summary>
    /// Invokes the pre-snapshot hook when a hook invoker is registered AND the option allows it.
    /// Fix: the original checked only for a registered invoker and ignored
    /// <see cref="JobCapsuleGeneratorOptions.InvokeSnapshotHooks"/>.
    /// </summary>
    private Task InvokePreSnapshotAsync(string tenantId, Guid subjectId, JobCapsuleKind kind, CancellationToken cancellationToken)
        => _snapshotHooks is not null && _options.InvokeSnapshotHooks
            ? _snapshotHooks.InvokePreSnapshotAsync(tenantId, subjectId, kind, cancellationToken)
            : Task.CompletedTask;

    /// <summary>Invokes the post-snapshot hook under the same gating as the pre-snapshot hook.</summary>
    private Task InvokePostSnapshotAsync(string tenantId, Guid subjectId, JobCapsule capsule, CancellationToken cancellationToken)
        => _snapshotHooks is not null && _options.InvokeSnapshotHooks
            ? _snapshotHooks.InvokePostSnapshotAsync(tenantId, subjectId, capsule, cancellationToken)
            : Task.CompletedTask;

    /// <summary>Emits the "job.evidence.captured" timeline event when an emitter is registered and the option allows it.</summary>
    private Task EmitJobCapturedEventAsync(JobCapsuleRequest request, JobCapsule capsule, string kindLabel, CancellationToken cancellationToken)
        => _timelineEmitter is not null && _options.EmitTimelineEvents
            ? _timelineEmitter.EmitJobEventAsync(
                request.TenantId,
                request.JobId,
                "job.evidence.captured",
                payload: new { capsuleId = capsule.CapsuleId, kind = kindLabel, rootHash = capsule.RootHash },
                projectId: request.ProjectId,
                cancellationToken: cancellationToken)
            : Task.CompletedTask;

    /// <summary>Builds a success result carrying the capsule and its evidence pointer.</summary>
    private static JobCapsuleResult Succeeded(JobCapsule capsule)
        => new(Success: true, Capsule: capsule, EvidencePointer: capsule.ToEvidencePointer(), Error: null);

    /// <summary>Builds a failure result carrying only the exception message.</summary>
    private static JobCapsuleResult Failed(Exception ex)
        => new(Success: false, Capsule: null, EvidencePointer: null, Error: ex.Message);

    /// <summary>Maps timeline events into capsule entries; null when there is nothing to include.</summary>
    private static IReadOnlyList<JobCapsuleTimelineEntry>? ConvertTimelineEvents(
        IReadOnlyList<TimelineEvent>? events)
    {
        if (events is null || events.Count == 0)
            return null;

        return events.Select(e => new JobCapsuleTimelineEntry(
            EventId: e.EventId,
            EventType: e.EventType,
            OccurredAt: e.OccurredAt,
            Severity: e.Severity.ToString(),
            Summary: null,
            PayloadHash: e.PayloadHash)).ToList();
    }
}
|
||||
|
||||
/// <summary>
/// Tuning options for the job capsule generator.
/// </summary>
public sealed record JobCapsuleGeneratorOptions(
    /// <summary>Emit a timeline event each time a capsule is generated.</summary>
    bool EmitTimelineEvents,

    /// <summary>Invoke registered snapshot hooks around capsule generation.</summary>
    bool InvokeSnapshotHooks,

    /// <summary>Largest artifact (in bytes) that may be included inline.</summary>
    long MaxInlineArtifactSize)
{
    /// <summary>Default options: events on, hooks on, 64 KiB inline limit.</summary>
    public static JobCapsuleGeneratorOptions Default => new(
        EmitTimelineEvents: true,
        InvokeSnapshotHooks: true,
        MaxInlineArtifactSize: 64 * 1024);
}
|
||||
|
||||
/// <summary>
/// Store for job capsules.
/// </summary>
public interface IJobCapsuleStore
{
    /// <summary>Stores a job capsule, keyed by its capsule ID.</summary>
    Task StoreAsync(JobCapsule capsule, CancellationToken cancellationToken = default);

    /// <summary>Retrieves a job capsule by ID; null when not found.</summary>
    Task<JobCapsule?> GetAsync(Guid capsuleId, CancellationToken cancellationToken = default);

    /// <summary>Lists all capsules recorded for the given job.</summary>
    Task<IReadOnlyList<JobCapsule>> ListForJobAsync(Guid jobId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// In-memory job capsule store for testing. All access is serialized through a single gate.
/// </summary>
public sealed class InMemoryJobCapsuleStore : IJobCapsuleStore
{
    private readonly Dictionary<Guid, JobCapsule> _capsules = new();
    private readonly object _lock = new();

    /// <summary>Inserts or replaces the capsule under its capsule ID.</summary>
    public Task StoreAsync(JobCapsule capsule, CancellationToken cancellationToken = default)
    {
        lock (_lock)
        {
            _capsules[capsule.CapsuleId] = capsule;
        }

        return Task.CompletedTask;
    }

    /// <summary>Looks up a capsule by ID; completes with null when absent.</summary>
    public Task<JobCapsule?> GetAsync(Guid capsuleId, CancellationToken cancellationToken = default)
    {
        JobCapsule? found;
        lock (_lock)
        {
            found = _capsules.GetValueOrDefault(capsuleId);
        }

        return Task.FromResult(found);
    }

    /// <summary>Returns a snapshot of every capsule whose JobId matches.</summary>
    public Task<IReadOnlyList<JobCapsule>> ListForJobAsync(Guid jobId, CancellationToken cancellationToken = default)
    {
        IReadOnlyList<JobCapsule> snapshot;
        lock (_lock)
        {
            snapshot = _capsules.Values.Where(c => c.JobId == jobId).ToList();
        }

        return Task.FromResult(snapshot);
    }

    /// <summary>Gets all capsules (for testing).</summary>
    public IReadOnlyList<JobCapsule> GetAll()
    {
        lock (_lock)
        {
            return _capsules.Values.ToList();
        }
    }

    /// <summary>Clears all capsules (for testing).</summary>
    public void Clear()
    {
        lock (_lock)
        {
            _capsules.Clear();
        }
    }

    /// <summary>Gets the capsule count.</summary>
    public int Count
    {
        get
        {
            lock (_lock)
            {
                return _capsules.Count;
            }
        }
    }
}
|
||||
@@ -0,0 +1,286 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
/// Redaction guard for sensitive data in job capsules.
/// Per ORCH-OBS-53-001.
/// </summary>
public interface IJobRedactionGuard
{
    /// <summary>
    /// Redacts sensitive data from a job payload JSON string; the returned text may be truncated.
    /// </summary>
    string RedactPayload(string payloadJson);

    /// <summary>
    /// Redacts sensitive data from an environment context; null passes through as null.
    /// </summary>
    JobCapsuleEnvironment? RedactEnvironment(JobCapsuleEnvironment? environment);

    /// <summary>
    /// Redacts sensitive data from an error record.
    /// </summary>
    JobCapsuleError RedactError(JobCapsuleError error);

    /// <summary>
    /// Redacts an identity string (e.g. a user name or email).
    /// </summary>
    string RedactIdentity(string identity);

    /// <summary>
    /// Redacts an arbitrary string value that may contain secrets.
    /// </summary>
    string RedactValue(string value);
}
|
||||
|
||||
/// <summary>
/// Configuration for the job redaction guard: which property names and content shapes are
/// considered sensitive, and how redacted output is formatted.
/// </summary>
public sealed record JobRedactionGuardOptions(
    /// <summary>Regex patterns for JSON property names whose values must be redacted.</summary>
    IReadOnlyList<string> SensitivePropertyPatterns,

    /// <summary>Regex patterns for free-form content that must be redacted.</summary>
    IReadOnlyList<string> SensitiveContentPatterns,

    /// <summary>When true, redacted values are replaced with a short hash instead of a plain marker.</summary>
    bool HashRedactedValues,

    /// <summary>Maximum output length before truncation.</summary>
    int MaxOutputLength,

    /// <summary>When true, email redaction keeps the domain portion.</summary>
    bool PreserveEmailDomain)
{
    /// <summary>Default redaction options covering common credential property names and token formats.</summary>
    public static JobRedactionGuardOptions Default => new(
        SensitivePropertyPatterns: new[]
        {
            "(?i)password",
            "(?i)secret",
            "(?i)token",
            "(?i)api_?key",
            "(?i)auth",
            "(?i)credential",
            "(?i)private_?key",
            "(?i)access_?key",
            "(?i)connection_?string",
            "(?i)bearer",
            "(?i)signing_?key",
            "(?i)encryption_?key"
        },
        SensitiveContentPatterns: new[]
        {
            @"(?i)bearer\s+[a-zA-Z0-9\-_.]+",
            @"(?i)basic\s+[a-zA-Z0-9+/=]+",
            @"-----BEGIN\s+(?:RSA\s+)?PRIVATE\s+KEY-----",
            @"(?i)password\s*[=:]\s*\S+",
            @"(?i)secret\s*[=:]\s*\S+",
            @"(?i)token\s*[=:]\s*\S+",
            @"ghp_[a-zA-Z0-9]{36}", // GitHub PAT
            @"ghs_[a-zA-Z0-9]{36}", // GitHub App token
            @"sk-[a-zA-Z0-9]{48}", // OpenAI API key pattern
            @"AKIA[0-9A-Z]{16}" // AWS access key
        },
        HashRedactedValues: true,
        MaxOutputLength: 64 * 1024,
        PreserveEmailDomain: false);
}
|
||||
|
||||
/// <summary>
/// Default implementation of job redaction guard.
/// Redacts payloads, error messages, identities and raw values by matching
/// configured sensitive property-name and content patterns. When
/// <c>HashRedactedValues</c> is enabled, redacted values carry a short SHA-256
/// digest so equal secrets remain correlatable without being disclosed.
/// </summary>
public sealed class JobRedactionGuard : IJobRedactionGuard
{
    private const string RedactedPlaceholder = "[REDACTED]";
    private const string TruncatedSuffix = "...[TRUNCATED]";

    private readonly JobRedactionGuardOptions _options;
    // Precompiled once in the constructor (the original rebuilt the property
    // regex on every RedactPayload call, per pattern).
    // Each property pattern matches `"<name>" : "<value>"` so only the quoted
    // value is replaced; group 1 captures the `"<name>":` prefix.
    private readonly List<Regex> _propertyValuePatterns;
    private readonly List<Regex> _sensitiveContentPatterns;

    /// <summary>
    /// Creates a guard using <paramref name="options"/>, or
    /// <see cref="JobRedactionGuardOptions.Default"/> when null.
    /// </summary>
    public JobRedactionGuard(JobRedactionGuardOptions? options = null)
    {
        _options = options ?? JobRedactionGuardOptions.Default;
        _propertyValuePatterns = _options.SensitivePropertyPatterns
            .Select(p => new Regex(
                $@"(""{p.TrimStart('^').TrimEnd('$')}""\s*:\s*)""[^""]*""",
                RegexOptions.IgnoreCase | RegexOptions.Compiled))
            .ToList();
        _sensitiveContentPatterns = _options.SensitiveContentPatterns
            .Select(p => new Regex(p, RegexOptions.Compiled))
            .ToList();
    }

    /// <summary>
    /// Redacts sensitive JSON property values and free-form sensitive content
    /// from <paramref name="payloadJson"/>, then truncates the result to
    /// <c>MaxOutputLength</c>. Returns the input unchanged when null or empty.
    /// </summary>
    public string RedactPayload(string payloadJson)
    {
        if (string.IsNullOrEmpty(payloadJson))
            return payloadJson;

        var result = payloadJson;

        foreach (var propertyPattern in _propertyValuePatterns)
        {
            result = propertyPattern.Replace(result, match =>
            {
                var prefix = match.Groups[1].Value;
                if (_options.HashRedactedValues)
                {
                    // The original value is the text between the final quote
                    // pair of the match.
                    var originalValue = match.Value[(match.Value.LastIndexOf('"', match.Value.Length - 2) + 1)..^1];
                    return $@"{prefix}""[REDACTED:{ComputeShortHash(originalValue)}]""";
                }
                return $@"{prefix}""{RedactedPlaceholder}""";
            });
        }

        return Truncate(ApplyContentPatterns(result));
    }

    /// <summary>
    /// Returns the environment unchanged: the environment hash is already a
    /// hash, and worker node / runtime identifiers are considered safe.
    /// </summary>
    public JobCapsuleEnvironment? RedactEnvironment(JobCapsuleEnvironment? environment)
    {
        if (environment is null)
            return null;

        return environment;
    }

    /// <summary>Returns a copy of <paramref name="error"/> with its message redacted.</summary>
    public JobCapsuleError RedactError(JobCapsuleError error)
    {
        var redactedMessage = RedactContent(error.Message);

        return error with
        {
            Message = redactedMessage
        };
    }

    /// <summary>
    /// Redacts a user identity. Emails keep first/last character of the local
    /// part (e.g. <c>a***z@[DOMAIN]</c>); the domain is kept only when
    /// <c>PreserveEmailDomain</c> is set. Non-email identities are hashed or
    /// fully redacted depending on options.
    /// </summary>
    public string RedactIdentity(string identity)
    {
        if (string.IsNullOrEmpty(identity))
            return identity;

        // Email-shaped identities get partial local-part redaction.
        if (identity.Contains('@'))
        {
            var parts = identity.Split('@');
            if (parts.Length == 2)
            {
                var localPart = parts[0];
                var domain = parts[1];

                // Too-short local parts would leak most characters; redact fully.
                var redactedLocal = localPart.Length <= 2
                    ? RedactedPlaceholder
                    : $"{localPart[0]}***{localPart[^1]}";

                if (_options.PreserveEmailDomain)
                {
                    return $"{redactedLocal}@{domain}";
                }
                return $"{redactedLocal}@[DOMAIN]";
            }
        }

        // Non-email identities: hash if configured, otherwise plain placeholder.
        if (_options.HashRedactedValues)
        {
            return $"[USER:{ComputeShortHash(identity)}]";
        }

        return RedactedPlaceholder;
    }

    /// <summary>Redacts an arbitrary value, hashing it when configured.</summary>
    public string RedactValue(string value)
    {
        if (string.IsNullOrEmpty(value))
            return value;

        if (_options.HashRedactedValues)
        {
            return $"[HASH:{ComputeShortHash(value)}]";
        }

        return RedactedPlaceholder;
    }

    /// <summary>
    /// Applies content-pattern redaction and truncation to free-form text
    /// (shares the same pipeline as the content phase of RedactPayload).
    /// </summary>
    private string RedactContent(string content)
    {
        if (string.IsNullOrEmpty(content))
            return content;

        return Truncate(ApplyContentPatterns(content));
    }

    /// <summary>Replaces every sensitive-content match in <paramref name="input"/>.</summary>
    private string ApplyContentPatterns(string input)
    {
        var result = input;
        foreach (var pattern in _sensitiveContentPatterns)
        {
            result = pattern.Replace(result, match =>
                _options.HashRedactedValues
                    ? $"[REDACTED:{ComputeShortHash(match.Value)}]"
                    : RedactedPlaceholder);
        }
        return result;
    }

    /// <summary>
    /// Caps <paramref name="input"/> at MaxOutputLength, appending a marker so
    /// the total length is exactly MaxOutputLength when truncation occurs.
    /// </summary>
    private string Truncate(string input)
        => input.Length > _options.MaxOutputLength
            ? input[..(_options.MaxOutputLength - TruncatedSuffix.Length)] + TruncatedSuffix
            : input;

    /// <summary>First 8 hex chars (lowercase) of the SHA-256 of <paramref name="value"/>.</summary>
    private static string ComputeShortHash(string value)
    {
        var bytes = Encoding.UTF8.GetBytes(value);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexString(hash)[..8].ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
/// Pass-through redaction guard for testing: every method returns its input
/// unchanged, so no data is redacted, hashed, or truncated.
/// </summary>
public sealed class NoOpJobRedactionGuard : IJobRedactionGuard
{
    /// <summary>Shared singleton instance.</summary>
    public static NoOpJobRedactionGuard Instance { get; } = new();

    // Private: consumers must use the singleton.
    private NoOpJobRedactionGuard() { }

    /// <summary>Returns the payload untouched.</summary>
    public string RedactPayload(string payloadJson)
    {
        return payloadJson;
    }

    /// <summary>Returns the environment untouched.</summary>
    public JobCapsuleEnvironment? RedactEnvironment(JobCapsuleEnvironment? environment)
    {
        return environment;
    }

    /// <summary>Returns the error untouched.</summary>
    public JobCapsuleError RedactError(JobCapsuleError error)
    {
        return error;
    }

    /// <summary>Returns the identity untouched.</summary>
    public string RedactIdentity(string identity)
    {
        return identity;
    }

    /// <summary>Returns the value untouched.</summary>
    public string RedactValue(string value)
    {
        return value;
    }
}
|
||||
@@ -0,0 +1,274 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
/// <summary>
/// Hook invoked before and after evidence snapshots.
/// Per ORCH-OBS-53-001.
/// </summary>
public interface ISnapshotHook
{
    /// <summary>Hook priority (lower runs first).</summary>
    int Priority { get; }

    /// <summary>Hook name for logging.</summary>
    string Name { get; }

    /// <summary>
    /// Called before a snapshot is captured.
    /// </summary>
    /// <param name="context">Tenant, job, and capsule-kind context for the snapshot.</param>
    /// <param name="cancellationToken">Token used to abort the hook.</param>
    Task OnPreSnapshotAsync(SnapshotHookContext context, CancellationToken cancellationToken = default);

    /// <summary>
    /// Called after a snapshot is captured.
    /// </summary>
    /// <param name="context">Tenant, job, and capsule-kind context for the snapshot.</param>
    /// <param name="capsule">The capsule produced by the snapshot.</param>
    /// <param name="cancellationToken">Token used to abort the hook.</param>
    Task OnPostSnapshotAsync(SnapshotHookContext context, JobCapsule capsule, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Context passed to snapshot hooks.
/// Immutable record identifying which tenant/job/capsule-kind a snapshot
/// belongs to, plus optional extra context data.
/// </summary>
public sealed record SnapshotHookContext(
    /// <summary>Tenant scope.</summary>
    string TenantId,

    /// <summary>Job or run identifier.</summary>
    Guid JobId,

    /// <summary>Capsule kind being captured.</summary>
    JobCapsuleKind Kind,

    /// <summary>Additional context data; null when no extra data is supplied.</summary>
    IReadOnlyDictionary<string, object>? Data = null);
|
||||
|
||||
/// <summary>
/// Service for invoking snapshot hooks.
/// </summary>
public interface ISnapshotHookInvoker
{
    /// <summary>
    /// Invokes all pre-snapshot hooks.
    /// </summary>
    /// <param name="tenantId">Tenant scope.</param>
    /// <param name="jobId">Job or run identifier.</param>
    /// <param name="kind">Capsule kind being captured.</param>
    /// <param name="cancellationToken">Token used to abort the invocation.</param>
    Task InvokePreSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsuleKind kind,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invokes all post-snapshot hooks.
    /// </summary>
    /// <param name="tenantId">Tenant scope.</param>
    /// <param name="jobId">Job or run identifier.</param>
    /// <param name="capsule">The capsule produced by the snapshot.</param>
    /// <param name="cancellationToken">Token used to abort the invocation.</param>
    Task InvokePostSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsule capsule,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Default implementation of snapshot hook invoker.
/// Runs registered hooks in ascending <see cref="ISnapshotHook.Priority"/>
/// order, applying a per-hook timeout. Hook timeouts and exceptions are
/// logged and, depending on options, either swallowed or rethrown; external
/// cancellation always propagates.
/// </summary>
public sealed class SnapshotHookInvoker : ISnapshotHookInvoker
{
    private readonly IReadOnlyList<ISnapshotHook> _hooks;
    private readonly ILogger<SnapshotHookInvoker> _logger;
    private readonly SnapshotHookInvokerOptions _options;

    public SnapshotHookInvoker(
        IEnumerable<ISnapshotHook> hooks,
        ILogger<SnapshotHookInvoker> logger,
        SnapshotHookInvokerOptions? options = null)
    {
        // Order is fixed at construction time; lower priority runs first.
        _hooks = hooks.OrderBy(h => h.Priority).ToList();
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options ?? SnapshotHookInvokerOptions.Default;
    }

    /// <summary>
    /// Invokes all pre-snapshot hooks in priority order with a per-hook timeout.
    /// </summary>
    public async Task InvokePreSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsuleKind kind,
        CancellationToken cancellationToken = default)
    {
        if (_hooks.Count == 0)
            return;

        var context = new SnapshotHookContext(tenantId, jobId, kind);

        foreach (var hook in _hooks)
        {
            try
            {
                // Linked token enforces the per-hook timeout while still
                // honoring external cancellation.
                using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
                cts.CancelAfter(_options.HookTimeout);

                _logger.LogDebug(
                    "Invoking pre-snapshot hook {HookName} for job {JobId}",
                    hook.Name, jobId);

                await hook.OnPreSnapshotAsync(context, cts.Token);
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // BUG FIX: external cancellation previously fell through to the
                // generic catch below, where it was logged as a hook failure and
                // swallowed unless FailOnHookError was set. It must propagate.
                throw;
            }
            catch (OperationCanceledException)
            {
                // Only the linked (timeout) token fired.
                _logger.LogWarning(
                    "Pre-snapshot hook {HookName} timed out for job {JobId}",
                    hook.Name, jobId);

                if (_options.FailOnHookTimeout)
                    throw;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex,
                    "Pre-snapshot hook {HookName} failed for job {JobId}",
                    hook.Name, jobId);

                if (_options.FailOnHookError)
                    throw;
            }
        }
    }

    /// <summary>
    /// Invokes all post-snapshot hooks in priority order with a per-hook timeout.
    /// </summary>
    public async Task InvokePostSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsule capsule,
        CancellationToken cancellationToken = default)
    {
        if (_hooks.Count == 0)
            return;

        var context = new SnapshotHookContext(tenantId, jobId, capsule.Kind);

        foreach (var hook in _hooks)
        {
            try
            {
                using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
                cts.CancelAfter(_options.HookTimeout);

                _logger.LogDebug(
                    "Invoking post-snapshot hook {HookName} for job {JobId} capsule {CapsuleId}",
                    hook.Name, jobId, capsule.CapsuleId);

                await hook.OnPostSnapshotAsync(context, capsule, cts.Token);
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // BUG FIX: propagate external cancellation (see pre-snapshot path).
                throw;
            }
            catch (OperationCanceledException)
            {
                _logger.LogWarning(
                    "Post-snapshot hook {HookName} timed out for job {JobId}",
                    hook.Name, jobId);

                if (_options.FailOnHookTimeout)
                    throw;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex,
                    "Post-snapshot hook {HookName} failed for job {JobId}",
                    hook.Name, jobId);

                if (_options.FailOnHookError)
                    throw;
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Options for snapshot hook invoker.
/// </summary>
public sealed record SnapshotHookInvokerOptions(
    /// <summary>Timeout applied to each individual hook invocation.</summary>
    TimeSpan HookTimeout,

    /// <summary>Whether a hook timeout aborts the invocation (rethrown) instead of being logged and skipped.</summary>
    bool FailOnHookTimeout,

    /// <summary>Whether a hook exception aborts the invocation (rethrown) instead of being logged and skipped.</summary>
    bool FailOnHookError)
{
    /// <summary>Default options: 30-second per-hook timeout; timeouts and errors are logged but non-fatal.</summary>
    public static SnapshotHookInvokerOptions Default => new(
        HookTimeout: TimeSpan.FromSeconds(30),
        FailOnHookTimeout: false,
        FailOnHookError: false);
}
|
||||
|
||||
/// <summary>
/// Snapshot hook that emits timeline events.
/// Both phases are currently no-ops: the generator itself is responsible for
/// timeline emission, so this hook only reserves a slot in the pipeline.
/// </summary>
public sealed class TimelineSnapshotHook : ISnapshotHook
{
    public int Priority => 100;

    public string Name => "timeline";

    /// <summary>No work is needed before a snapshot.</summary>
    public Task OnPreSnapshotAsync(SnapshotHookContext context, CancellationToken cancellationToken = default)
        => Task.CompletedTask;

    /// <summary>Timeline events for completed snapshots are emitted by the generator itself.</summary>
    public Task OnPostSnapshotAsync(SnapshotHookContext context, JobCapsule capsule, CancellationToken cancellationToken = default)
        => Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Snapshot hook for audit logging.
/// Writes an informational audit log entry before and after every snapshot.
/// Runs at priority 50, i.e. ahead of the timeline hook.
/// </summary>
public sealed class AuditSnapshotHook : ISnapshotHook
{
    private readonly ILogger<AuditSnapshotHook> _logger;

    public AuditSnapshotHook(ILogger<AuditSnapshotHook> logger)
    {
        ArgumentNullException.ThrowIfNull(logger);
        _logger = logger;
    }

    public int Priority => 50;

    public string Name => "audit";

    /// <summary>Logs the snapshot that is about to be captured.</summary>
    public Task OnPreSnapshotAsync(SnapshotHookContext context, CancellationToken cancellationToken = default)
    {
        _logger.LogInformation(
            "Audit: Pre-snapshot for {Kind} job {JobId} tenant {TenantId}",
            context.Kind,
            context.JobId,
            context.TenantId);
        return Task.CompletedTask;
    }

    /// <summary>Logs the capsule that was produced, including its root hash.</summary>
    public Task OnPostSnapshotAsync(SnapshotHookContext context, JobCapsule capsule, CancellationToken cancellationToken = default)
    {
        _logger.LogInformation(
            "Audit: Post-snapshot capsule {CapsuleId} for {Kind} job {JobId}, root hash {RootHash}",
            capsule.CapsuleId,
            context.Kind,
            context.JobId,
            capsule.RootHash);
        return Task.CompletedTask;
    }
}
|
||||
|
||||
/// <summary>
/// No-op snapshot hook invoker for testing: both phases complete immediately
/// without invoking any hooks.
/// </summary>
public sealed class NoOpSnapshotHookInvoker : ISnapshotHookInvoker
{
    /// <summary>Singleton instance.</summary>
    public static NoOpSnapshotHookInvoker Instance { get; } = new();

    // Private: consumers must use the singleton.
    private NoOpSnapshotHookInvoker() { }

    /// <summary>Does nothing; completes synchronously.</summary>
    public Task InvokePreSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsuleKind kind,
        CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }

    /// <summary>Does nothing; completes synchronously.</summary>
    public Task InvokePostSnapshotAsync(
        string tenantId,
        Guid jobId,
        JobCapsule capsule,
        CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }
}
|
||||
@@ -0,0 +1,505 @@
|
||||
using System.Globalization;

using Microsoft.Extensions.Logging;
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
|
||||
namespace StellaOps.Orchestrator.Core.Observability;
|
||||
|
||||
/// <summary>
/// Incident mode hooks for the Orchestrator service.
/// Per ORCH-OBS-55-001: Incident mode hooks with sampling overrides,
/// extended retention, debug spans, and automatic activation on SLO burn-rate breach.
/// </summary>
public interface IIncidentModeHooks
{
    /// <summary>
    /// Evaluates SLO burn rate and potentially activates incident mode.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="sloName">SLO name that breached.</param>
    /// <param name="burnRate">Current burn rate.</param>
    /// <param name="threshold">Threshold that was breached.</param>
    /// <returns>Activation result; <c>Success</c> is false when the breach did not trigger activation (e.g. below threshold or in cooldown).</returns>
    Task<IncidentModeActivationResult> EvaluateBurnRateBreachAsync(
        string tenantId,
        string sloName,
        double burnRate,
        double threshold);

    /// <summary>
    /// Manually activates incident mode for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="actor">Actor triggering the activation.</param>
    /// <param name="reason">Reason for activation.</param>
    /// <param name="ttl">Optional TTL override; implementation default applies when null.</param>
    /// <returns>Activation result.</returns>
    Task<IncidentModeActivationResult> ActivateAsync(
        string tenantId,
        string actor,
        string reason,
        TimeSpan? ttl = null);

    /// <summary>
    /// Deactivates incident mode for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="actor">Actor triggering the deactivation.</param>
    /// <param name="reason">Reason for deactivation.</param>
    /// <returns>Deactivation result.</returns>
    Task<IncidentModeDeactivationResult> DeactivateAsync(
        string tenantId,
        string actor,
        string reason);

    /// <summary>
    /// Gets the current incident mode state for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>Current state.</returns>
    IncidentModeState GetState(string tenantId);

    /// <summary>
    /// Checks if incident mode is active for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>True if active.</returns>
    bool IsActive(string tenantId);

    /// <summary>
    /// Gets the effective sampling rate during incident mode.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>Sampling rate (0.0-1.0).</returns>
    double GetEffectiveSamplingRate(string tenantId);

    /// <summary>
    /// Gets the effective log retention during incident mode.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>Retention duration.</returns>
    TimeSpan GetEffectiveRetention(string tenantId);

    /// <summary>
    /// Checks if debug spans should be enabled.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <returns>True if debug spans should be enabled.</returns>
    bool IsDebugSpansEnabled(string tenantId);
}
|
||||
|
||||
/// <summary>
/// Result of incident mode activation.
/// </summary>
public sealed record IncidentModeActivationResult(
    /// <summary>Whether the activation request succeeded.</summary>
    bool Success,
    /// <summary>Whether incident mode was already active (activation extended rather than started).</summary>
    bool WasAlreadyActive,
    /// <summary>Resulting incident mode state.</summary>
    IncidentModeState State,
    /// <summary>Failure detail; null on success.</summary>
    string? ErrorMessage = null)
{
    /// <summary>Successful fresh activation.</summary>
    public static IncidentModeActivationResult Activated(IncidentModeState state)
        => new(true, false, state);

    /// <summary>Successful request against an already-active incident mode.</summary>
    public static IncidentModeActivationResult AlreadyActive(IncidentModeState state)
        => new(true, true, state);

    /// <summary>Failed activation; state reported as inactive.</summary>
    public static IncidentModeActivationResult Failed(string error)
        => new(false, false, IncidentModeState.Inactive, error);
}
|
||||
|
||||
/// <summary>
/// Result of incident mode deactivation.
/// </summary>
public sealed record IncidentModeDeactivationResult(
    /// <summary>Whether the deactivation request succeeded.</summary>
    bool Success,
    /// <summary>Whether incident mode was actually active before the request.</summary>
    bool WasActive,
    /// <summary>Failure detail; null on success.</summary>
    string? ErrorMessage = null)
{
    /// <summary>Incident mode was active and has been deactivated.</summary>
    public static IncidentModeDeactivationResult Deactivated()
        => new(true, true);

    /// <summary>Request succeeded but incident mode was not active.</summary>
    public static IncidentModeDeactivationResult WasNotActive()
        => new(true, false);

    /// <summary>Failed deactivation.</summary>
    public static IncidentModeDeactivationResult Failed(string error)
        => new(false, false, error);
}
|
||||
|
||||
/// <summary>
/// Current state of incident mode for a tenant.
/// </summary>
public sealed record IncidentModeState(
    /// <summary>Whether incident mode is currently active.</summary>
    bool IsActive,
    /// <summary>When incident mode was activated; null when inactive.</summary>
    DateTimeOffset? ActivatedAt,
    /// <summary>When the activation expires; null when inactive.</summary>
    DateTimeOffset? ExpiresAt,
    /// <summary>Actor that triggered the activation; null when inactive.</summary>
    string? ActivatedBy,
    /// <summary>Human-readable activation reason; null when inactive.</summary>
    string? ActivationReason,
    /// <summary>Source that triggered incident mode.</summary>
    IncidentModeSource Source,
    /// <summary>Trace sampling rate applied while active (0.0-1.0).</summary>
    double SamplingRateOverride,
    /// <summary>Log retention duration applied while active.</summary>
    TimeSpan RetentionOverride,
    /// <summary>Whether debug spans are enabled while active.</summary>
    bool DebugSpansEnabled)
{
    /// <summary>Canonical inactive state (all overrides zeroed).</summary>
    public static IncidentModeState Inactive => new(
        false, null, null, null, null,
        IncidentModeSource.None, 0.0, TimeSpan.Zero, false);
}
|
||||
|
||||
/// <summary>
/// Source that triggered incident mode.
/// </summary>
public enum IncidentModeSource
{
    /// <summary>Incident mode is not active.</summary>
    None,
    /// <summary>Activated manually by an operator.</summary>
    Manual,
    /// <summary>Activated via the API.</summary>
    Api,
    /// <summary>Activated via the CLI.</summary>
    Cli,
    /// <summary>Activated automatically by an SLO burn-rate breach.</summary>
    BurnRateAlert,
    /// <summary>Activated via static configuration.</summary>
    Configuration,
    /// <summary>State restored from persistence after a restart.</summary>
    Restored
}
|
||||
|
||||
/// <summary>
/// Configuration for incident mode behavior.
/// Bound from the <see cref="SectionName"/> configuration section.
/// </summary>
public sealed record IncidentModeHooksOptions
{
    // Configuration section key used when binding these options.
    public const string SectionName = "Orchestrator:IncidentMode";

    /// <summary>
    /// Default TTL for incident mode activation.
    /// </summary>
    public TimeSpan DefaultTtl { get; init; } = TimeSpan.FromHours(4);

    /// <summary>
    /// Burn rate threshold that triggers automatic activation.
    /// Default is 6x (warning level).
    /// </summary>
    public double BurnRateActivationThreshold { get; init; } = 6.0;

    /// <summary>
    /// Sampling rate override during incident mode (0.0-1.0).
    /// Defaults to 1.0, i.e. capture every trace while an incident is active.
    /// </summary>
    public double SamplingRateOverride { get; init; } = 1.0;

    /// <summary>
    /// Retention duration override during incident mode.
    /// </summary>
    public TimeSpan RetentionOverride { get; init; } = TimeSpan.FromDays(30);

    /// <summary>
    /// Normal sampling rate when not in incident mode.
    /// </summary>
    public double NormalSamplingRate { get; init; } = 0.1;

    /// <summary>
    /// Normal retention duration when not in incident mode.
    /// </summary>
    public TimeSpan NormalRetention { get; init; } = TimeSpan.FromDays(7);

    /// <summary>
    /// Whether to enable debug spans during incident mode.
    /// </summary>
    public bool EnableDebugSpans { get; init; } = true;

    /// <summary>
    /// Cooldown period before re-activation on burn rate breach.
    /// Prevents alert flapping from repeatedly re-triggering activation.
    /// </summary>
    public TimeSpan ReactivationCooldown { get; init; } = TimeSpan.FromMinutes(15);
}
|
||||
|
||||
/// <summary>
/// Default implementation of incident mode hooks.
/// Keeps per-tenant incident state in memory (guarded by a lock), enforces a
/// re-activation cooldown for burn-rate triggers, applies a TTL to each
/// activation, and emits timeline events on activation and deactivation.
/// NOTE(review): state is in-memory only — it is lost on restart; confirm
/// whether persistence is handled elsewhere.
/// </summary>
public sealed class IncidentModeHooks : IIncidentModeHooks
{
    private readonly ITimelineEventEmitter _eventEmitter;
    private readonly ILogger<IncidentModeHooks> _logger;
    private readonly IncidentModeHooksOptions _options;
    // Per-tenant incident state and last-activation timestamps; both guarded by _lock.
    private readonly Dictionary<string, IncidentModeState> _tenantStates = new();
    private readonly Dictionary<string, DateTimeOffset> _lastActivations = new();
    private readonly object _lock = new();

    public IncidentModeHooks(
        ITimelineEventEmitter eventEmitter,
        ILogger<IncidentModeHooks> logger,
        IncidentModeHooksOptions? options = null)
    {
        _eventEmitter = eventEmitter ?? throw new ArgumentNullException(nameof(eventEmitter));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _options = options ?? new IncidentModeHooksOptions();
    }

    /// <inheritdoc/>
    public async Task<IncidentModeActivationResult> EvaluateBurnRateBreachAsync(
        string tenantId,
        string sloName,
        double burnRate,
        double threshold)
    {
        ArgumentException.ThrowIfNullOrEmpty(tenantId);
        ArgumentException.ThrowIfNullOrEmpty(sloName);

        // Below the configured activation threshold: report why we did not activate.
        if (burnRate < _options.BurnRateActivationThreshold)
        {
            return IncidentModeActivationResult.Failed(
                $"Burn rate {burnRate:F2}x below activation threshold {_options.BurnRateActivationThreshold:F2}x");
        }

        // Respect the re-activation cooldown to avoid alert flapping.
        lock (_lock)
        {
            if (_lastActivations.TryGetValue(tenantId, out var lastActivation))
            {
                var timeSinceLastActivation = DateTimeOffset.UtcNow - lastActivation;
                if (timeSinceLastActivation < _options.ReactivationCooldown)
                {
                    _logger.LogDebug(
                        "Skipping incident mode activation for tenant {TenantId} due to cooldown ({Remaining}s remaining)",
                        tenantId,
                        (_options.ReactivationCooldown - timeSinceLastActivation).TotalSeconds);
                    return IncidentModeActivationResult.Failed("Cooldown period active");
                }
            }
        }

        var reason = $"SLO '{sloName}' burn rate {burnRate:F2}x exceeded threshold {threshold:F2}x";
        return await ActivateInternalAsync(
            tenantId,
            "system:burn-rate-monitor",
            reason,
            IncidentModeSource.BurnRateAlert,
            _options.DefaultTtl);
    }

    /// <inheritdoc/>
    public Task<IncidentModeActivationResult> ActivateAsync(
        string tenantId,
        string actor,
        string reason,
        TimeSpan? ttl = null)
    {
        ArgumentException.ThrowIfNullOrEmpty(tenantId);
        ArgumentException.ThrowIfNullOrEmpty(actor);
        ArgumentException.ThrowIfNullOrEmpty(reason);

        // Infer the activation source from the actor's conventional prefix.
        var source = actor.StartsWith("api:", StringComparison.OrdinalIgnoreCase)
            ? IncidentModeSource.Api
            : actor.StartsWith("cli:", StringComparison.OrdinalIgnoreCase)
                ? IncidentModeSource.Cli
                : IncidentModeSource.Manual;

        return ActivateInternalAsync(tenantId, actor, reason, source, ttl ?? _options.DefaultTtl);
    }

    /// <summary>
    /// Shared activation path: records the new state under the lock, logs,
    /// and emits a timeline event (activation vs. extension).
    /// </summary>
    private async Task<IncidentModeActivationResult> ActivateInternalAsync(
        string tenantId,
        string actor,
        string reason,
        IncidentModeSource source,
        TimeSpan ttl)
    {
        var now = DateTimeOffset.UtcNow;
        var expiresAt = now + ttl;

        var newState = new IncidentModeState(
            IsActive: true,
            ActivatedAt: now,
            ExpiresAt: expiresAt,
            ActivatedBy: actor,
            ActivationReason: reason,
            Source: source,
            SamplingRateOverride: _options.SamplingRateOverride,
            RetentionOverride: _options.RetentionOverride,
            DebugSpansEnabled: _options.EnableDebugSpans);

        bool wasAlreadyActive;
        lock (_lock)
        {
            wasAlreadyActive = _tenantStates.TryGetValue(tenantId, out var existingState) &&
                               existingState.IsActive;
            _tenantStates[tenantId] = newState;
            _lastActivations[tenantId] = now;
        }

        _logger.LogInformation(
            "Incident mode activated for tenant {TenantId} by {Actor}: {Reason} (expires: {ExpiresAt})",
            tenantId, actor, reason, expiresAt);

        // Emit timeline event (failures are logged, not propagated).
        await EmitActivationEventAsync(tenantId, newState, wasAlreadyActive);

        return wasAlreadyActive
            ? IncidentModeActivationResult.AlreadyActive(newState)
            : IncidentModeActivationResult.Activated(newState);
    }

    /// <inheritdoc/>
    public async Task<IncidentModeDeactivationResult> DeactivateAsync(
        string tenantId,
        string actor,
        string reason)
    {
        ArgumentException.ThrowIfNullOrEmpty(tenantId);
        ArgumentException.ThrowIfNullOrEmpty(actor);

        IncidentModeState? previousState;
        lock (_lock)
        {
            if (!_tenantStates.TryGetValue(tenantId, out previousState) || !previousState.IsActive)
            {
                return IncidentModeDeactivationResult.WasNotActive();
            }

            _tenantStates[tenantId] = IncidentModeState.Inactive;
        }

        _logger.LogInformation(
            "Incident mode deactivated for tenant {TenantId} by {Actor}: {Reason}",
            tenantId, actor, reason);

        // Emit timeline event (failures are logged, not propagated).
        await EmitDeactivationEventAsync(tenantId, previousState, actor, reason);

        return IncidentModeDeactivationResult.Deactivated();
    }

    /// <inheritdoc/>
    public IncidentModeState GetState(string tenantId)
    {
        lock (_lock)
        {
            if (_tenantStates.TryGetValue(tenantId, out var state))
            {
                // Lazily expire: an activation past its TTL is collapsed to
                // Inactive on first read.
                if (state.IsActive && state.ExpiresAt.HasValue &&
                    DateTimeOffset.UtcNow >= state.ExpiresAt.Value)
                {
                    _tenantStates[tenantId] = IncidentModeState.Inactive;
                    return IncidentModeState.Inactive;
                }
                return state;
            }
            return IncidentModeState.Inactive;
        }
    }

    /// <inheritdoc/>
    public bool IsActive(string tenantId) => GetState(tenantId).IsActive;

    /// <inheritdoc/>
    public double GetEffectiveSamplingRate(string tenantId)
    {
        var state = GetState(tenantId);
        return state.IsActive ? state.SamplingRateOverride : _options.NormalSamplingRate;
    }

    /// <inheritdoc/>
    public TimeSpan GetEffectiveRetention(string tenantId)
    {
        var state = GetState(tenantId);
        return state.IsActive ? state.RetentionOverride : _options.NormalRetention;
    }

    /// <inheritdoc/>
    public bool IsDebugSpansEnabled(string tenantId)
    {
        var state = GetState(tenantId);
        return state.IsActive && state.DebugSpansEnabled;
    }

    /// <summary>
    /// Emits an activation (or extension) timeline event; emission failures
    /// are logged and swallowed so they never block activation.
    /// </summary>
    private async Task EmitActivationEventAsync(
        string tenantId,
        IncidentModeState state,
        bool wasExtension)
    {
        var eventType = wasExtension
            ? "orchestrator.incident_mode.extended"
            : "orchestrator.incident_mode.activated";

        var @event = new TimelineEvent(
            EventSeq: null,
            EventId: Guid.NewGuid(),
            TenantId: tenantId,
            EventType: eventType,
            Source: "orchestrator",
            OccurredAt: DateTimeOffset.UtcNow,
            ReceivedAt: null,
            CorrelationId: Guid.NewGuid().ToString(),
            TraceId: null,
            SpanId: null,
            Actor: state.ActivatedBy,
            Severity: TimelineEventSeverity.Warning,
            Attributes: new Dictionary<string, string>
            {
                ["reason"] = state.ActivationReason ?? string.Empty,
                ["source"] = state.Source.ToString(),
                ["expires_at"] = state.ExpiresAt?.ToString("O") ?? string.Empty,
                // FIX (CA1305): these attributes are machine-readable; format
                // with the invariant culture so downstream parsers are not
                // affected by the server's locale (e.g. ',' decimal separator).
                ["sampling_rate_override"] = state.SamplingRateOverride.ToString(CultureInfo.InvariantCulture),
                ["retention_override_days"] = state.RetentionOverride.TotalDays.ToString(CultureInfo.InvariantCulture),
                ["debug_spans_enabled"] = state.DebugSpansEnabled.ToString()
            },
            PayloadHash: null,
            RawPayloadJson: null,
            NormalizedPayloadJson: null,
            EvidencePointer: null,
            RunId: null,
            JobId: null,
            ProjectId: null);

        try
        {
            await _eventEmitter.EmitAsync(@event);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to emit incident mode activation event for tenant {TenantId}", tenantId);
        }
    }

    /// <summary>
    /// Emits a deactivation timeline event; emission failures are logged and
    /// swallowed so they never block deactivation.
    /// </summary>
    private async Task EmitDeactivationEventAsync(
        string tenantId,
        IncidentModeState previousState,
        string actor,
        string reason)
    {
        var duration = previousState.ActivatedAt.HasValue
            ? DateTimeOffset.UtcNow - previousState.ActivatedAt.Value
            : TimeSpan.Zero;

        var @event = new TimelineEvent(
            EventSeq: null,
            EventId: Guid.NewGuid(),
            TenantId: tenantId,
            EventType: "orchestrator.incident_mode.deactivated",
            Source: "orchestrator",
            OccurredAt: DateTimeOffset.UtcNow,
            ReceivedAt: null,
            CorrelationId: Guid.NewGuid().ToString(),
            TraceId: null,
            SpanId: null,
            Actor: actor,
            Severity: TimelineEventSeverity.Info,
            Attributes: new Dictionary<string, string>
            {
                ["reason"] = reason ?? string.Empty,
                ["previous_source"] = previousState.Source.ToString(),
                ["activated_at"] = previousState.ActivatedAt?.ToString("O") ?? string.Empty,
                // FIX (CA1305): invariant culture for machine-readable numeric attribute.
                ["duration_seconds"] = duration.TotalSeconds.ToString(CultureInfo.InvariantCulture)
            },
            PayloadHash: null,
            RawPayloadJson: null,
            NormalizedPayloadJson: null,
            EvidencePointer: null,
            RunId: null,
            JobId: null,
            ProjectId: null);

        try
        {
            await _eventEmitter.EmitAsync(@event);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to emit incident mode deactivation event for tenant {TenantId}", tenantId);
        }
    }
}
|
||||
@@ -0,0 +1,327 @@
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Telemetry.Core;
|
||||
|
||||
namespace StellaOps.Orchestrator.Infrastructure.Observability;
|
||||
|
||||
/// <summary>
/// Golden signal metrics integration for the Orchestrator service.
/// Per ORCH-OBS-51-001: Publish golden-signal metrics and SLOs.
/// Thin facade over <see cref="GoldenSignalMetrics"/> that stamps every
/// measurement with tenant/operation dimensions.
/// </summary>
public sealed class OrchestratorGoldenSignals
{
    /// <summary>
    /// Activity source for orchestrator spans.
    /// </summary>
    public static readonly ActivitySource ActivitySource = new("StellaOps.Orchestrator", "1.0.0");

    private readonly GoldenSignalMetrics _metrics;
    private readonly ILogger<OrchestratorGoldenSignals> _logger;

    public OrchestratorGoldenSignals(
        GoldenSignalMetrics metrics,
        ILogger<OrchestratorGoldenSignals> logger)
    {
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Records job scheduling latency. Input is milliseconds; the metric is recorded in seconds.
    /// </summary>
    public void RecordSchedulingLatency(string tenantId, string jobType, double latencyMs) =>
        _metrics.RecordLatency(
            latencyMs / 1000.0,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("operation", "scheduling"));

    /// <summary>
    /// Records job dispatch latency. Input is milliseconds; the metric is recorded in seconds.
    /// </summary>
    public void RecordDispatchLatency(string tenantId, string jobType, double latencyMs) =>
        _metrics.RecordLatency(
            latencyMs / 1000.0,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("operation", "dispatch"));

    /// <summary>
    /// Records job completion latency (already in seconds).
    /// </summary>
    public void RecordJobLatency(string tenantId, string jobType, double durationSeconds) =>
        _metrics.RecordLatency(
            durationSeconds,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("operation", "execution"));

    /// <summary>
    /// Records an API request (traffic signal), tagged with endpoint, method and status code.
    /// </summary>
    public void RecordRequest(string tenantId, string endpoint, string method, int statusCode) =>
        _metrics.IncrementRequests(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("endpoint", endpoint),
            GoldenSignalMetrics.Tag("method", method),
            GoldenSignalMetrics.Tag("status_code", statusCode.ToString()));

    /// <summary>
    /// Records a job execution error.
    /// </summary>
    public void RecordJobError(string tenantId, string jobType, string errorType) =>
        _metrics.IncrementErrors(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("error_type", errorType),
            GoldenSignalMetrics.Tag("operation", "job_execution"));

    /// <summary>
    /// Records an API error.
    /// </summary>
    public void RecordApiError(string tenantId, string endpoint, string errorType) =>
        _metrics.IncrementErrors(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("endpoint", endpoint),
            GoldenSignalMetrics.Tag("error_type", errorType),
            GoldenSignalMetrics.Tag("operation", "api"));

    /// <summary>
    /// Records a scheduling error; <paramref name="reason"/> becomes the error_type tag.
    /// </summary>
    public void RecordSchedulingError(string tenantId, string jobType, string reason) =>
        _metrics.IncrementErrors(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("error_type", reason),
            GoldenSignalMetrics.Tag("operation", "scheduling"));

    /// <summary>
    /// Records traffic: one job created.
    /// </summary>
    public void RecordJobCreated(string tenantId, string jobType) =>
        _metrics.IncrementRequests(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("job_type", jobType),
            GoldenSignalMetrics.Tag("operation", "job_created"));

    /// <summary>
    /// Records traffic: one run created.
    /// </summary>
    public void RecordRunCreated(string tenantId, string runType) =>
        _metrics.IncrementRequests(1,
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("run_type", runType),
            GoldenSignalMetrics.Tag("operation", "run_created"));

    /// <summary>
    /// Creates a disposable scope that records latency for <paramref name="operation"/>
    /// when disposed; extra tags are appended after the standard tenant/operation pair.
    /// </summary>
    public IDisposable MeasureLatency(string tenantId, string operation, params KeyValuePair<string, object?>[] additionalTags)
    {
        KeyValuePair<string, object?>[] merged =
        [
            GoldenSignalMetrics.Tag("tenant_id", tenantId),
            GoldenSignalMetrics.Tag("operation", operation),
            .. additionalTags,
        ];
        return _metrics.MeasureLatency(merged);
    }

    /// <summary>
    /// Starts a new activity span for the operation. May return null when no listener is attached.
    /// </summary>
    public Activity? StartActivity(string operationName, ActivityKind kind = ActivityKind.Internal) =>
        ActivitySource.StartActivity(operationName, kind);

    /// <summary>
    /// Starts a scheduling activity span tagged with tenant, job type and job id.
    /// </summary>
    public Activity? StartSchedulingActivity(string tenantId, string jobType, Guid jobId)
    {
        var span = ActivitySource.StartActivity("orchestrator.scheduling", ActivityKind.Internal);
        // SetTag returns the activity, so the null-conditional chain is skipped entirely
        // when no listener produced a span.
        span?.SetTag("tenant_id", tenantId)
            .SetTag("job_type", jobType)
            .SetTag("job_id", jobId.ToString());
        return span;
    }

    /// <summary>
    /// Starts a job dispatch activity span tagged with tenant, job type and job id.
    /// </summary>
    public Activity? StartDispatchActivity(string tenantId, string jobType, Guid jobId)
    {
        var span = ActivitySource.StartActivity("orchestrator.dispatch", ActivityKind.Internal);
        span?.SetTag("tenant_id", tenantId)
            .SetTag("job_type", jobType)
            .SetTag("job_id", jobId.ToString());
        return span;
    }

    /// <summary>
    /// Registers a saturation provider for queue depth monitoring.
    /// </summary>
    public void SetQueueSaturationProvider(Func<double> provider)
    {
        _metrics.SetSaturationProvider(provider);
        _logger.LogInformation("Queue saturation provider registered");
    }
}
|
||||
|
||||
/// <summary>
/// SLO (Service Level Objective) definitions for the Orchestrator service.
/// </summary>
public static class OrchestratorSloDefinitions
{
    /// <summary>
    /// Job scheduling latency SLO: 99% of jobs should be scheduled within 5 seconds.
    /// </summary>
    public static readonly SloDefinition SchedulingLatency = new(
        Name: "orchestrator_scheduling_latency",
        Description: "99% of jobs scheduled within 5 seconds",
        Objective: 0.99,
        Window: TimeSpan.FromDays(7),
        MetricName: "orchestrator.scheduling.latency.seconds",
        ThresholdSeconds: 5.0);

    /// <summary>
    /// Job dispatch latency SLO: 99.5% of jobs dispatched within 10 seconds.
    /// NOTE(review): the metric name suggests milliseconds while the threshold is
    /// expressed in seconds — confirm the evaluator converts units consistently.
    /// </summary>
    public static readonly SloDefinition DispatchLatency = new(
        Name: "orchestrator_dispatch_latency",
        Description: "99.5% of jobs dispatched within 10 seconds",
        Objective: 0.995,
        Window: TimeSpan.FromDays(7),
        MetricName: "orchestrator.scale.dispatch_latency.ms",
        ThresholdSeconds: 10.0);

    /// <summary>
    /// Job success rate SLO: 99% of jobs should complete successfully.
    /// </summary>
    public static readonly SloDefinition JobSuccessRate = new(
        Name: "orchestrator_job_success_rate",
        Description: "99% of jobs complete successfully",
        Objective: 0.99,
        Window: TimeSpan.FromDays(7),
        MetricName: "orchestrator.jobs.completed",
        ThresholdSeconds: null);

    /// <summary>
    /// API availability SLO: 99.9% of API requests should succeed.
    /// </summary>
    public static readonly SloDefinition ApiAvailability = new(
        Name: "orchestrator_api_availability",
        Description: "99.9% of API requests succeed",
        Objective: 0.999,
        Window: TimeSpan.FromDays(7),
        MetricName: "stellaops_golden_signal_requests_total",
        ThresholdSeconds: null);

    // Built once: the previous expression-bodied property allocated a fresh
    // array on every access. Declared after the four definitions so the static
    // field initializers have already run. Collection-expression targeting
    // IReadOnlyList<T> yields a read-only implementation.
    private static readonly IReadOnlyList<SloDefinition> AllDefinitions =
    [
        SchedulingLatency,
        DispatchLatency,
        JobSuccessRate,
        ApiAvailability
    ];

    /// <summary>
    /// Gets all SLO definitions.
    /// </summary>
    public static IReadOnlyList<SloDefinition> All => AllDefinitions;
}
|
||||
|
||||
/// <summary>
/// SLO definition record.
/// </summary>
/// <param name="Name">Stable identifier of the SLO (used to derive alert rule names).</param>
/// <param name="Description">Human-readable statement of the objective.</param>
/// <param name="Objective">Target success ratio in [0,1], e.g. 0.99 for 99%.</param>
/// <param name="Window">Rolling evaluation window for the objective.</param>
/// <param name="MetricName">Name of the metric the SLO is evaluated against.</param>
/// <param name="ThresholdSeconds">Latency threshold in seconds, or null for availability/ratio SLOs.</param>
public sealed record SloDefinition(
    string Name,
    string Description,
    double Objective,
    TimeSpan Window,
    string MetricName,
    double? ThresholdSeconds);
|
||||
|
||||
/// <summary>
/// Burn rate alert configuration. Implements multi-window, multi-burn-rate
/// alerting: an alert fires only when both a short and a long window exceed
/// the same burn-rate threshold, which suppresses short spikes while still
/// catching sustained budget consumption.
/// </summary>
public static class OrchestratorBurnRateAlerts
{
    /// <summary>
    /// Critical burn rate threshold (14x consumes error budget in 2 hours).
    /// </summary>
    public const double CriticalBurnRate = 14.0;

    /// <summary>
    /// Warning burn rate threshold (6x consumes error budget in 6 hours).
    /// </summary>
    public const double WarningBurnRate = 6.0;

    /// <summary>
    /// Info burn rate threshold (1x is sustainable, anything higher is consuming budget).
    /// </summary>
    public const double InfoBurnRate = 1.0;

    /// <summary>
    /// Short window for multi-window alerts (5 minutes).
    /// </summary>
    public static readonly TimeSpan ShortWindow = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Long window for multi-window alerts (1 hour).
    /// </summary>
    public static readonly TimeSpan LongWindow = TimeSpan.FromHours(1);

    /// <summary>
    /// Gets Prometheus alert rule expressions for burn rate monitoring.
    /// </summary>
    /// <param name="sloName">SLO identifier; used both as the metric prefix and the rule-name prefix.</param>
    /// <param name="objective">Target success ratio in [0,1]; the error budget is 1 - objective.</param>
    /// <returns>Rule name to PromQL expression, one critical and one warning rule.</returns>
    public static IReadOnlyDictionary<string, string> GetAlertRules(string sloName, double objective)
    {
        var errorRate = 1.0 - objective;
        // FormattableString.Invariant: the doubles interpolated below must always
        // render with '.' decimal separators. Under a comma-decimal current culture
        // (e.g. de-DE) plain interpolation would emit "0,01" and produce invalid PromQL.
        return new Dictionary<string, string>
        {
            [$"{sloName}_burn_rate_critical"] = FormattableString.Invariant($@"
(
  sum(rate({sloName}_errors_total[5m])) / sum(rate({sloName}_requests_total[5m]))
) / {errorRate} > {CriticalBurnRate}
and
(
  sum(rate({sloName}_errors_total[1h])) / sum(rate({sloName}_requests_total[1h]))
) / {errorRate} > {CriticalBurnRate}
"),
            [$"{sloName}_burn_rate_warning"] = FormattableString.Invariant($@"
(
  sum(rate({sloName}_errors_total[30m])) / sum(rate({sloName}_requests_total[30m]))
) / {errorRate} > {WarningBurnRate}
and
(
  sum(rate({sloName}_errors_total[6h])) / sum(rate({sloName}_requests_total[6h]))
) / {errorRate} > {WarningBurnRate}
")
        };
    }
}
|
||||
@@ -0,0 +1,940 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.Infrastructure.Repositories;
|
||||
|
||||
namespace StellaOps.Orchestrator.Infrastructure.Postgres;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of pack registry repository.
|
||||
/// Per 150.B-PacksRegistry: Postgres-backed pack registry with tenant/project scoping.
|
||||
/// </summary>
|
||||
public sealed class PostgresPackRegistryRepository : IPackRegistryRepository
|
||||
{
|
||||
private readonly OrchestratorDataSource _dataSource;
|
||||
private readonly ILogger<PostgresPackRegistryRepository> _logger;
|
||||
|
||||
// Shared SELECT column list for the packs table. NOTE(review): the order is
// presumably consumed positionally by MapPack — confirm and keep the two in sync
// when adding or reordering columns.
private const string PackColumns = """
    pack_id, tenant_id, project_id, name, display_name, description,
    status, created_by, created_at, updated_at, updated_by,
    metadata, tags, icon_uri, version_count, latest_version,
    published_at, published_by
    """;

// Shared SELECT column list for the pack_versions table. NOTE(review): order is
// presumably consumed positionally by MapPackVersion — confirm and keep in sync.
private const string VersionColumns = """
    pack_version_id, tenant_id, pack_id, version, sem_ver, status,
    artifact_uri, artifact_digest, artifact_mime_type, artifact_size_bytes,
    manifest_json, manifest_digest, release_notes, min_engine_version, dependencies,
    created_by, created_at, updated_at, updated_by,
    published_at, published_by, deprecated_at, deprecated_by, deprecation_reason,
    signature_uri, signature_algorithm, signed_by, signed_at,
    metadata, download_count
    """;

/// <summary>
/// Creates the repository over the tenant-scoped orchestrator data source.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when either dependency is null.</exception>
public PostgresPackRegistryRepository(
    OrchestratorDataSource dataSource,
    ILogger<PostgresPackRegistryRepository> logger)
{
    _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
|
||||
|
||||
// Pack CRUD
|
||||
|
||||
/// <summary>
/// Fetches a single pack by id within the given tenant, or null when absent.
/// Opens a reader-role connection; both filters are parameterised.
/// </summary>
public async Task<Pack?> GetPackByIdAsync(
    string tenantId,
    Guid packId,
    CancellationToken cancellationToken)
{
    const string sql = $"SELECT {PackColumns} FROM packs WHERE tenant_id = @tenant_id AND pack_id = @pack_id";

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    return await reader.ReadAsync(cancellationToken)
        ? MapPack(reader)
        : null;
}
|
||||
|
||||
/// <summary>
/// Fetches a single pack by its tenant-scoped name, or null when absent.
/// The lookup key is lower-cased before querying — NOTE(review): assumes names
/// are stored lower-case; confirm against the pack creation path.
/// </summary>
public async Task<Pack?> GetPackByNameAsync(
    string tenantId,
    string name,
    CancellationToken cancellationToken)
{
    const string sql = $"SELECT {PackColumns} FROM packs WHERE tenant_id = @tenant_id AND name = @name";

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("name", name.ToLowerInvariant());

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    return await reader.ReadAsync(cancellationToken)
        ? MapPack(reader)
        : null;
}
|
||||
|
||||
/// <summary>
/// Lists packs for a tenant with optional project/status/search/tag filters,
/// most recently updated first, paged by limit/offset. The limit is capped at 100.
/// Filters here must stay in lock-step with <see cref="CountPacksAsync"/>.
/// </summary>
public async Task<IReadOnlyList<Pack>> ListPacksAsync(
    string tenantId,
    string? projectId,
    PackStatus? status,
    string? searchTerm,
    string? tag,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    var sql = $"SELECT {PackColumns} FROM packs WHERE tenant_id = @tenant_id";
    var parameters = new List<NpgsqlParameter>
    {
        new("tenant_id", tenantId)
    };

    if (projectId is not null)
    {
        sql += " AND project_id = @project_id";
        parameters.Add(new("project_id", projectId));
    }

    if (status.HasValue)
    {
        // Cast the text parameter to the pack_status enum: the column is typed
        // pack_status (see the ::pack_status casts in the write paths), and
        // Postgres has no enum = text operator, so a bare @status would fail.
        sql += " AND status = @status::pack_status";
        parameters.Add(new("status", status.Value.ToString().ToLowerInvariant()));
    }

    if (!string.IsNullOrWhiteSpace(searchTerm))
    {
        // NOTE(review): '%' and '_' in searchTerm are not escaped, so users can
        // inject LIKE wildcards. Parameterisation still prevents SQL injection.
        sql += " AND (name ILIKE @search OR display_name ILIKE @search OR description ILIKE @search)";
        parameters.Add(new("search", $"%{searchTerm}%"));
    }

    if (!string.IsNullOrWhiteSpace(tag))
    {
        // Substring match against the tags text column; may match partial tags.
        sql += " AND tags ILIKE @tag";
        parameters.Add(new("tag", $"%{tag}%"));
    }

    sql += " ORDER BY updated_at DESC LIMIT @limit OFFSET @offset";
    parameters.Add(new("limit", Math.Min(limit, 100)));
    parameters.Add(new("offset", offset));

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddRange(parameters.ToArray());

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    var results = new List<Pack>();
    while (await reader.ReadAsync(cancellationToken))
    {
        results.Add(MapPack(reader));
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Counts packs matching the same filters as <see cref="ListPacksAsync"/>
/// (no paging). Keep the two filter builders in lock-step.
/// </summary>
public async Task<int> CountPacksAsync(
    string tenantId,
    string? projectId,
    PackStatus? status,
    string? searchTerm,
    string? tag,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    var sql = "SELECT COUNT(*) FROM packs WHERE tenant_id = @tenant_id";
    var parameters = new List<NpgsqlParameter>
    {
        new("tenant_id", tenantId)
    };

    if (projectId is not null)
    {
        sql += " AND project_id = @project_id";
        parameters.Add(new("project_id", projectId));
    }

    if (status.HasValue)
    {
        // Cast to the pack_status enum, matching the ::pack_status casts used by
        // the write paths; a bare text parameter cannot compare to an enum column.
        sql += " AND status = @status::pack_status";
        parameters.Add(new("status", status.Value.ToString().ToLowerInvariant()));
    }

    if (!string.IsNullOrWhiteSpace(searchTerm))
    {
        // NOTE(review): LIKE wildcards in searchTerm are not escaped (see ListPacksAsync).
        sql += " AND (name ILIKE @search OR display_name ILIKE @search OR description ILIKE @search)";
        parameters.Add(new("search", $"%{searchTerm}%"));
    }

    if (!string.IsNullOrWhiteSpace(tag))
    {
        sql += " AND tags ILIKE @tag";
        parameters.Add(new("tag", $"%{tag}%"));
    }

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddRange(parameters.ToArray());

    var result = await command.ExecuteScalarAsync(cancellationToken);
    return Convert.ToInt32(result);
}
|
||||
|
||||
/// <summary>
/// Inserts a new pack row on a writer-role connection. Column values are bound
/// by AddPackParameters; the @status text is cast server-side to the pack_status
/// enum. NOTE(review): a duplicate (tenant_id, name) presumably surfaces as a
/// unique-violation PostgresException — confirm the constraint exists.
/// </summary>
public async Task CreatePackAsync(Pack pack, CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(pack.TenantId, "writer", cancellationToken);

    const string sql = """
        INSERT INTO packs (
            pack_id, tenant_id, project_id, name, display_name, description,
            status, created_by, created_at, updated_at, updated_by,
            metadata, tags, icon_uri, version_count, latest_version,
            published_at, published_by)
        VALUES (
            @pack_id, @tenant_id, @project_id, @name, @display_name, @description,
            @status::pack_status, @created_by, @created_at, @updated_at, @updated_by,
            @metadata, @tags, @icon_uri, @version_count, @latest_version,
            @published_at, @published_by)
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    // AddPackParameters binds every @-parameter referenced above.
    AddPackParameters(command, pack);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Persists the mutable pack fields. Identity and provenance columns
/// (pack_id, tenant_id, project_id, name, created_*) are deliberately not
/// updated. Silently a no-op when the (tenant_id, pack_id) row does not exist.
/// </summary>
public async Task UpdatePackAsync(Pack pack, CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(pack.TenantId, "writer", cancellationToken);

    const string sql = """
        UPDATE packs SET
            display_name = @display_name,
            description = @description,
            status = @status::pack_status,
            updated_at = @updated_at,
            updated_by = @updated_by,
            metadata = @metadata,
            tags = @tags,
            icon_uri = @icon_uri,
            version_count = @version_count,
            latest_version = @latest_version,
            published_at = @published_at,
            published_by = @published_by
        WHERE tenant_id = @tenant_id AND pack_id = @pack_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    // Reuses the full insert parameter set; parameters not referenced by this
    // UPDATE are simply unused by Npgsql.
    AddPackParameters(command, pack);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Transitions a pack's status and stamps the audit fields. Publication fields
/// use COALESCE so passing null preserves the existing published_at/published_by
/// (i.e. a pack's first publication timestamp is never cleared by later updates).
/// Silently a no-op when the row does not exist.
/// </summary>
public async Task UpdatePackStatusAsync(
    string tenantId,
    Guid packId,
    PackStatus status,
    string updatedBy,
    DateTimeOffset? publishedAt,
    string? publishedBy,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);

    const string sql = """
        UPDATE packs SET
            status = @status::pack_status,
            updated_at = @updated_at,
            updated_by = @updated_by,
            published_at = COALESCE(@published_at, published_at),
            published_by = COALESCE(@published_by, published_by)
        WHERE tenant_id = @tenant_id AND pack_id = @pack_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);
    // Enum values are stored lower-case (matches the write paths elsewhere).
    command.Parameters.AddWithValue("status", status.ToString().ToLowerInvariant());
    command.Parameters.AddWithValue("updated_at", DateTimeOffset.UtcNow.UtcDateTime);
    command.Parameters.AddWithValue("updated_by", updatedBy);
    command.Parameters.AddWithValue("published_at", (object?)publishedAt?.UtcDateTime ?? DBNull.Value);
    command.Parameters.AddWithValue("published_by", (object?)publishedBy ?? DBNull.Value);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Deletes a pack, but only while it is still a draft with zero versions —
/// the WHERE clause enforces the guard atomically in SQL, so there is no
/// read-check-delete race.
/// </summary>
/// <returns>True when a row was deleted; false when the pack was missing or not deletable.</returns>
public async Task<bool> DeletePackAsync(
    string tenantId,
    Guid packId,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);

    const string sql = """
        DELETE FROM packs
        WHERE tenant_id = @tenant_id
          AND pack_id = @pack_id
          AND status = 'draft'::pack_status
          AND version_count = 0
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);

    var rows = await command.ExecuteNonQueryAsync(cancellationToken);
    return rows > 0;
}
|
||||
|
||||
// Pack version operations
|
||||
|
||||
/// <summary>
/// Fetches a single pack version by its id within the given tenant, or null when absent.
/// </summary>
public async Task<PackVersion?> GetVersionByIdAsync(
    string tenantId,
    Guid packVersionId,
    CancellationToken cancellationToken)
{
    const string sql = $"SELECT {VersionColumns} FROM pack_versions WHERE tenant_id = @tenant_id AND pack_version_id = @pack_version_id";

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_version_id", packVersionId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    return await reader.ReadAsync(cancellationToken)
        ? MapPackVersion(reader)
        : null;
}
|
||||
|
||||
/// <summary>
/// Fetches the pack version identified by (tenant, pack, version string), or null when absent.
/// The version string is matched verbatim — no normalisation is applied.
/// </summary>
public async Task<PackVersion?> GetVersionAsync(
    string tenantId,
    Guid packId,
    string version,
    CancellationToken cancellationToken)
{
    const string sql = $"SELECT {VersionColumns} FROM pack_versions WHERE tenant_id = @tenant_id AND pack_id = @pack_id AND version = @version";

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);
    command.Parameters.AddWithValue("version", version);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    return await reader.ReadAsync(cancellationToken)
        ? MapPackVersion(reader)
        : null;
}
|
||||
|
||||
/// <summary>
/// Returns the most recently created published version of a pack, or null when
/// none exists. NOTE(review): "latest" is defined by created_at, not by semantic
/// version ordering — a re-published older version would win; confirm intended.
/// </summary>
/// <param name="includePrerelease">
/// When false, versions whose sem_ver contains a '-' (the SemVer 2.0 prerelease
/// marker) are excluded.
/// </param>
public async Task<PackVersion?> GetLatestVersionAsync(
    string tenantId,
    Guid packId,
    bool includePrerelease,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    var sql = $"""
        SELECT {VersionColumns}
        FROM pack_versions
        WHERE tenant_id = @tenant_id
          AND pack_id = @pack_id
          AND status = 'published'::pack_version_status
        """;

    if (!includePrerelease)
    {
        // Any hyphen marks a prerelease (e.g. "1.2.0-rc.1").
        sql += " AND sem_ver NOT LIKE '%-%'";
    }

    sql += " ORDER BY created_at DESC LIMIT 1";

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    if (!await reader.ReadAsync(cancellationToken))
    {
        return null;
    }

    return MapPackVersion(reader);
}
|
||||
|
||||
/// <summary>
/// Lists versions of a pack, newest first, optionally filtered by status,
/// paged by limit/offset (limit capped at 100). Keep filters in lock-step
/// with CountVersionsAsync.
/// </summary>
public async Task<IReadOnlyList<PackVersion>> ListVersionsAsync(
    string tenantId,
    Guid packId,
    PackVersionStatus? status,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    var sql = $"SELECT {VersionColumns} FROM pack_versions WHERE tenant_id = @tenant_id AND pack_id = @pack_id";

    if (status.HasValue)
    {
        // Text parameter cast to the pack_version_status enum server-side.
        sql += " AND status = @status::pack_version_status";
    }

    sql += " ORDER BY created_at DESC LIMIT @limit OFFSET @offset";

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);
    if (status.HasValue)
    {
        command.Parameters.AddWithValue("status", status.Value.ToString().ToLowerInvariant());
    }
    command.Parameters.AddWithValue("limit", Math.Min(limit, 100));
    command.Parameters.AddWithValue("offset", offset);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    var results = new List<PackVersion>();
    while (await reader.ReadAsync(cancellationToken))
    {
        results.Add(MapPackVersion(reader));
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Counts versions of a pack, optionally filtered by status — the unpaged
/// companion of ListVersionsAsync; keep the filters in lock-step.
/// </summary>
public async Task<int> CountVersionsAsync(
    string tenantId,
    Guid packId,
    PackVersionStatus? status,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    var sql = "SELECT COUNT(*) FROM pack_versions WHERE tenant_id = @tenant_id AND pack_id = @pack_id";

    if (status.HasValue)
    {
        // Text parameter cast to the pack_version_status enum server-side.
        sql += " AND status = @status::pack_version_status";
    }

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);
    if (status.HasValue)
    {
        command.Parameters.AddWithValue("status", status.Value.ToString().ToLowerInvariant());
    }

    var result = await command.ExecuteScalarAsync(cancellationToken);
    return Convert.ToInt32(result);
}
|
||||
|
||||
/// <summary>
/// Inserts a new pack version row on a writer-role connection. All column
/// values are bound by AddVersionParameters; the @status text is cast to the
/// pack_version_status enum server-side. Does not bump the parent pack's
/// version_count — NOTE(review): presumably the caller updates the pack row
/// separately; confirm.
/// </summary>
public async Task CreateVersionAsync(PackVersion version, CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(version.TenantId, "writer", cancellationToken);

    const string sql = """
        INSERT INTO pack_versions (
            pack_version_id, tenant_id, pack_id, version, sem_ver, status,
            artifact_uri, artifact_digest, artifact_mime_type, artifact_size_bytes,
            manifest_json, manifest_digest, release_notes, min_engine_version, dependencies,
            created_by, created_at, updated_at, updated_by,
            published_at, published_by, deprecated_at, deprecated_by, deprecation_reason,
            signature_uri, signature_algorithm, signed_by, signed_at,
            metadata, download_count)
        VALUES (
            @pack_version_id, @tenant_id, @pack_id, @version, @sem_ver, @status::pack_version_status,
            @artifact_uri, @artifact_digest, @artifact_mime_type, @artifact_size_bytes,
            @manifest_json, @manifest_digest, @release_notes, @min_engine_version, @dependencies,
            @created_by, @created_at, @updated_at, @updated_by,
            @published_at, @published_by, @deprecated_at, @deprecated_by, @deprecation_reason,
            @signature_uri, @signature_algorithm, @signed_by, @signed_at,
            @metadata, @download_count)
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    // AddVersionParameters binds every @-parameter referenced above.
    AddVersionParameters(command, version);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Persists the mutable fields of a pack version. Immutable columns
/// (identifiers, version/sem_ver, artifact_*, manifest_*, created_*,
/// download_count) are deliberately not updated. Silently a no-op when the
/// (tenant_id, pack_version_id) row does not exist.
/// </summary>
public async Task UpdateVersionAsync(PackVersion version, CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(version.TenantId, "writer", cancellationToken);

    const string sql = """
        UPDATE pack_versions SET
            status = @status::pack_version_status,
            release_notes = @release_notes,
            min_engine_version = @min_engine_version,
            dependencies = @dependencies,
            updated_at = @updated_at,
            updated_by = @updated_by,
            published_at = @published_at,
            published_by = @published_by,
            deprecated_at = @deprecated_at,
            deprecated_by = @deprecated_by,
            deprecation_reason = @deprecation_reason,
            signature_uri = @signature_uri,
            signature_algorithm = @signature_algorithm,
            signed_by = @signed_by,
            signed_at = @signed_at,
            metadata = @metadata
        WHERE tenant_id = @tenant_id AND pack_version_id = @pack_version_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    // Reuses the full insert parameter set; parameters not referenced by this
    // UPDATE are simply unused by Npgsql.
    AddVersionParameters(command, version);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Transitions a pack version's status and stamps the audit fields. All
/// publication and deprecation fields use COALESCE, so passing null preserves
/// the existing value — first publication/deprecation timestamps are never
/// cleared by later transitions. Silently a no-op when the row does not exist.
/// </summary>
public async Task UpdateVersionStatusAsync(
    string tenantId,
    Guid packVersionId,
    PackVersionStatus status,
    string updatedBy,
    DateTimeOffset? publishedAt,
    string? publishedBy,
    DateTimeOffset? deprecatedAt,
    string? deprecatedBy,
    string? deprecationReason,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);

    const string sql = """
        UPDATE pack_versions SET
            status = @status::pack_version_status,
            updated_at = @updated_at,
            updated_by = @updated_by,
            published_at = COALESCE(@published_at, published_at),
            published_by = COALESCE(@published_by, published_by),
            deprecated_at = COALESCE(@deprecated_at, deprecated_at),
            deprecated_by = COALESCE(@deprecated_by, deprecated_by),
            deprecation_reason = COALESCE(@deprecation_reason, deprecation_reason)
        WHERE tenant_id = @tenant_id AND pack_version_id = @pack_version_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_version_id", packVersionId);
    // Enum values are stored lower-case (matches the write paths elsewhere).
    command.Parameters.AddWithValue("status", status.ToString().ToLowerInvariant());
    command.Parameters.AddWithValue("updated_at", DateTimeOffset.UtcNow.UtcDateTime);
    command.Parameters.AddWithValue("updated_by", updatedBy);
    command.Parameters.AddWithValue("published_at", (object?)publishedAt?.UtcDateTime ?? DBNull.Value);
    command.Parameters.AddWithValue("published_by", (object?)publishedBy ?? DBNull.Value);
    command.Parameters.AddWithValue("deprecated_at", (object?)deprecatedAt?.UtcDateTime ?? DBNull.Value);
    command.Parameters.AddWithValue("deprecated_by", (object?)deprecatedBy ?? DBNull.Value);
    command.Parameters.AddWithValue("deprecation_reason", (object?)deprecationReason ?? DBNull.Value);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Records signing metadata for a pack version. The signer is also recorded as
/// the row's last updater (updated_by is bound to @signed_by below).
/// </summary>
public async Task UpdateVersionSignatureAsync(
    string tenantId,
    Guid packVersionId,
    string signatureUri,
    string signatureAlgorithm,
    string signedBy,
    DateTimeOffset signedAt,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);

    const string sql = """
        UPDATE pack_versions SET
        signature_uri = @signature_uri,
        signature_algorithm = @signature_algorithm,
        signed_by = @signed_by,
        signed_at = @signed_at,
        updated_at = @updated_at,
        updated_by = @signed_by
        WHERE tenant_id = @tenant_id AND pack_version_id = @pack_version_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_version_id", packVersionId);
    command.Parameters.AddWithValue("signature_uri", signatureUri);
    command.Parameters.AddWithValue("signature_algorithm", signatureAlgorithm);
    command.Parameters.AddWithValue("signed_by", signedBy);
    // Timestamps are stored as UTC DateTime values.
    command.Parameters.AddWithValue("signed_at", signedAt.UtcDateTime);
    command.Parameters.AddWithValue("updated_at", DateTimeOffset.UtcNow.UtcDateTime);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Bumps the per-version download counter by one; the increment happens
/// server-side in a single UPDATE.
/// </summary>
public async Task IncrementDownloadCountAsync(
    string tenantId,
    Guid packVersionId,
    CancellationToken cancellationToken)
{
    const string sql = """
        UPDATE pack_versions SET download_count = download_count + 1
        WHERE tenant_id = @tenant_id AND pack_version_id = @pack_version_id
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);

    var parameters = command.Parameters;
    parameters.AddWithValue("tenant_id", tenantId);
    parameters.AddWithValue("pack_version_id", packVersionId);

    await command.ExecuteNonQueryAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Deletes a pack version; the status predicate restricts deletion to rows
/// still in 'draft'. Returns true when a row was actually removed.
/// </summary>
public async Task<bool> DeleteVersionAsync(
    string tenantId,
    Guid packVersionId,
    CancellationToken cancellationToken)
{
    const string sql = """
        DELETE FROM pack_versions
        WHERE tenant_id = @tenant_id
        AND pack_version_id = @pack_version_id
        AND status = 'draft'::pack_version_status
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);

    var parameters = command.Parameters;
    parameters.AddWithValue("tenant_id", tenantId);
    parameters.AddWithValue("pack_version_id", packVersionId);

    return await command.ExecuteNonQueryAsync(cancellationToken) > 0;
}
|
||||
|
||||
// Search and discovery
|
||||
|
||||
/// <summary>
/// Case-insensitive substring search across name, display name, description
/// and tags, newest-updated first. Results are capped at 100 rows.
/// </summary>
public async Task<IReadOnlyList<Pack>> SearchPacksAsync(
    string tenantId,
    string query,
    PackStatus? status,
    int limit,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    // SQL text is assembled dynamically, but every user value is bound as a
    // parameter — no injection surface.
    var sql = $"""
        SELECT {PackColumns}
        FROM packs
        WHERE tenant_id = @tenant_id
        AND (name ILIKE @query OR display_name ILIKE @query OR description ILIKE @query OR tags ILIKE @query)
        """;

    if (status.HasValue)
    {
        sql += " AND status = @status::pack_status";
    }

    sql += " ORDER BY updated_at DESC LIMIT @limit";

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    // NOTE(review): '%' and '_' in the caller's query act as ILIKE wildcards
    // because the input is not escaped — confirm this is intended.
    command.Parameters.AddWithValue("query", $"%{query}%");
    if (status.HasValue)
    {
        command.Parameters.AddWithValue("status", status.Value.ToString().ToLowerInvariant());
    }
    // Clamp the page size to keep a single query bounded.
    command.Parameters.AddWithValue("limit", Math.Min(limit, 100));

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    var results = new List<Pack>();
    while (await reader.ReadAsync(cancellationToken))
    {
        results.Add(MapPack(reader));
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Lists published packs whose tags column contains the given tag substring,
/// newest-updated first, with limit/offset paging (limit capped at 100).
/// </summary>
public async Task<IReadOnlyList<Pack>> GetPacksByTagAsync(
    string tenantId,
    string tag,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    // Tags are matched as a substring of the tags column; presumably tags are
    // stored as a delimited string — TODO confirm (exact-tag matches may need
    // delimiter-aware patterns to avoid prefix collisions).
    var sql = $"""
        SELECT {PackColumns}
        FROM packs
        WHERE tenant_id = @tenant_id
        AND tags ILIKE @tag
        AND status = 'published'::pack_status
        ORDER BY updated_at DESC
        LIMIT @limit OFFSET @offset
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("tag", $"%{tag}%");
    command.Parameters.AddWithValue("limit", Math.Min(limit, 100));
    command.Parameters.AddWithValue("offset", offset);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    var results = new List<Pack>();
    while (await reader.ReadAsync(cancellationToken))
    {
        results.Add(MapPack(reader));
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Returns published packs ranked by total download count (summed across all
/// versions), capped at 100 rows. Packs with no versions rank as zero downloads.
/// </summary>
public async Task<IReadOnlyList<Pack>> GetPopularPacksAsync(
    string tenantId,
    int limit,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    // BUG FIX: the previous text prefixed the whole column list with "p."
    // ("SELECT p.{...}") while the Replace already rewrote pack_id to
    // "p.pack_id", yielding the invalid reference "p.p.pack_id". Only pack_id
    // is ambiguous with the joined subquery (its other column is
    // total_downloads), so qualifying just pack_id is sufficient.
    // NOTE(review): assumes "pack_id" occurs exactly once in PackColumns — confirm.
    var sql = $"""
        SELECT {PackColumns.Replace("pack_id", "p.pack_id")}
        FROM packs p
        LEFT JOIN (
        SELECT pack_id, SUM(download_count) AS total_downloads
        FROM pack_versions
        WHERE tenant_id = @tenant_id
        GROUP BY pack_id
        ) v ON p.pack_id = v.pack_id
        WHERE p.tenant_id = @tenant_id
        AND p.status = 'published'::pack_status
        ORDER BY COALESCE(v.total_downloads, 0) DESC
        LIMIT @limit
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("limit", Math.Min(limit, 100));

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    var results = new List<Pack>();
    while (await reader.ReadAsync(cancellationToken))
    {
        results.Add(MapPack(reader));
    }

    return results;
}
|
||||
|
||||
/// <summary>
/// Returns the most recently published packs for a tenant, newest first;
/// rows lacking a publish timestamp sort last, tie-broken by update time.
/// </summary>
public async Task<IReadOnlyList<Pack>> GetRecentPacksAsync(
    string tenantId,
    int limit,
    CancellationToken cancellationToken)
{
    var sql = $"""
        SELECT {PackColumns}
        FROM packs
        WHERE tenant_id = @tenant_id
        AND status = 'published'::pack_status
        ORDER BY published_at DESC NULLS LAST, updated_at DESC
        LIMIT @limit
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);
    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("limit", Math.Min(limit, 100));

    var packs = new List<Pack>();
    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    while (await reader.ReadAsync(cancellationToken))
    {
        packs.Add(MapPack(reader));
    }

    return packs;
}
|
||||
|
||||
// Statistics
|
||||
|
||||
/// <summary>
/// Sums the download counters of every version of a pack; returns 0 for a pack
/// with no versions (COALESCE guarantees a non-null scalar).
/// </summary>
public async Task<long> GetPackTotalDownloadsAsync(
    string tenantId,
    Guid packId,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    const string sql = """
        SELECT COALESCE(SUM(download_count), 0)
        FROM pack_versions
        WHERE tenant_id = @tenant_id AND pack_id = @pack_id
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pack_id", packId);

    // Convert.ToInt64 handles the bigint the SUM aggregate returns.
    var result = await command.ExecuteScalarAsync(cancellationToken);
    return Convert.ToInt64(result);
}
|
||||
|
||||
/// <summary>
/// Gathers registry-wide statistics for a tenant in a single round trip
/// (six correlated scalar subqueries returned as one row).
/// </summary>
public async Task<PackRegistryStats> GetStatsAsync(
    string tenantId,
    CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken);

    const string sql = """
        SELECT
        (SELECT COUNT(*) FROM packs WHERE tenant_id = @tenant_id) AS total_packs,
        (SELECT COUNT(*) FROM packs WHERE tenant_id = @tenant_id AND status = 'published'::pack_status) AS published_packs,
        (SELECT COUNT(*) FROM pack_versions WHERE tenant_id = @tenant_id) AS total_versions,
        (SELECT COUNT(*) FROM pack_versions WHERE tenant_id = @tenant_id AND status = 'published'::pack_version_status) AS published_versions,
        (SELECT COALESCE(SUM(download_count), 0) FROM pack_versions WHERE tenant_id = @tenant_id) AS total_downloads,
        (SELECT MAX(updated_at) FROM packs WHERE tenant_id = @tenant_id) AS last_updated_at
        """;

    await using var command = new NpgsqlCommand(sql, connection);
    command.Parameters.AddWithValue("tenant_id", tenantId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken);
    if (!await reader.ReadAsync(cancellationToken))
    {
        // Defensive: the statement always yields one row, but an empty result
        // should still produce a coherent zeroed stats object.
        return new PackRegistryStats(0, 0, 0, 0, 0, null);
    }

    // FIX: PostgreSQL COUNT(*) returns bigint (int8); reading those columns
    // with GetInt32 can throw InvalidCastException depending on the Npgsql
    // version. Read as Int64 and narrow with an overflow check.
    return new PackRegistryStats(
        TotalPacks: checked((int)reader.GetInt64(0)),
        PublishedPacks: checked((int)reader.GetInt64(1)),
        TotalVersions: checked((int)reader.GetInt64(2)),
        PublishedVersions: checked((int)reader.GetInt64(3)),
        TotalDownloads: reader.GetInt64(4),
        // Timestamps are stored as UTC; rehydrate with a zero offset.
        LastUpdatedAt: reader.IsDBNull(5) ? null : new DateTimeOffset(reader.GetDateTime(5), TimeSpan.Zero));
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
/// <summary>
/// Binds every Pack column as a named parameter on the command; nullable CLR
/// values are translated to DBNull.
/// </summary>
private void AddPackParameters(NpgsqlCommand command, Pack pack)
{
    // Null CLR reference -> DBNull so nullable columns round-trip.
    static object OrDbNull(object? value) => value ?? DBNull.Value;

    var p = command.Parameters;
    p.AddWithValue("pack_id", pack.PackId);
    p.AddWithValue("tenant_id", pack.TenantId);
    p.AddWithValue("project_id", OrDbNull(pack.ProjectId));
    p.AddWithValue("name", pack.Name);
    p.AddWithValue("display_name", pack.DisplayName);
    p.AddWithValue("description", OrDbNull(pack.Description));
    // Enum stored lowercase to match the Postgres enum labels.
    p.AddWithValue("status", pack.Status.ToString().ToLowerInvariant());
    p.AddWithValue("created_by", pack.CreatedBy);
    // Timestamps persisted as UTC DateTime values.
    p.AddWithValue("created_at", pack.CreatedAt.UtcDateTime);
    p.AddWithValue("updated_at", pack.UpdatedAt.UtcDateTime);
    p.AddWithValue("updated_by", OrDbNull(pack.UpdatedBy));
    p.AddWithValue("metadata", OrDbNull(pack.Metadata));
    p.AddWithValue("tags", OrDbNull(pack.Tags));
    p.AddWithValue("icon_uri", OrDbNull(pack.IconUri));
    p.AddWithValue("version_count", pack.VersionCount);
    p.AddWithValue("latest_version", OrDbNull(pack.LatestVersion));
    p.AddWithValue("published_at", OrDbNull(pack.PublishedAt?.UtcDateTime));
    p.AddWithValue("published_by", OrDbNull(pack.PublishedBy));
}
|
||||
|
||||
/// <summary>
/// Binds every PackVersion column as a named parameter on the command;
/// nullable CLR values are translated to DBNull.
/// </summary>
private void AddVersionParameters(NpgsqlCommand command, PackVersion version)
{
    // Null CLR reference -> DBNull so nullable columns round-trip.
    static object OrDbNull(object? value) => value ?? DBNull.Value;

    var p = command.Parameters;
    p.AddWithValue("pack_version_id", version.PackVersionId);
    p.AddWithValue("tenant_id", version.TenantId);
    p.AddWithValue("pack_id", version.PackId);
    p.AddWithValue("version", version.Version);
    p.AddWithValue("sem_ver", OrDbNull(version.SemVer));
    // Enum stored lowercase to match the Postgres enum labels.
    p.AddWithValue("status", version.Status.ToString().ToLowerInvariant());
    p.AddWithValue("artifact_uri", version.ArtifactUri);
    p.AddWithValue("artifact_digest", version.ArtifactDigest);
    p.AddWithValue("artifact_mime_type", OrDbNull(version.ArtifactMimeType));
    p.AddWithValue("artifact_size_bytes", OrDbNull(version.ArtifactSizeBytes));
    p.AddWithValue("manifest_json", OrDbNull(version.ManifestJson));
    p.AddWithValue("manifest_digest", OrDbNull(version.ManifestDigest));
    p.AddWithValue("release_notes", OrDbNull(version.ReleaseNotes));
    p.AddWithValue("min_engine_version", OrDbNull(version.MinEngineVersion));
    p.AddWithValue("dependencies", OrDbNull(version.Dependencies));
    p.AddWithValue("created_by", version.CreatedBy);
    // Timestamps persisted as UTC DateTime values.
    p.AddWithValue("created_at", version.CreatedAt.UtcDateTime);
    p.AddWithValue("updated_at", version.UpdatedAt.UtcDateTime);
    p.AddWithValue("updated_by", OrDbNull(version.UpdatedBy));
    p.AddWithValue("published_at", OrDbNull(version.PublishedAt?.UtcDateTime));
    p.AddWithValue("published_by", OrDbNull(version.PublishedBy));
    p.AddWithValue("deprecated_at", OrDbNull(version.DeprecatedAt?.UtcDateTime));
    p.AddWithValue("deprecated_by", OrDbNull(version.DeprecatedBy));
    p.AddWithValue("deprecation_reason", OrDbNull(version.DeprecationReason));
    p.AddWithValue("signature_uri", OrDbNull(version.SignatureUri));
    p.AddWithValue("signature_algorithm", OrDbNull(version.SignatureAlgorithm));
    p.AddWithValue("signed_by", OrDbNull(version.SignedBy));
    p.AddWithValue("signed_at", OrDbNull(version.SignedAt?.UtcDateTime));
    p.AddWithValue("metadata", OrDbNull(version.Metadata));
    p.AddWithValue("download_count", version.DownloadCount);
}
|
||||
|
||||
/// <summary>
/// Materializes a Pack from the current reader row. Ordinals must match the
/// column order produced by PackColumns.
/// </summary>
private static Pack MapPack(NpgsqlDataReader reader)
{
    // Ordinal readers for nullable text and UTC timestamp columns.
    string? Text(int ordinal) => reader.IsDBNull(ordinal) ? null : reader.GetString(ordinal);
    DateTimeOffset Utc(int ordinal) => new(reader.GetDateTime(ordinal), TimeSpan.Zero);

    return new Pack(
        PackId: reader.GetGuid(0),
        TenantId: reader.GetString(1),
        ProjectId: Text(2),
        Name: reader.GetString(3),
        DisplayName: reader.GetString(4),
        Description: Text(5),
        Status: Enum.Parse<PackStatus>(reader.GetString(6), ignoreCase: true),
        CreatedBy: reader.GetString(7),
        CreatedAt: Utc(8),
        UpdatedAt: Utc(9),
        UpdatedBy: Text(10),
        Metadata: Text(11),
        Tags: Text(12),
        IconUri: Text(13),
        VersionCount: reader.GetInt32(14),
        LatestVersion: Text(15),
        PublishedAt: reader.IsDBNull(16) ? null : Utc(16),
        PublishedBy: Text(17));
}
|
||||
|
||||
/// <summary>
/// Materializes a PackVersion from the current reader row. Ordinals must match
/// the column order of the version select list.
/// </summary>
private static PackVersion MapPackVersion(NpgsqlDataReader reader)
{
    // Ordinal readers for nullable text and UTC timestamp columns.
    string? Text(int ordinal) => reader.IsDBNull(ordinal) ? null : reader.GetString(ordinal);
    DateTimeOffset Utc(int ordinal) => new(reader.GetDateTime(ordinal), TimeSpan.Zero);
    DateTimeOffset? UtcOrNull(int ordinal) => reader.IsDBNull(ordinal) ? null : Utc(ordinal);

    return new PackVersion(
        PackVersionId: reader.GetGuid(0),
        TenantId: reader.GetString(1),
        PackId: reader.GetGuid(2),
        Version: reader.GetString(3),
        SemVer: Text(4),
        Status: Enum.Parse<PackVersionStatus>(reader.GetString(5), ignoreCase: true),
        ArtifactUri: reader.GetString(6),
        ArtifactDigest: reader.GetString(7),
        ArtifactMimeType: Text(8),
        ArtifactSizeBytes: reader.IsDBNull(9) ? null : reader.GetInt64(9),
        ManifestJson: Text(10),
        ManifestDigest: Text(11),
        ReleaseNotes: Text(12),
        MinEngineVersion: Text(13),
        Dependencies: Text(14),
        CreatedBy: reader.GetString(15),
        CreatedAt: Utc(16),
        UpdatedAt: Utc(17),
        UpdatedBy: Text(18),
        PublishedAt: UtcOrNull(19),
        PublishedBy: Text(20),
        DeprecatedAt: UtcOrNull(21),
        DeprecatedBy: Text(22),
        DeprecationReason: Text(23),
        SignatureUri: Text(24),
        SignatureAlgorithm: Text(25),
        SignedBy: Text(26),
        SignedAt: UtcOrNull(27),
        Metadata: Text(28),
        DownloadCount: reader.GetInt32(29));
}
|
||||
}
|
||||
@@ -0,0 +1,249 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Infrastructure.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for pack registry operations.
|
||||
/// Per 150.B-PacksRegistry: Registry API for pack CRUD operations.
|
||||
/// </summary>
|
||||
public interface IPackRegistryRepository
{
    // Pack CRUD operations

    /// <summary>
    /// Gets a pack by ID, or <c>null</c> when no pack with that ID exists for the tenant.
    /// </summary>
    Task<Pack?> GetPackByIdAsync(
        string tenantId,
        Guid packId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets a pack by its unique name within the tenant, or <c>null</c> when not found.
    /// </summary>
    Task<Pack?> GetPackByNameAsync(
        string tenantId,
        string name,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists packs with optional filters; any filter left <c>null</c> is not applied.
    /// Paged via <paramref name="limit"/>/<paramref name="offset"/>.
    /// </summary>
    Task<IReadOnlyList<Pack>> ListPacksAsync(
        string tenantId,
        string? projectId,
        PackStatus? status,
        string? searchTerm,
        string? tag,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Counts packs matching the same filters as <see cref="ListPacksAsync"/> (for paging).
    /// </summary>
    Task<int> CountPacksAsync(
        string tenantId,
        string? projectId,
        PackStatus? status,
        string? searchTerm,
        string? tag,
        CancellationToken cancellationToken);

    /// <summary>
    /// Creates a new pack.
    /// </summary>
    Task CreatePackAsync(Pack pack, CancellationToken cancellationToken);

    /// <summary>
    /// Updates the mutable fields of an existing pack.
    /// </summary>
    Task UpdatePackAsync(Pack pack, CancellationToken cancellationToken);

    /// <summary>
    /// Updates pack status along with publish bookkeeping; null publish values
    /// leave the stored values unchanged.
    /// </summary>
    Task UpdatePackStatusAsync(
        string tenantId,
        Guid packId,
        PackStatus status,
        string updatedBy,
        DateTimeOffset? publishedAt,
        string? publishedBy,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes a pack (only allowed for draft packs with no versions).
    /// Returns <c>true</c> when a pack was actually deleted.
    /// </summary>
    Task<bool> DeletePackAsync(
        string tenantId,
        Guid packId,
        CancellationToken cancellationToken);

    // Pack version operations

    /// <summary>
    /// Gets a pack version by ID, or <c>null</c> when not found.
    /// </summary>
    Task<PackVersion?> GetVersionByIdAsync(
        string tenantId,
        Guid packVersionId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets a pack version by pack ID and version string, or <c>null</c> when not found.
    /// </summary>
    Task<PackVersion?> GetVersionAsync(
        string tenantId,
        Guid packId,
        string version,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets the latest published version for a pack, or <c>null</c> when none exists.
    /// </summary>
    Task<PackVersion?> GetLatestVersionAsync(
        string tenantId,
        Guid packId,
        bool includePrerelease,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists versions for a pack, optionally filtered by status, paged via
    /// <paramref name="limit"/>/<paramref name="offset"/>.
    /// </summary>
    Task<IReadOnlyList<PackVersion>> ListVersionsAsync(
        string tenantId,
        Guid packId,
        PackVersionStatus? status,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Counts versions for a pack, optionally filtered by status (for paging).
    /// </summary>
    Task<int> CountVersionsAsync(
        string tenantId,
        Guid packId,
        PackVersionStatus? status,
        CancellationToken cancellationToken);

    /// <summary>
    /// Creates a new pack version.
    /// </summary>
    Task CreateVersionAsync(PackVersion version, CancellationToken cancellationToken);

    /// <summary>
    /// Updates the mutable fields of an existing pack version.
    /// </summary>
    Task UpdateVersionAsync(PackVersion version, CancellationToken cancellationToken);

    /// <summary>
    /// Updates version status along with publish/deprecate bookkeeping; null
    /// lifecycle values leave the stored values unchanged.
    /// </summary>
    Task UpdateVersionStatusAsync(
        string tenantId,
        Guid packVersionId,
        PackVersionStatus status,
        string updatedBy,
        DateTimeOffset? publishedAt,
        string? publishedBy,
        DateTimeOffset? deprecatedAt,
        string? deprecatedBy,
        string? deprecationReason,
        CancellationToken cancellationToken);

    /// <summary>
    /// Records signing metadata (URI, algorithm, signer, timestamp) for a version.
    /// </summary>
    Task UpdateVersionSignatureAsync(
        string tenantId,
        Guid packVersionId,
        string signatureUri,
        string signatureAlgorithm,
        string signedBy,
        DateTimeOffset signedAt,
        CancellationToken cancellationToken);

    /// <summary>
    /// Increments the download count for a version by one.
    /// </summary>
    Task IncrementDownloadCountAsync(
        string tenantId,
        Guid packVersionId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Deletes a pack version (only allowed for draft versions).
    /// Returns <c>true</c> when a version was actually deleted.
    /// </summary>
    Task<bool> DeleteVersionAsync(
        string tenantId,
        Guid packVersionId,
        CancellationToken cancellationToken);

    // Search and discovery

    /// <summary>
    /// Searches packs by name, description, or tags (substring match),
    /// optionally restricted to a status.
    /// </summary>
    Task<IReadOnlyList<Pack>> SearchPacksAsync(
        string tenantId,
        string query,
        PackStatus? status,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets published packs carrying the given tag, paged via
    /// <paramref name="limit"/>/<paramref name="offset"/>.
    /// </summary>
    Task<IReadOnlyList<Pack>> GetPacksByTagAsync(
        string tenantId,
        string tag,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets popular packs ranked by total download count across versions.
    /// </summary>
    Task<IReadOnlyList<Pack>> GetPopularPacksAsync(
        string tenantId,
        int limit,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets recently published/updated packs, newest first.
    /// </summary>
    Task<IReadOnlyList<Pack>> GetRecentPacksAsync(
        string tenantId,
        int limit,
        CancellationToken cancellationToken);

    // Statistics

    /// <summary>
    /// Gets total download count for a pack summed over all of its versions.
    /// </summary>
    Task<long> GetPackTotalDownloadsAsync(
        string tenantId,
        Guid packId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets aggregate registry statistics for a tenant.
    /// </summary>
    Task<PackRegistryStats> GetStatsAsync(
        string tenantId,
        CancellationToken cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Aggregate statistics for a single tenant's pack registry.
/// </summary>
/// <param name="TotalPacks">Number of packs in any status.</param>
/// <param name="PublishedPacks">Number of packs in the published status.</param>
/// <param name="TotalVersions">Number of pack versions in any status.</param>
/// <param name="PublishedVersions">Number of pack versions in the published status.</param>
/// <param name="TotalDownloads">Sum of download counts across all versions.</param>
/// <param name="LastUpdatedAt">Most recent pack update time, or <c>null</c> when the tenant has no packs.</param>
public sealed record PackRegistryStats(
    int TotalPacks,
    int PublishedPacks,
    int TotalVersions,
    int PublishedVersions,
    long TotalDownloads,
    DateTimeOffset? LastUpdatedAt);
|
||||
@@ -1,7 +1,9 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Orchestrator.Core.Backfill;
|
||||
using StellaOps.Orchestrator.Core.Observability;
|
||||
using StellaOps.Orchestrator.Infrastructure.Ledger;
|
||||
using StellaOps.Orchestrator.Infrastructure.Observability;
|
||||
using StellaOps.Orchestrator.Infrastructure.Options;
|
||||
using StellaOps.Orchestrator.Infrastructure.Postgres;
|
||||
using StellaOps.Orchestrator.Infrastructure.Repositories;
|
||||
@@ -41,6 +43,7 @@ public static class ServiceCollectionExtensions
|
||||
services.AddScoped<Infrastructure.Repositories.IBackfillRepository, PostgresBackfillRepository>();
|
||||
services.AddScoped<IPackRunRepository, PostgresPackRunRepository>();
|
||||
services.AddScoped<IPackRunLogRepository, PostgresPackRunLogRepository>();
|
||||
services.AddScoped<IPackRegistryRepository, PostgresPackRegistryRepository>();
|
||||
|
||||
// Register audit and ledger repositories
|
||||
services.AddScoped<IAuditRepository, PostgresAuditRepository>();
|
||||
@@ -54,6 +57,16 @@ public static class ServiceCollectionExtensions
|
||||
// Register duplicate suppression factory
|
||||
services.AddSingleton<IDuplicateSuppressorFactory, PostgresDuplicateSuppressorFactory>();
|
||||
|
||||
// Register golden signals metrics (per ORCH-OBS-51-001)
|
||||
services.AddSingleton<OrchestratorGoldenSignals>();
|
||||
|
||||
// Register incident mode hooks (per ORCH-OBS-55-001)
|
||||
var incidentModeOptions = configuration
|
||||
.GetSection(IncidentModeHooksOptions.SectionName)
|
||||
.Get<IncidentModeHooksOptions>() ?? new IncidentModeHooksOptions();
|
||||
services.AddSingleton(incidentModeOptions);
|
||||
services.AddSingleton<IIncidentModeHooks, IncidentModeHooks>();
|
||||
|
||||
return services;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/>
|
||||
<ProjectReference Include="..\..\..\Telemetry\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core.csproj"/>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,477 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Orchestrator.Core.AirGap;
|
||||
using StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.AirGap;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for NetworkIntentValidator.
|
||||
/// Per ORCH-AIRGAP-56-001.
|
||||
/// </summary>
|
||||
public class NetworkIntentValidatorTests
|
||||
{
|
||||
    // System under test, shared across tests in this class.
    private readonly NetworkIntentValidator _sut;

    public NetworkIntentValidatorTests()
    {
        // NullLogger keeps test output quiet; the validator only uses the
        // logger for diagnostics, not for its validation result.
        _sut = new NetworkIntentValidator(NullLogger<NetworkIntentValidator>.Instance);
    }
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenEnforcementDisabled_ReturnsSuccess()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """{"destinationUri": "https://external.example.com/api"}""";
|
||||
var config = NetworkIntentConfig.Disabled;
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.False(result.ShouldBlock);
|
||||
}
|
||||
|
||||
    [Fact]
    public void ValidateForJob_WhenNotSealedAndNoRequireExplicit_ReturnsSuccess()
    {
        // Arrange: strict mode, but the environment is not sealed and explicit
        // intents are not required — validation should pass through.
        var payload = """{"destinationUri": "https://external.example.com/api"}""";
        var config = new NetworkIntentConfig(
            EnforcementMode.Strict,
            RequireExplicitIntents: false);

        // Act
        var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: false);

        // Assert
        Assert.True(result.IsValid);
    }
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenNoNetworkEndpointsDetected_ReturnsSuccess()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """{"data": "some local data", "count": 42}""";
|
||||
var config = NetworkIntentConfig.Strict;
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("process.data", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
}
|
||||
|
||||
    [Fact]
    public void ValidateForJob_WhenMissingIntents_ReturnsMissingIntentsError()
    {
        // Arrange: payload references an external endpoint but declares no
        // networkIntents, while the config demands explicit declarations.
        var payload = """{"destinationUri": "https://external.example.com/api"}""";
        var config = new NetworkIntentConfig(
            EnforcementMode.Warn,
            RequireExplicitIntents: true);

        // Act
        var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: true);

        // Assert: invalid, with the missing-intent error code and exactly one
        // MissingIntent violation. NOTE(review): Warn mode still yields
        // IsValid == false here — presumably Warn relaxes ShouldBlock rather
        // than IsValid; confirm against NetworkIntentValidator.
        Assert.False(result.IsValid);
        Assert.Equal("NETWORK_INTENT_MISSING", result.ErrorCode);
        Assert.Single(result.Violations);
        Assert.Equal(NetworkViolationType.MissingIntent, result.Violations[0].ViolationType);
    }
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenDeclaredIntentsMatchAllowlist_ReturnsSuccess()
|
||||
{
|
||||
// Arrange - payload without URL fields that would be detected, only declared intents
|
||||
var payload = """
|
||||
{
|
||||
"format": "spdx",
|
||||
"networkIntents": [
|
||||
{"host": "allowed.example.com", "port": 443, "protocol": "https", "purpose": "export"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
var config = new NetworkIntentConfig(
|
||||
EnforcementMode.Strict,
|
||||
Allowlist: [new NetworkAllowlistEntry("allowed.example.com", 443, "https")],
|
||||
RequireExplicitIntents: false); // Don't require explicit intents match
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenDeclaredIntentsNotInAllowlist_ReturnsDisallowed()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """
|
||||
{
|
||||
"destinationUri": "https://disallowed.example.com/api",
|
||||
"networkIntents": [
|
||||
{"host": "disallowed.example.com", "port": 443, "protocol": "https", "purpose": "export"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
var config = new NetworkIntentConfig(
|
||||
EnforcementMode.Strict,
|
||||
Allowlist: [new NetworkAllowlistEntry("allowed.example.com", 443, "https")],
|
||||
RequireExplicitIntents: true);
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal("NETWORK_INTENT_DISALLOWED", result.ErrorCode);
|
||||
Assert.True(result.ShouldBlock);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenWarnMode_DoesNotBlock()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """
|
||||
{
|
||||
"destinationUri": "https://disallowed.example.com/api",
|
||||
"networkIntents": [
|
||||
{"host": "disallowed.example.com", "port": 443, "protocol": "https", "purpose": "export"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
var config = new NetworkIntentConfig(
|
||||
EnforcementMode.Warn,
|
||||
Allowlist: [new NetworkAllowlistEntry("allowed.example.com", 443, "https")],
|
||||
RequireExplicitIntents: true);
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("export.sbom", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.False(result.ShouldBlock);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenWildcardAllowlist_AllowsSubdomains()
|
||||
{
|
||||
// Arrange - payload without URL fields, just declared intents
|
||||
var payload = """
|
||||
{
|
||||
"data": "test",
|
||||
"networkIntents": [
|
||||
{"host": "api.example.com", "port": 443, "protocol": "https", "purpose": "api"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
var config = new NetworkIntentConfig(
|
||||
EnforcementMode.Strict,
|
||||
Allowlist: [new NetworkAllowlistEntry("*.example.com", null, "https")],
|
||||
RequireExplicitIntents: false);
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("fetch.data", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateForJob_WhenBlockedProtocol_ReturnsViolation()
|
||||
{
|
||||
// Arrange - payload without URL fields, just declared intents with blocked protocol
|
||||
var payload = """
|
||||
{
|
||||
"data": "test",
|
||||
"networkIntents": [
|
||||
{"host": "insecure.example.com", "port": 80, "protocol": "http", "purpose": "legacy"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
var config = new NetworkIntentConfig(
|
||||
EnforcementMode.Strict,
|
||||
Allowlist: [new NetworkAllowlistEntry("insecure.example.com")],
|
||||
BlockedProtocols: ["http"],
|
||||
RequireExplicitIntents: false);
|
||||
|
||||
// Act
|
||||
var result = _sut.ValidateForJob("legacy.api", payload, config, isSealed: true);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Violations, v => v.ViolationType == NetworkViolationType.BlockedProtocol);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtractNetworkEndpoints_ExtractsFromCommonFields()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """
|
||||
{
|
||||
"destinationUri": "https://dest.example.com/path",
|
||||
"callbackUrl": "https://callback.example.com/hook",
|
||||
"webhookUrl": "https://webhook.example.com/notify",
|
||||
"data": "not a url"
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
var endpoints = _sut.ExtractNetworkEndpoints(payload);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(3, endpoints.Count);
|
||||
Assert.Contains("https://dest.example.com/path", endpoints);
|
||||
Assert.Contains("https://callback.example.com/hook", endpoints);
|
||||
Assert.Contains("https://webhook.example.com/notify", endpoints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtractNetworkEndpoints_IgnoresNonUrlStrings()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """
|
||||
{
|
||||
"name": "test-job",
|
||||
"description": "A test job for processing",
|
||||
"count": 42
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
var endpoints = _sut.ExtractNetworkEndpoints(payload);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(endpoints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtractDeclaredIntents_ParsesNetworkIntentsArray()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """
|
||||
{
|
||||
"networkIntents": [
|
||||
{"host": "api.example.com", "port": 443, "protocol": "https", "purpose": "API calls"},
|
||||
{"host": "metrics.example.com", "port": 8080, "protocol": "grpc", "purpose": "Metrics export"}
|
||||
]
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
var intents = _sut.ExtractDeclaredIntents(payload);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, intents.Count);
|
||||
|
||||
var apiIntent = intents.First(i => i.Host == "api.example.com");
|
||||
Assert.Equal(443, apiIntent.Port);
|
||||
Assert.Equal("https", apiIntent.Protocol);
|
||||
Assert.Equal("API calls", apiIntent.Purpose);
|
||||
|
||||
var metricsIntent = intents.First(i => i.Host == "metrics.example.com");
|
||||
Assert.Equal(8080, metricsIntent.Port);
|
||||
Assert.Equal("grpc", metricsIntent.Protocol);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExtractDeclaredIntents_ReturnsEmptyWhenNoIntentsDeclared()
|
||||
{
|
||||
// Arrange
|
||||
var payload = """{"destinationUri": "https://example.com/api"}""";
|
||||
|
||||
// Act
|
||||
var intents = _sut.ExtractDeclaredIntents(payload);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(intents);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Tests for NetworkIntent model: factory helpers and allowlist matching.
/// Sealed for consistency with the other test classes in this suite.
/// </summary>
public sealed class NetworkIntentTests
{
    [Fact]
    public void HttpsEgress_CreatesCorrectIntent()
    {
        // Act
        var intent = NetworkIntent.HttpsEgress("api.example.com", "API access");

        // Assert - https defaults to port 443 and egress direction
        Assert.Equal("api.example.com", intent.Host);
        Assert.Equal(443, intent.Port);
        Assert.Equal("https", intent.Protocol);
        Assert.Equal("API access", intent.Purpose);
        Assert.Equal(NetworkDirection.Egress, intent.Direction);
    }

    [Fact]
    public void HttpEgress_CreatesCorrectIntent()
    {
        // Act
        var intent = NetworkIntent.HttpEgress("legacy.example.com", "Legacy API", 8080);

        // Assert
        Assert.Equal("legacy.example.com", intent.Host);
        Assert.Equal(8080, intent.Port);
        Assert.Equal("http", intent.Protocol);
    }

    [Fact]
    public void GrpcEgress_CreatesCorrectIntent()
    {
        // Act
        var intent = NetworkIntent.GrpcEgress("grpc.example.com", "gRPC service");

        // Assert
        Assert.Equal("grpc.example.com", intent.Host);
        Assert.Equal(443, intent.Port);
        Assert.Equal("grpc", intent.Protocol);
    }

    [Fact]
    public void MatchesAllowlistEntry_ExactMatch_ReturnsTrue()
    {
        // Arrange
        var intent = NetworkIntent.HttpsEgress("api.example.com", "test");
        var entry = new NetworkAllowlistEntry("api.example.com", 443, "https");

        // Act & Assert
        Assert.True(intent.MatchesAllowlistEntry(entry));
    }

    [Fact]
    public void MatchesAllowlistEntry_WildcardMatch_ReturnsTrue()
    {
        // Arrange
        var intent = NetworkIntent.HttpsEgress("api.example.com", "test");
        var entry = new NetworkAllowlistEntry("*.example.com");

        // Act & Assert
        Assert.True(intent.MatchesAllowlistEntry(entry));
    }

    [Fact]
    public void MatchesAllowlistEntry_PortMismatch_ReturnsFalse()
    {
        // Arrange
        var intent = NetworkIntent.HttpsEgress("api.example.com", "test", 8443);
        var entry = new NetworkAllowlistEntry("api.example.com", 443, "https");

        // Act & Assert
        Assert.False(intent.MatchesAllowlistEntry(entry));
    }

    [Fact]
    public void MatchesAllowlistEntry_ProtocolMismatch_ReturnsFalse()
    {
        // Arrange
        var intent = new NetworkIntent("api.example.com", 443, "grpc", "test");
        var entry = new NetworkAllowlistEntry("api.example.com", 443, "https");

        // Act & Assert
        Assert.False(intent.MatchesAllowlistEntry(entry));
    }

    [Fact]
    public void MatchesAllowlistEntry_AnyPort_ReturnsTrue()
    {
        // Arrange - a null port in the entry is a wildcard for any port
        var intent = NetworkIntent.HttpsEgress("api.example.com", "test", 8443);
        var entry = new NetworkAllowlistEntry("api.example.com", null, "https");

        // Act & Assert
        Assert.True(intent.MatchesAllowlistEntry(entry));
    }
}
|
||||
|
||||
/// <summary>
/// Tests for NetworkIntentValidationResult factory methods.
/// Sealed for consistency with the other test classes in this suite.
/// </summary>
public sealed class NetworkIntentValidationResultTests
{
    [Fact]
    public void Success_CreatesValidResult()
    {
        // Act
        var result = NetworkIntentValidationResult.Success();

        // Assert
        Assert.True(result.IsValid);
        Assert.False(result.ShouldBlock);
        Assert.Null(result.ErrorCode);
        Assert.Empty(result.Violations);
    }

    [Fact]
    public void MissingIntents_CreatesErrorResult()
    {
        // Arrange
        var endpoints = new List<string> { "https://example.com/api" };

        // Act
        var result = NetworkIntentValidationResult.MissingIntents(endpoints, shouldBlock: true);

        // Assert
        Assert.False(result.IsValid);
        Assert.True(result.ShouldBlock);
        Assert.Equal("NETWORK_INTENT_MISSING", result.ErrorCode);
        Assert.Single(result.Violations);
        Assert.NotEmpty(result.Recommendations);
    }

    [Fact]
    public void DisallowedIntents_CreatesErrorResult()
    {
        // Arrange
        var violations = new List<NetworkIntentViolation>
        {
            new("https://bad.example.com", NetworkViolationType.NotInAllowlist, null)
        };

        // Act - shouldBlock false exercises the warn-only path
        var result = NetworkIntentValidationResult.DisallowedIntents(violations, shouldBlock: false);

        // Assert
        Assert.False(result.IsValid);
        Assert.False(result.ShouldBlock);
        Assert.Equal("NETWORK_INTENT_DISALLOWED", result.ErrorCode);
        Assert.NotEmpty(result.Recommendations);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for NetworkIntentConfig preset instances.
/// Sealed for consistency with the other test classes in this suite.
/// </summary>
public sealed class NetworkIntentConfigTests
{
    [Fact]
    public void Default_HasWarnMode()
    {
        var config = NetworkIntentConfig.Default;

        Assert.Equal(EnforcementMode.Warn, config.EnforcementMode);
        Assert.True(config.RequireExplicitIntents);
    }

    [Fact]
    public void Strict_HasStrictMode()
    {
        var config = NetworkIntentConfig.Strict;

        Assert.Equal(EnforcementMode.Strict, config.EnforcementMode);
    }

    [Fact]
    public void Disabled_HasDisabledMode()
    {
        var config = NetworkIntentConfig.Disabled;

        Assert.Equal(EnforcementMode.Disabled, config.EnforcementMode);
    }
}
|
||||
// NOTE(review): a stray git diff hunk header ("@@ -0,0 +1,917 @@") was embedded here;
// the content below belongs to a separate test file (Mirror domain tests).
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
using StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
using StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Domain.Mirror;
|
||||
|
||||
/// <summary>
/// Tests for MirrorEventTypes constants.
/// Per ORCH-AIRGAP-58-001.
/// </summary>
public sealed class MirrorEventTypesTests
{
    [Fact]
    public void AllEventTypes_HaveMirrorPrefix()
    {
        // Every constant the contract defines, grouped by lifecycle.
        string[] allEventTypes =
        [
            MirrorEventTypes.BundleStarted,
            MirrorEventTypes.BundleProgress,
            MirrorEventTypes.BundleCompleted,
            MirrorEventTypes.BundleFailed,
            MirrorEventTypes.ImportStarted,
            MirrorEventTypes.ImportValidated,
            MirrorEventTypes.ImportCompleted,
            MirrorEventTypes.ImportFailed,
            MirrorEventTypes.VerifyStarted,
            MirrorEventTypes.VerifyCompleted,
            MirrorEventTypes.VerifyFailed,
            MirrorEventTypes.SyncStarted,
            MirrorEventTypes.SyncProgress,
            MirrorEventTypes.SyncCompleted,
            MirrorEventTypes.SyncFailed,
            MirrorEventTypes.EvidenceCaptured,
            MirrorEventTypes.ProvenanceRecorded
        ];

        Assert.All(allEventTypes, eventType => Assert.StartsWith(MirrorEventTypes.Prefix, eventType));
    }

    [Fact]
    public void BundleEventTypes_HaveCorrectFormat()
    {
        // Pin the wire-format strings so renames surface as test failures.
        Assert.Equal("mirror.bundle.started", MirrorEventTypes.BundleStarted);
        Assert.Equal("mirror.bundle.progress", MirrorEventTypes.BundleProgress);
        Assert.Equal("mirror.bundle.completed", MirrorEventTypes.BundleCompleted);
        Assert.Equal("mirror.bundle.failed", MirrorEventTypes.BundleFailed);
    }

    [Fact]
    public void ImportEventTypes_HaveCorrectFormat()
    {
        Assert.Equal("mirror.import.started", MirrorEventTypes.ImportStarted);
        Assert.Equal("mirror.import.validated", MirrorEventTypes.ImportValidated);
        Assert.Equal("mirror.import.completed", MirrorEventTypes.ImportCompleted);
        Assert.Equal("mirror.import.failed", MirrorEventTypes.ImportFailed);
    }

    [Fact]
    public void VerifyEventTypes_HaveCorrectFormat()
    {
        Assert.Equal("mirror.verify.started", MirrorEventTypes.VerifyStarted);
        Assert.Equal("mirror.verify.completed", MirrorEventTypes.VerifyCompleted);
        Assert.Equal("mirror.verify.failed", MirrorEventTypes.VerifyFailed);
    }

    [Fact]
    public void SyncEventTypes_HaveCorrectFormat()
    {
        Assert.Equal("mirror.sync.started", MirrorEventTypes.SyncStarted);
        Assert.Equal("mirror.sync.progress", MirrorEventTypes.SyncProgress);
        Assert.Equal("mirror.sync.completed", MirrorEventTypes.SyncCompleted);
        Assert.Equal("mirror.sync.failed", MirrorEventTypes.SyncFailed);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorOperationContext record.
/// Covers full construction with all named arguments, plus the nullable optional fields.
/// </summary>
public sealed class MirrorOperationContextTests
{
    [Fact]
    public void Create_SetsAllProperties()
    {
        // Fresh identifiers so the equality asserts below cannot pass by coincidence.
        var jobId = Guid.NewGuid();
        var operationId = Guid.NewGuid();

        // Named arguments exercise every positional parameter of the record.
        var context = new MirrorOperationContext(
            TenantId: "tenant-1",
            ProjectId: "project-1",
            JobId: jobId,
            OperationId: operationId,
            JobType: MirrorJobTypes.Bundle,
            Actor: "user@example.com",
            TraceId: "trace-123",
            SpanId: "span-456",
            SourceEnvironment: "production",
            TargetEnvironment: "staging");

        Assert.Equal("tenant-1", context.TenantId);
        Assert.Equal("project-1", context.ProjectId);
        Assert.Equal(jobId, context.JobId);
        Assert.Equal(operationId, context.OperationId);
        Assert.Equal(MirrorJobTypes.Bundle, context.JobType);
        Assert.Equal("user@example.com", context.Actor);
        Assert.Equal("trace-123", context.TraceId);
        Assert.Equal("span-456", context.SpanId);
        Assert.Equal("production", context.SourceEnvironment);
        Assert.Equal("staging", context.TargetEnvironment);
    }

    [Fact]
    public void Create_AllowsNullOptionalFields()
    {
        // Only TenantId, JobId, OperationId, JobType, and SourceEnvironment are required
        // here; everything else is passed as null and must round-trip as null.
        var context = new MirrorOperationContext(
            TenantId: "tenant-1",
            ProjectId: null,
            JobId: Guid.NewGuid(),
            OperationId: Guid.NewGuid(),
            JobType: MirrorJobTypes.Bundle,
            Actor: null,
            TraceId: null,
            SpanId: null,
            SourceEnvironment: "production",
            TargetEnvironment: null);

        Assert.Null(context.ProjectId);
        Assert.Null(context.Actor);
        Assert.Null(context.TraceId);
        Assert.Null(context.SpanId);
        Assert.Null(context.TargetEnvironment);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorOperationRecordResult record: the fully populated success case
/// and the failure case where only the error is set.
/// </summary>
public sealed class MirrorOperationRecordResultTests
{
    [Fact]
    public void Success_WithAllData()
    {
        var expectedEventId = Guid.NewGuid();
        var expectedCapsuleId = Guid.NewGuid();
        var evidencePointer = new EvidencePointer(
            Type: EvidencePointerType.Bundle,
            BundleId: expectedCapsuleId,
            BundleDigest: "sha256:abc123",
            AttestationSubject: null,
            AttestationDigest: null,
            ManifestUri: null,
            LockerPath: null);

        MirrorOperationRecordResult result = new(
            Success: true,
            EventId: expectedEventId,
            CapsuleId: expectedCapsuleId,
            EvidencePointer: evidencePointer,
            Error: null);

        Assert.True(result.Success);
        Assert.Equal(expectedEventId, result.EventId);
        Assert.Equal(expectedCapsuleId, result.CapsuleId);
        Assert.NotNull(result.EvidencePointer);
        Assert.Null(result.Error);
    }

    [Fact]
    public void Failure_WithError()
    {
        // A failed record carries only the error message; every other slot is null.
        MirrorOperationRecordResult result = new(
            Success: false,
            EventId: null,
            CapsuleId: null,
            EvidencePointer: null,
            Error: "Something went wrong");

        Assert.False(result.Success);
        Assert.Null(result.EventId);
        Assert.Null(result.CapsuleId);
        Assert.Null(result.EvidencePointer);
        Assert.Equal("Something went wrong", result.Error);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorImportRequest record.
/// </summary>
public sealed class MirrorImportRequestTests
{
    [Fact]
    public void Create_SetsAllProperties()
    {
        // Shared constants keep the construction and the asserts in lockstep.
        const string bundleUri = "file:///bundles/bundle-123.tar.gz";
        const string expectedDigest = "sha256:abc123";
        const int maxStalenessSeconds = 3600;

        MirrorImportRequest request = new(
            BundleUri: bundleUri,
            ExpectedDigest: expectedDigest,
            ValidateSignatures: true,
            VerifyProvenance: true,
            MaxStalenessSeconds: maxStalenessSeconds);

        Assert.Equal(bundleUri, request.BundleUri);
        Assert.Equal(expectedDigest, request.ExpectedDigest);
        Assert.True(request.ValidateSignatures);
        Assert.True(request.VerifyProvenance);
        Assert.Equal(maxStalenessSeconds, request.MaxStalenessSeconds);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorImportValidation record: a clean bundle and an invalid bundle
/// that carries validation warnings.
/// </summary>
public sealed class MirrorImportValidationTests
{
    [Fact]
    public void ValidBundle_HasAllValidationData()
    {
        MirrorImportValidation validation = new(
            IsValid: true,
            BundleDigest: "sha256:bundle123",
            ManifestDigest: "sha256:manifest456",
            SignatureVerified: true,
            ProvenanceVerified: true,
            StalenessSeconds: 1200,
            Warnings: null);

        // All verification flags are up and no warnings are attached.
        Assert.True(validation.IsValid);
        Assert.True(validation.SignatureVerified);
        Assert.True(validation.ProvenanceVerified);
        Assert.Equal(1200, validation.StalenessSeconds);
    }

    [Fact]
    public void InvalidBundle_IncludesWarnings()
    {
        MirrorImportValidation validation = new(
            IsValid: false,
            BundleDigest: "sha256:bundle123",
            ManifestDigest: "sha256:manifest456",
            SignatureVerified: false,
            ProvenanceVerified: false,
            StalenessSeconds: 86400,
            Warnings: new[] { "Signature invalid", "Bundle too stale" });

        // Failure keeps the warning list so callers can report why validation failed.
        Assert.False(validation.IsValid);
        Assert.NotNull(validation.Warnings);
        Assert.Equal(2, validation.Warnings.Count);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorImportResult and MirrorImportProvenance.
/// </summary>
public sealed class MirrorImportResultTests
{
    [Fact]
    public void Create_WithProvenance()
    {
        var expectedBundleId = Guid.NewGuid();

        // Provenance records where the bundle came from and how it was signed.
        MirrorImportProvenance provenance = new(
            BundleId: expectedBundleId,
            SourceEnvironment: "production",
            OriginalCreatedAt: DateTimeOffset.UtcNow.AddHours(-2),
            BundleDigest: "sha256:abc123",
            SigningKeyId: "key-001",
            ImportedAt: DateTimeOffset.UtcNow);

        MirrorImportResult result = new(
            DomainsImported: 5,
            RecordsImported: 1500,
            DurationSeconds: 45.5,
            TimeAnchor: null,
            Provenance: provenance);

        Assert.Equal(5, result.DomainsImported);
        Assert.Equal(1500, result.RecordsImported);
        Assert.Equal(45.5, result.DurationSeconds);
        Assert.Equal(expectedBundleId, result.Provenance.BundleId);
        Assert.Equal("production", result.Provenance.SourceEnvironment);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorOperationEvidence and related types.
/// </summary>
public sealed class MirrorOperationEvidenceTests
{
    // Builds evidence with sensible defaults so each test only states what it cares about.
    // Mirrors the CreateTestEvidence pattern used by InMemoryMirrorEvidenceStoreTests and
    // removes three copies of the 20-argument constructor call.
    private static MirrorOperationEvidence CreateEvidence(
        MirrorOperationType operationType = MirrorOperationType.BundleExport,
        MirrorOperationStatus status = MirrorOperationStatus.Completed,
        MirrorOperationError? error = null) =>
        new(
            OperationId: Guid.NewGuid(),
            OperationType: operationType,
            TenantId: "tenant-1",
            ProjectId: null,
            JobId: Guid.NewGuid(),
            Status: status,
            StartedAt: DateTimeOffset.UtcNow,
            CompletedAt: DateTimeOffset.UtcNow,
            SourceEnvironment: "production",
            TargetEnvironment: null,
            BundleDigest: null,
            ManifestDigest: null,
            ProvenanceUri: null,
            AuditTrailUri: null,
            DomainsCount: 0,
            RecordsCount: 0,
            SizeBytes: 0,
            DurationSeconds: 0,
            Error: error);

    [Fact]
    public void Create_ForBundleExport()
    {
        var operationId = Guid.NewGuid();
        var jobId = Guid.NewGuid();

        // Fully explicit construction: this test pins every property, so it does not
        // use the defaults helper.
        var evidence = new MirrorOperationEvidence(
            OperationId: operationId,
            OperationType: MirrorOperationType.BundleExport,
            TenantId: "tenant-1",
            ProjectId: "project-1",
            JobId: jobId,
            Status: MirrorOperationStatus.Completed,
            StartedAt: DateTimeOffset.UtcNow.AddMinutes(-5),
            CompletedAt: DateTimeOffset.UtcNow,
            SourceEnvironment: "production",
            TargetEnvironment: "staging",
            BundleDigest: "sha256:abc123",
            ManifestDigest: "sha256:manifest456",
            ProvenanceUri: "s3://bundles/provenance.json",
            AuditTrailUri: "s3://bundles/audit.ndjson",
            DomainsCount: 5,
            RecordsCount: 1000,
            SizeBytes: 1024 * 1024,
            DurationSeconds: 120.5,
            Error: null);

        Assert.Equal(operationId, evidence.OperationId);
        Assert.Equal(MirrorOperationType.BundleExport, evidence.OperationType);
        Assert.Equal(MirrorOperationStatus.Completed, evidence.Status);
        Assert.Equal("sha256:abc123", evidence.BundleDigest);
        Assert.Equal(5, evidence.DomainsCount);
        Assert.Null(evidence.Error);
    }

    [Fact]
    public void Create_ForFailedOperation()
    {
        var evidence = CreateEvidence(
            operationType: MirrorOperationType.BundleImport,
            status: MirrorOperationStatus.Failed,
            error: new MirrorOperationError("VALIDATION_FAILED", "Bundle signature invalid"));

        Assert.Equal(MirrorOperationStatus.Failed, evidence.Status);
        Assert.NotNull(evidence.Error);
        Assert.Equal("VALIDATION_FAILED", evidence.Error.Code);
    }

    [Theory]
    [InlineData(MirrorOperationType.BundleExport)]
    [InlineData(MirrorOperationType.BundleImport)]
    [InlineData(MirrorOperationType.BundleVerify)]
    [InlineData(MirrorOperationType.BundleSync)]
    [InlineData(MirrorOperationType.BundleDiff)]
    public void OperationType_AllValuesSupported(MirrorOperationType operationType)
    {
        var evidence = CreateEvidence(operationType: operationType);

        Assert.Equal(operationType, evidence.OperationType);
    }

    [Theory]
    [InlineData(MirrorOperationStatus.Started)]
    [InlineData(MirrorOperationStatus.InProgress)]
    [InlineData(MirrorOperationStatus.Completed)]
    [InlineData(MirrorOperationStatus.Failed)]
    [InlineData(MirrorOperationStatus.Cancelled)]
    public void OperationStatus_AllValuesSupported(MirrorOperationStatus status)
    {
        var evidence = CreateEvidence(status: status);

        Assert.Equal(status, evidence.Status);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for InMemoryMirrorEvidenceStore.
/// All store interactions are awaited; no test blocks on async work.
/// </summary>
public sealed class InMemoryMirrorEvidenceStoreTests
{
    // Factory for a minimal completed-export evidence record. Static: it reads no
    // instance state.
    private static MirrorOperationEvidence CreateTestEvidence(Guid? operationId = null, Guid? jobId = null) =>
        new(
            OperationId: operationId ?? Guid.NewGuid(),
            OperationType: MirrorOperationType.BundleExport,
            TenantId: "tenant-1",
            ProjectId: null,
            JobId: jobId ?? Guid.NewGuid(),
            Status: MirrorOperationStatus.Completed,
            StartedAt: DateTimeOffset.UtcNow,
            CompletedAt: DateTimeOffset.UtcNow,
            SourceEnvironment: "production",
            TargetEnvironment: null,
            BundleDigest: "sha256:abc123",
            ManifestDigest: null,
            ProvenanceUri: null,
            AuditTrailUri: null,
            DomainsCount: 1,
            RecordsCount: 100,
            SizeBytes: 1024,
            DurationSeconds: 10,
            Error: null);

    [Fact]
    public async Task Store_AddsEvidence()
    {
        var store = new InMemoryMirrorEvidenceStore();
        var evidence = CreateTestEvidence();

        await store.StoreAsync(evidence);

        Assert.Equal(1, store.Count);
    }

    [Fact]
    public async Task Get_ReturnsStoredEvidence()
    {
        var store = new InMemoryMirrorEvidenceStore();
        var operationId = Guid.NewGuid();
        var evidence = CreateTestEvidence(operationId);

        await store.StoreAsync(evidence);
        var retrieved = await store.GetAsync(operationId);

        Assert.NotNull(retrieved);
        Assert.Equal(operationId, retrieved.OperationId);
    }

    [Fact]
    public async Task Get_ReturnsNullForMissingEvidence()
    {
        var store = new InMemoryMirrorEvidenceStore();

        var retrieved = await store.GetAsync(Guid.NewGuid());

        Assert.Null(retrieved);
    }

    [Fact]
    public async Task ListForJob_ReturnsMatchingEvidence()
    {
        var store = new InMemoryMirrorEvidenceStore();
        var jobId = Guid.NewGuid();

        await store.StoreAsync(CreateTestEvidence(jobId: jobId));
        await store.StoreAsync(CreateTestEvidence(jobId: jobId));
        await store.StoreAsync(CreateTestEvidence()); // Different job

        var forJob = await store.ListForJobAsync(jobId);

        Assert.Equal(2, forJob.Count);
        Assert.All(forJob, e => Assert.Equal(jobId, e.JobId));
    }

    [Fact]
    public async Task Clear_RemovesAllEvidence()
    {
        var store = new InMemoryMirrorEvidenceStore();
        // Awaited instead of the original StoreAsync(...).Wait(): blocking on async
        // work in tests risks deadlocks and thread-pool starvation.
        await store.StoreAsync(CreateTestEvidence());
        await store.StoreAsync(CreateTestEvidence());

        store.Clear();

        Assert.Equal(0, store.Count);
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Test implementation of ITimelineEventEmitter.
|
||||
/// </summary>
|
||||
internal sealed class TestTimelineEventEmitter : ITimelineEventEmitter
|
||||
{
|
||||
private readonly List<(string TenantId, Guid JobId, string EventType)> _emittedEvents = new();
|
||||
private bool _shouldFail;
|
||||
|
||||
public IReadOnlyList<(string TenantId, Guid JobId, string EventType)> EmittedEvents => _emittedEvents;
|
||||
|
||||
public void SetShouldFail(bool fail) => _shouldFail = fail;
|
||||
|
||||
public Task<TimelineEmitResult> EmitAsync(TimelineEvent evt, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_shouldFail)
|
||||
throw new InvalidOperationException("Emitter failed");
|
||||
|
||||
_emittedEvents.Add((evt.TenantId, evt.JobId ?? Guid.Empty, evt.EventType));
|
||||
return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
|
||||
}
|
||||
|
||||
public Task<TimelineBatchEmitResult> EmitBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default)
|
||||
{
|
||||
foreach (var evt in events)
|
||||
{
|
||||
_emittedEvents.Add((evt.TenantId, evt.JobId ?? Guid.Empty, evt.EventType));
|
||||
}
|
||||
return Task.FromResult(new TimelineBatchEmitResult(events.Count(), 0, 0, []));
|
||||
}
|
||||
|
||||
public Task<TimelineEmitResult> EmitJobEventAsync(
|
||||
string tenantId, Guid jobId, string eventType,
|
||||
object? payload = null, string? actor = null, string? correlationId = null,
|
||||
string? traceId = null, string? projectId = null,
|
||||
IReadOnlyDictionary<string, string>? attributes = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_shouldFail)
|
||||
throw new InvalidOperationException("Emitter failed");
|
||||
|
||||
_emittedEvents.Add((tenantId, jobId, eventType));
|
||||
var evt = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, jobId: jobId);
|
||||
return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
|
||||
}
|
||||
|
||||
/// <summary>
/// Records a run-scoped event and reports a successful emit, or throws
/// when failure mode has been enabled — consistent with EmitAsync and
/// EmitJobEventAsync, which already honor <c>_shouldFail</c>.
/// </summary>
public Task<TimelineEmitResult> EmitRunEventAsync(
    string tenantId, Guid runId, string eventType,
    object? payload = null, string? actor = null, string? correlationId = null,
    string? traceId = null, string? projectId = null,
    IReadOnlyDictionary<string, string>? attributes = null,
    CancellationToken cancellationToken = default)
{
    // Consistency fix: this overload previously ignored _shouldFail,
    // so failure-handling paths for run events could not be exercised.
    if (_shouldFail)
    {
        throw new InvalidOperationException("Emitter failed");
    }

    _emittedEvents.Add((tenantId, runId, eventType));

    var created = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, runId: runId);
    return Task.FromResult(new TimelineEmitResult(true, created, false, null));
}
|
||||
|
||||
public void Clear()
{
    // Reset recorded events between test cases.
    _emittedEvents.Clear();
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Test implementation of IJobCapsuleGenerator.
/// Captures every job-level request it receives and always reports success.
/// </summary>
internal sealed class TestJobCapsuleGenerator : IJobCapsuleGenerator
{
    private readonly List<JobCapsuleRequest> _requests = new();

    /// <summary>Requests captured by the generate methods, in call order.</summary>
    public IReadOnlyList<JobCapsuleRequest> Requests => _requests;

    public Task<JobCapsuleResult> GenerateJobSchedulingCapsuleAsync(
        JobCapsuleRequest request, CancellationToken cancellationToken = default)
    {
        _requests.Add(request);
        return SuccessResult();
    }

    public Task<JobCapsuleResult> GenerateJobCompletionCapsuleAsync(
        JobCapsuleRequest request, JobCapsuleOutputs outputs,
        IReadOnlyList<JobCapsuleArtifact>? artifacts = null,
        CancellationToken cancellationToken = default)
    {
        _requests.Add(request);
        return SuccessResult();
    }

    public Task<JobCapsuleResult> GenerateJobFailureCapsuleAsync(
        JobCapsuleRequest request, JobCapsuleError error,
        CancellationToken cancellationToken = default)
    {
        _requests.Add(request);
        return SuccessResult();
    }

    public Task<JobCapsuleResult> GenerateRunCompletionCapsuleAsync(
        string tenantId, Guid runId, string? projectId,
        IReadOnlyList<JobCapsule> jobCapsules,
        IReadOnlyDictionary<string, string>? metadata = null,
        CancellationToken cancellationToken = default)
    {
        // Run-level capsules carry no JobCapsuleRequest, so nothing is tracked
        // in _requests (matches the original behavior).
        return SuccessResult();
    }

    // Shared success path — was duplicated verbatim in all four methods:
    // a fresh capsule id wrapped in an evidence pointer.
    private static Task<JobCapsuleResult> SuccessResult() =>
        Task.FromResult(new JobCapsuleResult(true, null, CreatePointer(Guid.NewGuid()), null));

    private static EvidencePointer CreatePointer(Guid capsuleId) =>
        new(EvidencePointerType.Bundle, capsuleId, "sha256:test", null, null, null, null);

    /// <summary>Clears all captured requests.</summary>
    public void Clear() => _requests.Clear();
}
|
||||
|
||||
/// <summary>
/// Tests for MirrorOperationRecorder.
/// </summary>
public sealed class MirrorOperationRecorderTests
{
    private readonly TestTimelineEventEmitter _emitter;
    private readonly TestJobCapsuleGenerator _capsuleGenerator;
    private readonly InMemoryMirrorEvidenceStore _evidenceStore;
    private readonly MirrorOperationRecorder _recorder;

    public MirrorOperationRecorderTests()
    {
        // Fresh fakes per test (xUnit constructs one class instance per test case).
        _emitter = new TestTimelineEventEmitter();
        _capsuleGenerator = new TestJobCapsuleGenerator();
        _evidenceStore = new InMemoryMirrorEvidenceStore();
        _recorder = new MirrorOperationRecorder(
            _emitter, _capsuleGenerator, _evidenceStore, NullLogger<MirrorOperationRecorder>.Instance);
    }

    // Fully-populated operation context shared by the tests below.
    private static MirrorOperationContext CreateContext() => new(
        TenantId: "tenant-1",
        ProjectId: "project-1",
        JobId: Guid.NewGuid(),
        OperationId: Guid.NewGuid(),
        JobType: MirrorJobTypes.Bundle,
        Actor: "user@example.com",
        TraceId: "trace-123",
        SpanId: "span-456",
        SourceEnvironment: "production",
        TargetEnvironment: "staging");

    private static MirrorBundlePayload CreatePayload() =>
        MirrorBundlePayload.Default(new[] { "vex-advisories", "vulnerability-feeds" });

    // Representative successful bundle result covering two exported domains.
    private static MirrorBundleResult CreateResult() => new(
        OutputUri: "s3://bundles/bundle-123.tar.gz",
        BundleDigest: "sha256:abc123",
        ManifestDigest: "sha256:manifest456",
        BundleSizeBytes: 1024 * 1024,
        IncludedDomains: new[] { "vex-advisories", "vulnerability-feeds" },
        Exports: new[]
        {
            new ExportRecord(Guid.NewGuid(), "vex-advisories", ExportFormat.Ndjson, DateTimeOffset.UtcNow, "sha256:export1", 100),
            new ExportRecord(Guid.NewGuid(), "vulnerability-feeds", ExportFormat.Ndjson, DateTimeOffset.UtcNow, "sha256:export2", 200)
        },
        ProvenanceUri: "s3://bundles/provenance.json",
        AuditTrailUri: "s3://bundles/audit.ndjson",
        AuditEntryCount: 50,
        TimeAnchor: new TimeAnchor(TimeAnchorType.Ntp, DateTimeOffset.UtcNow, null, null, null, false),
        Compression: "gzip",
        SourceEnvironment: "production",
        TargetEnvironment: "staging",
        GeneratedAt: DateTimeOffset.UtcNow,
        DurationSeconds: 120.5,
        Signature: null);

    [Fact]
    public async Task RecordBundleStarted_EmitsTimelineEvent()
    {
        var ctx = CreateContext();

        var outcome = await _recorder.RecordBundleStartedAsync(ctx, CreatePayload());

        Assert.True(outcome.Success);
        Assert.NotNull(outcome.EventId);
        Assert.Null(outcome.CapsuleId); // No capsule for started event

        var evt = Assert.Single(_emitter.EmittedEvents);
        Assert.Equal(ctx.TenantId, evt.TenantId);
        Assert.Equal(ctx.JobId, evt.JobId);
        Assert.Equal(MirrorEventTypes.BundleStarted, evt.EventType);
    }

    [Fact]
    public async Task RecordBundleProgress_EmitsTimelineEvent()
    {
        var ctx = CreateContext();
        var progress = new MirrorBundleProgress(
            Phase: MirrorPhase.CollectingDomainData,
            DomainsProcessed: 1,
            TotalDomains: 2,
            RecordsProcessed: 100,
            BytesWritten: 1024,
            AuditEntriesCollected: 10,
            Message: "Processing domains");

        var outcome = await _recorder.RecordBundleProgressAsync(ctx, progress);

        Assert.True(outcome.Success);
        Assert.Equal(MirrorEventTypes.BundleProgress, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordBundleCompleted_StoresEvidence_And_CreatesCapsule()
    {
        var ctx = CreateContext();
        var bundleResult = CreateResult();

        var outcome = await _recorder.RecordBundleCompletedAsync(ctx, bundleResult);

        Assert.True(outcome.Success);
        Assert.NotNull(outcome.EventId);
        Assert.NotNull(outcome.EvidencePointer);

        // Evidence record persisted under the operation id.
        Assert.Equal(1, _evidenceStore.Count);
        var evidence = await _evidenceStore.GetAsync(ctx.OperationId);
        Assert.NotNull(evidence);
        Assert.Equal(MirrorOperationType.BundleExport, evidence.OperationType);
        Assert.Equal(MirrorOperationStatus.Completed, evidence.Status);
        Assert.Equal(bundleResult.BundleDigest, evidence.BundleDigest);

        // Exactly one completion capsule was requested, scoped to this job.
        var capsuleRequest = Assert.Single(_capsuleGenerator.Requests);
        Assert.Equal(ctx.TenantId, capsuleRequest.TenantId);
        Assert.Equal(ctx.JobId, capsuleRequest.JobId);

        // And a timeline event announced the completion.
        Assert.Equal(MirrorEventTypes.BundleCompleted, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordBundleFailed_StoresEvidence_WithError()
    {
        var ctx = CreateContext();

        var outcome = await _recorder.RecordBundleFailedAsync(
            ctx, "BUNDLE_CREATE_FAILED", "Insufficient disk space");

        Assert.True(outcome.Success);

        // Evidence captures the failure code and message verbatim.
        var evidence = await _evidenceStore.GetAsync(ctx.OperationId);
        Assert.NotNull(evidence);
        Assert.Equal(MirrorOperationStatus.Failed, evidence.Status);
        Assert.NotNull(evidence.Error);
        Assert.Equal("BUNDLE_CREATE_FAILED", evidence.Error.Code);
        Assert.Equal("Insufficient disk space", evidence.Error.Message);

        Assert.Equal(MirrorEventTypes.BundleFailed, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordImportStarted_EmitsTimelineEvent()
    {
        var ctx = CreateContext();
        var request = new MirrorImportRequest(
            BundleUri: "file:///bundles/bundle-123.tar.gz",
            ExpectedDigest: "sha256:abc123",
            ValidateSignatures: true,
            VerifyProvenance: true,
            MaxStalenessSeconds: 3600);

        var outcome = await _recorder.RecordImportStartedAsync(ctx, request);

        Assert.True(outcome.Success);
        Assert.Equal(MirrorEventTypes.ImportStarted, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordImportValidated_EmitsTimelineEvent()
    {
        var ctx = CreateContext();
        var validation = new MirrorImportValidation(
            IsValid: true,
            BundleDigest: "sha256:bundle123",
            ManifestDigest: "sha256:manifest456",
            SignatureVerified: true,
            ProvenanceVerified: true,
            StalenessSeconds: 1200,
            Warnings: null);

        var outcome = await _recorder.RecordImportValidatedAsync(ctx, validation);

        Assert.True(outcome.Success);
        Assert.Equal(MirrorEventTypes.ImportValidated, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordImportCompleted_StoresEvidence()
    {
        var ctx = CreateContext();
        var provenance = new MirrorImportProvenance(
            BundleId: Guid.NewGuid(),
            SourceEnvironment: "production",
            OriginalCreatedAt: DateTimeOffset.UtcNow.AddHours(-2),
            BundleDigest: "sha256:abc123",
            SigningKeyId: "key-001",
            ImportedAt: DateTimeOffset.UtcNow);
        var importResult = new MirrorImportResult(
            DomainsImported: 5,
            RecordsImported: 1500,
            DurationSeconds: 45.5,
            TimeAnchor: null,
            Provenance: provenance);

        var outcome = await _recorder.RecordImportCompletedAsync(ctx, importResult);

        Assert.True(outcome.Success);

        // Evidence reflects the import counts from the result.
        var evidence = await _evidenceStore.GetAsync(ctx.OperationId);
        Assert.NotNull(evidence);
        Assert.Equal(MirrorOperationType.BundleImport, evidence.OperationType);
        Assert.Equal(MirrorOperationStatus.Completed, evidence.Status);
        Assert.Equal(5, evidence.DomainsCount);
        Assert.Equal(1500, evidence.RecordsCount);

        Assert.Equal(MirrorEventTypes.ImportCompleted, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordImportFailed_StoresEvidence_WithError()
    {
        var ctx = CreateContext();

        var outcome = await _recorder.RecordImportFailedAsync(
            ctx, "VALIDATION_FAILED", "Bundle signature invalid");

        Assert.True(outcome.Success);

        var evidence = await _evidenceStore.GetAsync(ctx.OperationId);
        Assert.NotNull(evidence);
        Assert.Equal(MirrorOperationType.BundleImport, evidence.OperationType);
        Assert.Equal(MirrorOperationStatus.Failed, evidence.Status);
        Assert.NotNull(evidence.Error);
        Assert.Equal("VALIDATION_FAILED", evidence.Error.Code);

        Assert.Equal(MirrorEventTypes.ImportFailed, Assert.Single(_emitter.EmittedEvents).EventType);
    }

    [Fact]
    public async Task RecordBundleStarted_HandlesEmitterException()
    {
        // The recorder must surface emitter failures as an unsuccessful
        // result rather than letting the exception escape.
        _emitter.SetShouldFail(true);
        var ctx = CreateContext();

        var outcome = await _recorder.RecordBundleStartedAsync(ctx, CreatePayload());

        Assert.False(outcome.Success);
        Assert.Contains("Emitter failed", outcome.Error);
    }

    [Fact]
    public void Constructor_ThrowsOnNullDependencies()
    {
        var log = NullLogger<MirrorOperationRecorder>.Instance;

        Assert.Throws<ArgumentNullException>(
            () => new MirrorOperationRecorder(null!, _capsuleGenerator, _evidenceStore, log));
        Assert.Throws<ArgumentNullException>(
            () => new MirrorOperationRecorder(_emitter, null!, _evidenceStore, log));
        Assert.Throws<ArgumentNullException>(
            () => new MirrorOperationRecorder(_emitter, _capsuleGenerator, null!, log));
        Assert.Throws<ArgumentNullException>(
            () => new MirrorOperationRecorder(_emitter, _capsuleGenerator, _evidenceStore, null!));
    }

    [Fact]
    public async Task RecordMultipleBundleOperations_TracksAll()
    {
        var ctx = CreateContext();

        // Full bundle lifecycle: start, two progress updates, completion.
        await _recorder.RecordBundleStartedAsync(ctx, CreatePayload());
        await _recorder.RecordBundleProgressAsync(ctx, new MirrorBundleProgress(
            MirrorPhase.CollectingDomainData, 1, 2, 50, 512, 5, "Collecting"));
        await _recorder.RecordBundleProgressAsync(ctx, new MirrorBundleProgress(
            MirrorPhase.Compressing, 2, 2, 100, 1024, 10, "Compressing"));
        await _recorder.RecordBundleCompletedAsync(ctx, CreateResult());

        Assert.Equal(4, _emitter.EmittedEvents.Count);
        Assert.Equal(MirrorEventTypes.BundleStarted, _emitter.EmittedEvents[0].EventType);
        Assert.Equal(MirrorEventTypes.BundleProgress, _emitter.EmittedEvents[1].EventType);
        Assert.Equal(MirrorEventTypes.BundleProgress, _emitter.EmittedEvents[2].EventType);
        Assert.Equal(MirrorEventTypes.BundleCompleted, _emitter.EmittedEvents[3].EventType);
    }
}
|
||||
// NOTE(review): removed stray unified-diff hunk header ("@@ -0,0 +1,761 @@") left behind by a bad merge/paste.
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
using StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Evidence;
|
||||
|
||||
/// <summary>
/// Tests for JobAttestation domain models.
/// Per ORCH-OBS-54-001.
/// </summary>
public sealed class JobAttestationModelTests
{
    [Fact]
    public void JobAttestation_ToJson_ProducesValidJson()
    {
        var attestation = CreateTestAttestation();

        var serialized = attestation.ToJson();

        // camelCase property names are part of the serialization contract.
        Assert.NotNull(serialized);
        Assert.Contains("\"attestationId\":", serialized);
        Assert.Contains("\"tenantId\":\"tenant-1\"", serialized);
        Assert.Contains("\"predicateType\":", serialized);
    }

    [Fact]
    public void JobAttestation_FromJson_Roundtrips()
    {
        var original = CreateTestAttestation();

        var roundTripped = JobAttestation.FromJson(original.ToJson());

        Assert.NotNull(roundTripped);
        Assert.Equal(original.AttestationId, roundTripped.AttestationId);
        Assert.Equal(original.TenantId, roundTripped.TenantId);
        Assert.Equal(original.JobId, roundTripped.JobId);
        Assert.Equal(original.PredicateType, roundTripped.PredicateType);
        Assert.Equal(original.PayloadDigest, roundTripped.PayloadDigest);
    }

    [Fact]
    public void JobAttestation_Constants_HaveExpectedValues()
    {
        Assert.Equal("1.0.0", JobAttestation.CurrentSchemaVersion);
        Assert.Equal("https://in-toto.io/Statement/v1", JobAttestation.InTotoStatementV1);
        Assert.Equal("https://in-toto.io/Statement/v0.1", JobAttestation.InTotoStatementV01);
    }

    [Fact]
    public void DsseEnvelope_DecodePayload_ReturnsCorrectBytes()
    {
        var expectedBytes = "{\"test\":\"value\"}"u8.ToArray();
        var envelope = new DsseEnvelope(
            Payload: Convert.ToBase64String(expectedBytes),
            PayloadType: DsseEnvelope.InTotoPayloadType,
            Signatures: new[] { new DsseSignature("key-1", "sig") });

        var decoded = envelope.DecodePayload();

        Assert.Equal(expectedBytes, decoded);
    }

    [Fact]
    public void DsseEnvelope_ComputePayloadDigest_ReturnsValidSha256()
    {
        var payloadBytes = "{\"test\":\"value\"}"u8.ToArray();
        var envelope = new DsseEnvelope(
            Payload: Convert.ToBase64String(payloadBytes),
            PayloadType: DsseEnvelope.InTotoPayloadType,
            Signatures: new[] { new DsseSignature("key-1", "sig") });

        var digest = envelope.ComputePayloadDigest();

        Assert.NotNull(digest);
        Assert.StartsWith("sha256:", digest);
        Assert.Equal(71, digest.Length); // "sha256:" + 64 hex chars
    }

    [Fact]
    public void DsseEnvelope_InTotoPayloadType_HasExpectedValue()
    {
        Assert.Equal("application/vnd.in-toto+json", DsseEnvelope.InTotoPayloadType);
    }

    [Fact]
    public void AttestationSubject_StoresDigests()
    {
        var digests = new Dictionary<string, string>
        {
            ["sha256"] = "abc123",
            ["sha512"] = "def456"
        };

        var subject = new AttestationSubject(Name: "job:tenant-1/12345", Digest: digests);

        Assert.Equal("job:tenant-1/12345", subject.Name);
        Assert.Equal(2, subject.Digest.Count);
        Assert.Equal("abc123", subject.Digest["sha256"]);
    }

    [Fact]
    public void InTotoStatement_ToCanonicalJson_IsDeterministic()
    {
        var statement = new InTotoStatement(
            Type: JobAttestation.InTotoStatementV1,
            Subject: new[]
            {
                new InTotoSubject("subject-1", new Dictionary<string, string> { ["sha256"] = "abc" }),
                new InTotoSubject("subject-2", new Dictionary<string, string> { ["sha256"] = "def" })
            },
            PredicateType: JobPredicateTypes.JobCompletion,
            Predicate: JsonSerializer.SerializeToElement(new { key = "value" }));

        var first = statement.ToCanonicalJson();
        var second = statement.ToCanonicalJson();

        // Canonicalization must yield byte-identical output on repeated calls.
        Assert.Equal(first, second);
    }

    [Fact]
    public void InTotoStatement_ToCanonicalJson_SortsDigestKeys()
    {
        // Digest keys are supplied out of order on purpose.
        var statement = new InTotoStatement(
            Type: JobAttestation.InTotoStatementV1,
            Subject: new[]
            {
                new InTotoSubject("subject-1", new Dictionary<string, string>
                {
                    ["sha512"] = "def",
                    ["sha256"] = "abc"
                })
            },
            PredicateType: JobPredicateTypes.JobCompletion,
            Predicate: JsonSerializer.SerializeToElement(new { key = "value" }));

        var canonical = System.Text.Encoding.UTF8.GetString(statement.ToCanonicalJson());

        // sha256 should come before sha512 due to alphabetical sorting
        var idx256 = canonical.IndexOf("sha256");
        var idx512 = canonical.IndexOf("sha512");
        Assert.True(idx256 < idx512, "Digest keys should be sorted alphabetically");
    }

    // Builds a fully-populated attestation used by the serialization tests above.
    private static JobAttestation CreateTestAttestation()
    {
        var subjects = new[]
        {
            new AttestationSubject("job:tenant-1/123", new Dictionary<string, string> { ["sha256"] = "abc123" })
        };
        var envelope = new DsseEnvelope(
            Payload: Convert.ToBase64String("{}"u8.ToArray()),
            PayloadType: DsseEnvelope.InTotoPayloadType,
            Signatures: new[] { new DsseSignature("key-1", "sig") });

        return new JobAttestation(
            AttestationId: Guid.NewGuid(),
            TenantId: "tenant-1",
            JobId: Guid.NewGuid(),
            RunId: Guid.NewGuid(),
            ProjectId: "project-1",
            StatementType: JobAttestation.InTotoStatementV1,
            PredicateType: JobPredicateTypes.JobCompletion,
            Subjects: subjects,
            Envelope: envelope,
            CreatedAt: DateTimeOffset.UtcNow,
            PayloadDigest: "sha256:abc123",
            EvidencePointer: null);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for JobPredicateTypes.
/// </summary>
public sealed class JobPredicateTypesTests
{
    [Fact]
    public void PredicateTypes_HaveExpectedValues()
    {
        // Pin the wire-format identifiers: consumers match on these exact strings.
        Assert.Equal("stella.ops/job-completion@v1", JobPredicateTypes.JobCompletion);
        Assert.Equal("stella.ops/job-scheduling@v1", JobPredicateTypes.JobScheduling);
        Assert.Equal("stella.ops/run-completion@v1", JobPredicateTypes.RunCompletion);
        Assert.Equal("stella.ops/evidence@v1", JobPredicateTypes.Evidence);
        Assert.Equal("stella.ops/mirror-bundle@v1", JobPredicateTypes.MirrorBundle);
    }

    [Theory]
    [InlineData("stella.ops/job-completion@v1", true)]
    [InlineData("stella.ops/evidence@v1", true)]
    [InlineData("https://slsa.dev/provenance/v1", false)]
    [InlineData("custom/type", false)]
    public void IsStellaOpsType_ReturnsCorrectResult(string predicateType, bool expected)
        => Assert.Equal(expected, JobPredicateTypes.IsStellaOpsType(predicateType));
}
|
||||
|
||||
/// <summary>
/// Tests for JobCompletionPredicate.
/// </summary>
public sealed class JobCompletionPredicateTests
{
    [Fact]
    public void Create_WithAllFields()
    {
        // Every optional field populated.
        var full = new JobCompletionPredicate(
            JobId: Guid.NewGuid(),
            RunId: Guid.NewGuid(),
            JobType: "scan.image",
            TenantId: "tenant-1",
            ProjectId: "project-1",
            Status: "completed",
            ExitCode: 0,
            StartedAt: DateTimeOffset.UtcNow.AddMinutes(-5),
            CompletedAt: DateTimeOffset.UtcNow,
            DurationSeconds: 300,
            InputHash: "sha256:input123",
            OutputHash: "sha256:output456",
            Artifacts: new[]
            {
                new ArtifactDigest("output.json", "sha256:artifact123", 1024)
            },
            Environment: new JobEnvironmentInfo("worker-1", "1.0.0", "sha256:image123"),
            CapsuleId: "capsule-123",
            CapsuleDigest: "sha256:capsule456");

        Assert.Equal("completed", full.Status);
        Assert.Equal(0, full.ExitCode);
        Assert.NotNull(full.Artifacts);
        Assert.Single(full.Artifacts);
    }

    [Fact]
    public void Create_WithMinimalFields()
    {
        // Every optional field left null; only the required shape remains.
        var minimal = new JobCompletionPredicate(
            JobId: Guid.NewGuid(),
            RunId: null,
            JobType: "test.job",
            TenantId: "tenant-1",
            ProjectId: null,
            Status: "completed",
            ExitCode: null,
            StartedAt: null,
            CompletedAt: DateTimeOffset.UtcNow,
            DurationSeconds: 0,
            InputHash: null,
            OutputHash: null,
            Artifacts: null,
            Environment: null,
            CapsuleId: null,
            CapsuleDigest: null);

        Assert.Null(minimal.RunId);
        Assert.Null(minimal.ExitCode);
        Assert.Null(minimal.Artifacts);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for HmacJobAttestationSigner.
/// </summary>
public sealed class HmacJobAttestationSignerTests
{
    [Fact]
    public async Task SignAsync_CreatesDsseEnvelope()
    {
        var sut = new HmacJobAttestationSigner();
        var payload = "{\"test\":\"value\"}"u8.ToArray();

        var envelope = await sut.SignAsync(payload, DsseEnvelope.InTotoPayloadType);

        Assert.NotNull(envelope);
        Assert.Equal(DsseEnvelope.InTotoPayloadType, envelope.PayloadType);
        var signature = Assert.Single(envelope.Signatures);
        Assert.NotEmpty(signature.Sig);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsTrueForValidSignature()
    {
        var sut = new HmacJobAttestationSigner();
        var payload = "{\"test\":\"value\"}"u8.ToArray();
        var envelope = await sut.SignAsync(payload, DsseEnvelope.InTotoPayloadType);

        var verified = await sut.VerifyAsync(envelope);

        Assert.True(verified);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsFalseForTamperedPayload()
    {
        var sut = new HmacJobAttestationSigner();
        var envelope = await sut.SignAsync("{\"test\":\"value\"}"u8.ToArray(), DsseEnvelope.InTotoPayloadType);

        // Reuse the original signatures over a different payload.
        var forged = new DsseEnvelope(
            Payload: Convert.ToBase64String("{\"test\":\"tampered\"}"u8.ToArray()),
            PayloadType: envelope.PayloadType,
            Signatures: envelope.Signatures);

        var verified = await sut.VerifyAsync(forged);

        Assert.False(verified);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsFalseForDifferentKey()
    {
        // Two independent signers hold different keys.
        var issuer = new HmacJobAttestationSigner();
        var other = new HmacJobAttestationSigner();
        var payload = "{\"test\":\"value\"}"u8.ToArray();

        var envelope = await issuer.SignAsync(payload, DsseEnvelope.InTotoPayloadType);
        var verified = await other.VerifyAsync(envelope);

        Assert.False(verified);
    }

    [Fact]
    public void GetCurrentKeyId_ReturnsConsistentValue()
    {
        var sut = new HmacJobAttestationSigner();

        var first = sut.GetCurrentKeyId();
        var second = sut.GetCurrentKeyId();

        Assert.Equal(first, second);
        Assert.StartsWith("hmac-key-", first);
    }

    [Fact]
    public async Task SignAsync_IncludesKeyIdInSignature()
    {
        var sut = new HmacJobAttestationSigner();

        var envelope = await sut.SignAsync("{\"test\":\"value\"}"u8.ToArray(), DsseEnvelope.InTotoPayloadType);

        Assert.Equal(sut.GetCurrentKeyId(), envelope.Signatures[0].KeyId);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for NoOpJobAttestationSigner.
/// </summary>
public sealed class NoOpJobAttestationSignerTests
{
    [Fact]
    public async Task SignAsync_CreatesEnvelopeWithPlaceholderSignature()
    {
        var sut = NoOpJobAttestationSigner.Instance;

        var envelope = await sut.SignAsync("{\"test\":\"value\"}"u8.ToArray(), DsseEnvelope.InTotoPayloadType);

        Assert.NotNull(envelope);
        Assert.Equal("no-op", envelope.Signatures[0].KeyId);
    }

    [Fact]
    public async Task VerifyAsync_AlwaysReturnsTrue()
    {
        var sut = NoOpJobAttestationSigner.Instance;
        // Arbitrary key/signature content — the no-op signer never rejects.
        var envelope = new DsseEnvelope(
            Payload: Convert.ToBase64String("{}"u8.ToArray()),
            PayloadType: DsseEnvelope.InTotoPayloadType,
            Signatures: new[] { new DsseSignature("any", "any") });

        var verified = await sut.VerifyAsync(envelope);

        Assert.True(verified);
    }

    [Fact]
    public void Instance_ReturnsSingleton()
    {
        Assert.Same(NoOpJobAttestationSigner.Instance, NoOpJobAttestationSigner.Instance);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for InMemoryJobAttestationStore.
/// </summary>
public sealed class InMemoryJobAttestationStoreTests
{
    // Builds a minimal stored attestation; unspecified ids default to fresh GUIDs.
    private static JobAttestation CreateTestAttestation(Guid? attestationId = null, Guid? jobId = null, Guid? runId = null)
    {
        return new JobAttestation(
            AttestationId: attestationId ?? Guid.NewGuid(),
            TenantId: "tenant-1",
            JobId: jobId ?? Guid.NewGuid(),
            RunId: runId,
            ProjectId: null,
            StatementType: JobAttestation.InTotoStatementV1,
            PredicateType: JobPredicateTypes.JobCompletion,
            Subjects: new[] { new AttestationSubject("test", new Dictionary<string, string> { ["sha256"] = "abc" }) },
            Envelope: new DsseEnvelope("e30=", DsseEnvelope.InTotoPayloadType, new[] { new DsseSignature("k", "s") }),
            CreatedAt: DateTimeOffset.UtcNow,
            PayloadDigest: "sha256:test",
            EvidencePointer: null);
    }

    [Fact]
    public async Task Store_AddsAttestation()
    {
        var store = new InMemoryJobAttestationStore();

        await store.StoreAsync(CreateTestAttestation());

        Assert.Equal(1, store.Count);
    }

    [Fact]
    public async Task Get_ReturnsStoredAttestation()
    {
        var store = new InMemoryJobAttestationStore();
        var attestationId = Guid.NewGuid();

        await store.StoreAsync(CreateTestAttestation(attestationId));
        var retrieved = await store.GetAsync(attestationId);

        Assert.NotNull(retrieved);
        Assert.Equal(attestationId, retrieved.AttestationId);
    }

    [Fact]
    public async Task Get_ReturnsNullForMissingAttestation()
    {
        var store = new InMemoryJobAttestationStore();

        var retrieved = await store.GetAsync(Guid.NewGuid());

        Assert.Null(retrieved);
    }

    [Fact]
    public async Task GetForJob_ReturnsMatchingAttestations()
    {
        var store = new InMemoryJobAttestationStore();
        var jobId = Guid.NewGuid();

        await store.StoreAsync(CreateTestAttestation(jobId: jobId));
        await store.StoreAsync(CreateTestAttestation(jobId: jobId));
        await store.StoreAsync(CreateTestAttestation()); // Different job

        var forJob = await store.GetForJobAsync(jobId);

        Assert.Equal(2, forJob.Count);
        Assert.All(forJob, a => Assert.Equal(jobId, a.JobId));
    }

    [Fact]
    public async Task GetForRun_ReturnsMatchingAttestations()
    {
        var store = new InMemoryJobAttestationStore();
        var runId = Guid.NewGuid();

        await store.StoreAsync(CreateTestAttestation(runId: runId));
        await store.StoreAsync(CreateTestAttestation(runId: runId));
        await store.StoreAsync(CreateTestAttestation(runId: Guid.NewGuid()));

        var forRun = await store.GetForRunAsync(runId);

        Assert.Equal(2, forRun.Count);
        Assert.All(forRun, a => Assert.Equal(runId, a.RunId));
    }

    [Fact]
    public async Task Clear_RemovesAllAttestations()
    {
        var store = new InMemoryJobAttestationStore();
        // Fix: await instead of blocking with .Wait(). Sync-over-async in tests
        // risks deadlocks and buries failures inside an AggregateException.
        await store.StoreAsync(CreateTestAttestation());
        await store.StoreAsync(CreateTestAttestation());

        store.Clear();

        Assert.Equal(0, store.Count);
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for JobAttestationService.
|
||||
/// </summary>
|
||||
public sealed class JobAttestationServiceTests
{
    private readonly HmacJobAttestationSigner _signer;
    private readonly InMemoryJobAttestationStore _store;
    private readonly TestTimelineEventEmitter _emitter;
    private readonly JobAttestationService _service;

    public JobAttestationServiceTests()
    {
        // xUnit constructor acts as per-test setup: fresh signer, store,
        // and emitter for every test so state never leaks between tests.
        _signer = new HmacJobAttestationSigner();
        _store = new InMemoryJobAttestationStore();
        _emitter = new TestTimelineEventEmitter();

        _service = new JobAttestationService(
            _signer,
            _store,
            _emitter,
            NullLogger<JobAttestationService>.Instance);
    }

    /// <summary>
    /// Builds a canonical attestation request; callers override only the
    /// fields relevant to their scenario.
    /// </summary>
    private JobAttestationRequest CreateRequest(
        Guid? jobId = null,
        Guid? runId = null,
        string status = "completed") =>
        new(
            TenantId: "tenant-1",
            JobId: jobId ?? Guid.NewGuid(),
            RunId: runId,
            JobType: "test.job",
            ProjectId: "project-1",
            Status: status,
            ExitCode: status == "completed" ? 0 : 1,
            StartedAt: DateTimeOffset.UtcNow.AddMinutes(-5),
            CompletedAt: DateTimeOffset.UtcNow,
            DurationSeconds: 300,
            InputPayloadJson: "{\"input\":\"data\"}",
            OutputPayloadJson: "{\"output\":\"result\"}",
            Artifacts: null,
            Environment: null,
            Capsule: null);

    [Fact]
    public async Task GenerateJobCompletionAttestationAsync_CreatesValidAttestation()
    {
        var request = CreateRequest();

        var result = await _service.GenerateJobCompletionAttestationAsync(request);

        Assert.True(result.Success);
        Assert.NotNull(result.Attestation);
        Assert.NotNull(result.EvidencePointer);
        Assert.Equal(JobPredicateTypes.JobCompletion, result.Attestation.PredicateType);
        Assert.Equal(request.TenantId, result.Attestation.TenantId);
        Assert.Equal(request.JobId, result.Attestation.JobId);
    }

    [Fact]
    public async Task GenerateJobCompletionAttestationAsync_StoresAttestation()
    {
        var request = CreateRequest();

        var result = await _service.GenerateJobCompletionAttestationAsync(request);

        Assert.Equal(1, _store.Count);
        var stored = await _store.GetAsync(result.Attestation!.AttestationId);
        Assert.NotNull(stored);
    }

    [Fact]
    public async Task GenerateJobCompletionAttestationAsync_EmitsTimelineEvent()
    {
        var request = CreateRequest();

        await _service.GenerateJobCompletionAttestationAsync(request);

        var emittedEvent = Assert.Single(_emitter.EmittedEvents);
        Assert.Equal("job.attestation.created", emittedEvent.EventType);
    }

    [Fact]
    public async Task GenerateJobCompletionAttestationAsync_SignsEnvelope()
    {
        var request = CreateRequest();

        var result = await _service.GenerateJobCompletionAttestationAsync(request);

        Assert.NotNull(result.Attestation);
        Assert.NotEmpty(result.Attestation.Envelope.Signatures);
        // The envelope must verify against the same signer that produced it.
        var valid = await _signer.VerifyAsync(result.Attestation.Envelope);
        Assert.True(valid);
    }

    [Fact]
    public async Task GenerateJobCompletionAttestationAsync_IncludesCapsuleReference()
    {
        var capsule = JobCapsule.Create(
            "tenant-1",
            Guid.NewGuid(),
            "test.job",
            JobCapsuleKind.JobCompletion,
            JobCapsuleInputs.FromPayload("{}"));
        var request = CreateRequest() with { Capsule = capsule };

        var result = await _service.GenerateJobCompletionAttestationAsync(request);

        Assert.NotNull(result.Attestation);
        // One subject for the job itself plus one for the attached capsule.
        Assert.Equal(2, result.Attestation.Subjects.Count);
        Assert.Contains(result.Attestation.Subjects, s => s.Name.StartsWith("capsule:"));
    }

    [Fact]
    public async Task GenerateJobSchedulingAttestationAsync_CreatesValidAttestation()
    {
        var request = CreateRequest();

        var result = await _service.GenerateJobSchedulingAttestationAsync(request);

        Assert.True(result.Success);
        Assert.NotNull(result.Attestation);
        Assert.Equal(JobPredicateTypes.JobScheduling, result.Attestation.PredicateType);
    }

    [Fact]
    public async Task GenerateRunCompletionAttestationAsync_CreatesValidAttestation()
    {
        var runId = Guid.NewGuid();
        var jobAttestations = new List<JobAttestation>();

        // Create job attestations first
        for (int i = 0; i < 3; i++)
        {
            var jobRequest = CreateRequest(runId: runId);
            var jobResult = await _service.GenerateJobCompletionAttestationAsync(jobRequest);
            jobAttestations.Add(jobResult.Attestation!);
        }

        // Drop the job-level events so only the run-level emission is observed.
        _emitter.Clear();

        var result = await _service.GenerateRunCompletionAttestationAsync(
            "tenant-1", runId, "project-1", jobAttestations);

        Assert.True(result.Success);
        Assert.NotNull(result.Attestation);
        Assert.Equal(JobPredicateTypes.RunCompletion, result.Attestation.PredicateType);
        Assert.Equal(4, result.Attestation.Subjects.Count); // 1 run + 3 job attestations
    }

    [Fact]
    public async Task GetJobAttestationAsync_ReturnsLatestCompletionAttestation()
    {
        var jobId = Guid.NewGuid();

        // Generate multiple attestations; lookup should prefer the completion one.
        var request1 = CreateRequest(jobId: jobId);
        await _service.GenerateJobSchedulingAttestationAsync(request1);
        var request2 = CreateRequest(jobId: jobId);
        var completionResult = await _service.GenerateJobCompletionAttestationAsync(request2);

        var retrieved = await _service.GetJobAttestationAsync(jobId);

        Assert.NotNull(retrieved);
        Assert.Equal(JobPredicateTypes.JobCompletion, retrieved.PredicateType);
        Assert.Equal(completionResult.Attestation!.AttestationId, retrieved.AttestationId);
    }

    [Fact]
    public async Task GetJobAttestationAsync_ReturnsNullForNonexistentJob()
    {
        var result = await _service.GetJobAttestationAsync(Guid.NewGuid());

        Assert.Null(result);
    }

    [Fact]
    public async Task VerifyAttestationAsync_ValidatesValidAttestation()
    {
        var request = CreateRequest();
        var createResult = await _service.GenerateJobCompletionAttestationAsync(request);

        var verifyResult = await _service.VerifyAttestationAsync(createResult.Attestation!);

        Assert.True(verifyResult.Valid);
        Assert.NotNull(verifyResult.SigningKeyId);
        Assert.NotNull(verifyResult.CreatedAt);
        Assert.Null(verifyResult.Error);
    }

    [Fact]
    public async Task VerifyAttestationAsync_DetectsTamperedPayload()
    {
        var request = CreateRequest();
        var createResult = await _service.GenerateJobCompletionAttestationAsync(request);
        var attestation = createResult.Attestation!;

        // Create tampered attestation with wrong digest
        var tampered = attestation with { PayloadDigest = "sha256:tampered" };

        var verifyResult = await _service.VerifyAttestationAsync(tampered);

        Assert.False(verifyResult.Valid);
        Assert.Contains("digest mismatch", verifyResult.Error);
    }

    [Fact]
    public void Constructor_ThrowsOnNullDependencies()
    {
        // Each constructor dependency is validated independently.
        Assert.Throws<ArgumentNullException>(() => new JobAttestationService(
            null!, _store, _emitter, NullLogger<JobAttestationService>.Instance));

        Assert.Throws<ArgumentNullException>(() => new JobAttestationService(
            _signer, null!, _emitter, NullLogger<JobAttestationService>.Instance));

        Assert.Throws<ArgumentNullException>(() => new JobAttestationService(
            _signer, _store, null!, NullLogger<JobAttestationService>.Instance));

        Assert.Throws<ArgumentNullException>(() => new JobAttestationService(
            _signer, _store, _emitter, null!));
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Test implementation of ITimelineEventEmitter.
|
||||
/// </summary>
|
||||
internal sealed class TestTimelineEventEmitter : ITimelineEventEmitter
{
    // Records every emission as (tenant, job-or-run id, event type) so tests
    // can assert on what was emitted without a real timeline backend.
    private readonly List<(string TenantId, Guid JobId, string EventType)> _emittedEvents = new();

    /// <summary>All events emitted since construction or the last <see cref="Clear"/>.</summary>
    public IReadOnlyList<(string TenantId, Guid JobId, string EventType)> EmittedEvents => _emittedEvents;

    public Task<TimelineEmitResult> EmitAsync(TimelineEvent evt, CancellationToken cancellationToken = default)
    {
        _emittedEvents.Add((evt.TenantId, evt.JobId ?? Guid.Empty, evt.EventType));
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    public Task<TimelineBatchEmitResult> EmitBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default)
    {
        // Materialize once: the original enumerated `events` twice (foreach,
        // then Count()), which re-runs lazy sequences and can double-record.
        var batch = events as IReadOnlyCollection<TimelineEvent> ?? events.ToList();
        foreach (var evt in batch)
        {
            _emittedEvents.Add((evt.TenantId, evt.JobId ?? Guid.Empty, evt.EventType));
        }
        return Task.FromResult(new TimelineBatchEmitResult(batch.Count, 0, 0, Array.Empty<string>()));
    }

    public Task<TimelineEmitResult> EmitJobEventAsync(
        string tenantId, Guid jobId, string eventType,
        object? payload = null, string? actor = null, string? correlationId = null,
        string? traceId = null, string? projectId = null,
        IReadOnlyDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        _emittedEvents.Add((tenantId, jobId, eventType));
        var evt = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, jobId: jobId);
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    public Task<TimelineEmitResult> EmitRunEventAsync(
        string tenantId, Guid runId, string eventType,
        object? payload = null, string? actor = null, string? correlationId = null,
        string? traceId = null, string? projectId = null,
        IReadOnlyDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        // Run ids are recorded in the JobId slot of the tuple; tests only
        // assert on tenant/event type so the overlap is acceptable here.
        _emittedEvents.Add((tenantId, runId, eventType));
        var evt = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, runId: runId);
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    /// <summary>Forgets all recorded events.</summary>
    public void Clear() => _emittedEvents.Clear();
}
|
||||
@@ -0,0 +1,367 @@
|
||||
using StellaOps.Orchestrator.Core.Evidence;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Evidence;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for JobCapsule domain models.
|
||||
/// Per ORCH-OBS-53-001.
|
||||
/// </summary>
|
||||
public sealed class JobCapsuleTests
{
    [Fact]
    public void JobCapsule_Create_GeneratesUniqueId()
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule1 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);
        var capsule2 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);

        Assert.NotEqual(capsule1.CapsuleId, capsule2.CapsuleId);
    }

    [Fact]
    public void JobCapsule_Create_SetsSchemaVersion()
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);

        Assert.Equal(JobCapsule.CurrentSchemaVersion, capsule.SchemaVersion);
    }

    [Fact]
    public void JobCapsule_Create_ComputesRootHash()
    {
        var inputs = JobCapsuleInputs.FromPayload("{\"key\":\"value\"}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);

        Assert.NotNull(capsule.RootHash);
        Assert.StartsWith("sha256:", capsule.RootHash);
    }

    [Fact]
    public void JobCapsule_ToJson_ProducesValidJson()
    {
        var inputs = JobCapsuleInputs.FromPayload("{\"format\":\"json\"}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);

        var json = capsule.ToJson();

        Assert.NotNull(json);
        Assert.Contains("\"capsuleId\":", json);
        Assert.Contains("\"tenantId\":\"tenant-1\"", json);
        Assert.Contains("\"schemaVersion\":\"1.0.0\"", json);
    }

    [Fact]
    public void JobCapsule_FromJson_Roundtrips()
    {
        var jobId = Guid.NewGuid();
        var inputs = JobCapsuleInputs.FromPayload("{\"format\":\"json\"}");
        var original = JobCapsule.Create("tenant-1", jobId, "export.ledger", JobCapsuleKind.JobScheduling, inputs, projectId: "proj-1");

        var json = original.ToJson();
        var restored = JobCapsule.FromJson(json);

        Assert.NotNull(restored);
        Assert.Equal(original.CapsuleId, restored.CapsuleId);
        Assert.Equal(original.TenantId, restored.TenantId);
        Assert.Equal(original.JobId, restored.JobId);
        Assert.Equal(original.ProjectId, restored.ProjectId);
        Assert.Equal(original.RootHash, restored.RootHash);
    }

    [Fact]
    public void JobCapsule_ToEvidencePointer_CreatesValidPointer()
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobScheduling, inputs);

        var pointer = capsule.ToEvidencePointer();

        Assert.NotNull(pointer);
        Assert.Equal(capsule.CapsuleId, pointer.BundleId);
        Assert.Equal(capsule.RootHash, pointer.BundleDigest);
    }

    [Fact]
    public void JobCapsuleInputs_FromPayload_ComputesHash()
    {
        var inputs = JobCapsuleInputs.FromPayload("{\"key\":\"value\"}");

        Assert.NotNull(inputs.PayloadHash);
        Assert.StartsWith("sha256:", inputs.PayloadHash);
    }

    [Fact]
    public void JobCapsuleInputs_SamePayload_SameHash()
    {
        var inputs1 = JobCapsuleInputs.FromPayload("{\"key\":\"value\"}");
        var inputs2 = JobCapsuleInputs.FromPayload("{\"key\":\"value\"}");

        Assert.Equal(inputs1.PayloadHash, inputs2.PayloadHash);
    }

    [Fact]
    public void JobCapsuleInputs_DifferentPayload_DifferentHash()
    {
        var inputs1 = JobCapsuleInputs.FromPayload("{\"key\":\"value1\"}");
        var inputs2 = JobCapsuleInputs.FromPayload("{\"key\":\"value2\"}");

        Assert.NotEqual(inputs1.PayloadHash, inputs2.PayloadHash);
    }

    [Theory]
    [InlineData(JobCapsuleKind.JobScheduling)]
    [InlineData(JobCapsuleKind.JobCompletion)]
    [InlineData(JobCapsuleKind.JobFailure)]
    [InlineData(JobCapsuleKind.JobCancellation)]
    [InlineData(JobCapsuleKind.RunCompletion)]
    public void JobCapsule_SupportsAllKinds(JobCapsuleKind kind)
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", kind, inputs);

        Assert.Equal(kind, capsule.Kind);
    }

    [Fact]
    public void JobCapsule_WithArtifacts_IncludesInHash()
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var artifacts = new List<JobCapsuleArtifact>
        {
            new("output.json", "sha256:abc123", 1024, "application/json", null, null)
        };

        var capsule1 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobCompletion, inputs);
        var capsule2 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobCompletion, inputs, artifacts: artifacts);

        // Different artifacts should result in different root hashes
        Assert.NotEqual(capsule1.RootHash, capsule2.RootHash);
    }

    [Fact]
    public void JobCapsule_WithOutputs_IncludesInHash()
    {
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var outputs = new JobCapsuleOutputs(
            Status: "completed",
            ExitCode: 0,
            ResultSummary: "Success",
            ResultHash: "sha256:result123",
            DurationSeconds: 10.5,
            RetryCount: 0,
            Error: null);

        var capsule1 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobCompletion, inputs);
        var capsule2 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "export.ledger", JobCapsuleKind.JobCompletion, inputs, outputs: outputs);

        Assert.NotEqual(capsule1.RootHash, capsule2.RootHash);
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for JobRedactionGuard.
|
||||
/// </summary>
|
||||
public sealed class JobRedactionGuardTests
{
    // Default-configured guard shared by tests that don't need custom options.
    private readonly JobRedactionGuard _guard = new();

    [Fact]
    public void RedactPayload_RedactsSensitiveProperties()
    {
        var payload = "{\"username\":\"admin\",\"password\":\"secret123\"}";

        var redacted = _guard.RedactPayload(payload);

        Assert.Contains("\"username\":\"admin\"", redacted);
        Assert.DoesNotContain("secret123", redacted);
        Assert.Contains("[REDACTED", redacted);
    }

    [Fact]
    public void RedactPayload_RedactsApiKey()
    {
        var payload = "{\"api_key\":\"sk-abc123\"}";

        var redacted = _guard.RedactPayload(payload);

        Assert.DoesNotContain("sk-abc123", redacted);
        Assert.Contains("[REDACTED", redacted);
    }

    [Fact]
    public void RedactPayload_RedactsToken()
    {
        var payload = "{\"token\":\"bearer_xyz\"}";

        var redacted = _guard.RedactPayload(payload);

        Assert.DoesNotContain("bearer_xyz", redacted);
    }

    [Fact]
    public void RedactPayload_PreservesNonSensitiveData()
    {
        var payload = "{\"format\":\"json\",\"count\":100}";

        var redacted = _guard.RedactPayload(payload);

        Assert.Contains("\"format\":\"json\"", redacted);
        Assert.Contains("\"count\":100", redacted);
    }

    [Fact]
    public void RedactPayload_TruncatesLongContent()
    {
        // Use a tiny MaxOutputLength so truncation triggers with a small payload.
        var options = new JobRedactionGuardOptions(
            SensitivePropertyPatterns: JobRedactionGuardOptions.Default.SensitivePropertyPatterns,
            SensitiveContentPatterns: JobRedactionGuardOptions.Default.SensitiveContentPatterns,
            HashRedactedValues: true,
            MaxOutputLength: 100,
            PreserveEmailDomain: false);

        var guard = new JobRedactionGuard(options);
        var payload = new string('x', 200);

        var redacted = guard.RedactPayload(payload);

        Assert.True(redacted.Length <= 100);
        Assert.EndsWith("[TRUNCATED]", redacted);
    }

    [Fact]
    public void RedactIdentity_RedactsEmail()
    {
        var identity = "john.doe@example.com";

        var redacted = _guard.RedactIdentity(identity);

        Assert.DoesNotContain("john.doe", redacted);
        Assert.DoesNotContain("example.com", redacted);
    }

    [Fact]
    public void RedactIdentity_PreservesEmailDomainWhenConfigured()
    {
        var options = new JobRedactionGuardOptions(
            SensitivePropertyPatterns: JobRedactionGuardOptions.Default.SensitivePropertyPatterns,
            SensitiveContentPatterns: JobRedactionGuardOptions.Default.SensitiveContentPatterns,
            HashRedactedValues: true,
            MaxOutputLength: 64 * 1024,
            PreserveEmailDomain: true);

        var guard = new JobRedactionGuard(options);
        var identity = "john.doe@example.com";

        var redacted = guard.RedactIdentity(identity);

        // Local part is redacted, domain survives when PreserveEmailDomain is on.
        Assert.DoesNotContain("john.doe", redacted);
        Assert.Contains("@example.com", redacted);
    }

    [Fact]
    public void RedactError_RedactsSensitiveContent()
    {
        var error = new JobCapsuleError(
            Code: "AUTH_FAILED",
            Message: "Authentication failed with token: bearer_secret123",
            Category: "authentication",
            Retryable: false);

        var redacted = _guard.RedactError(error);

        // Only the message is scrubbed; structural fields pass through untouched.
        Assert.DoesNotContain("bearer_secret123", redacted.Message);
        Assert.Contains("[REDACTED", redacted.Message);
        Assert.Equal(error.Code, redacted.Code);
        Assert.Equal(error.Category, redacted.Category);
    }

    [Fact]
    public void NoOpJobRedactionGuard_PreservesAllData()
    {
        var guard = NoOpJobRedactionGuard.Instance;
        var payload = "{\"password\":\"secret\"}";

        var redacted = guard.RedactPayload(payload);

        Assert.Equal(payload, redacted);
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Tests for InMemoryJobCapsuleStore.
|
||||
/// </summary>
|
||||
public sealed class InMemoryJobCapsuleStoreTests
{
    [Fact]
    public async Task Store_StoresCapsule()
    {
        var store = new InMemoryJobCapsuleStore();
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", JobCapsuleKind.JobScheduling, inputs);

        await store.StoreAsync(capsule);

        Assert.Equal(1, store.Count);
    }

    [Fact]
    public async Task Get_ReturnsCapsule()
    {
        var store = new InMemoryJobCapsuleStore();
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", JobCapsuleKind.JobScheduling, inputs);

        await store.StoreAsync(capsule);
        var retrieved = await store.GetAsync(capsule.CapsuleId);

        Assert.NotNull(retrieved);
        Assert.Equal(capsule.CapsuleId, retrieved.CapsuleId);
    }

    [Fact]
    public async Task Get_ReturnsNullForMissingCapsule()
    {
        var store = new InMemoryJobCapsuleStore();

        var retrieved = await store.GetAsync(Guid.NewGuid());

        Assert.Null(retrieved);
    }

    [Fact]
    public async Task ListForJob_ReturnsMatchingCapsules()
    {
        // Two capsules share jobId; a third belongs to a different job.
        var store = new InMemoryJobCapsuleStore();
        var jobId = Guid.NewGuid();
        var inputs = JobCapsuleInputs.FromPayload("{}");

        var capsule1 = JobCapsule.Create("tenant-1", jobId, "test.job", JobCapsuleKind.JobScheduling, inputs);
        var capsule2 = JobCapsule.Create("tenant-1", jobId, "test.job", JobCapsuleKind.JobCompletion, inputs);
        var capsule3 = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", JobCapsuleKind.JobScheduling, inputs);

        await store.StoreAsync(capsule1);
        await store.StoreAsync(capsule2);
        await store.StoreAsync(capsule3);

        var forJob = await store.ListForJobAsync(jobId);

        Assert.Equal(2, forJob.Count);
        Assert.All(forJob, c => Assert.Equal(jobId, c.JobId));
    }

    [Fact]
    public async Task Clear_RemovesAllCapsules()
    {
        // Await the store call instead of blocking with .Wait(); xUnit supports
        // async Task tests and sync-over-async can starve the thread pool.
        var store = new InMemoryJobCapsuleStore();
        var inputs = JobCapsuleInputs.FromPayload("{}");
        var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", JobCapsuleKind.JobScheduling, inputs);

        await store.StoreAsync(capsule);
        Assert.Equal(1, store.Count);

        store.Clear();

        Assert.Equal(0, store.Count);
    }
}
|
||||
@@ -0,0 +1,298 @@
|
||||
using StellaOps.Orchestrator.Core.Domain.AirGap;
|
||||
using StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Mirror;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for MirrorBundle domain models.
|
||||
/// Per ORCH-AIRGAP-57-001.
|
||||
/// </summary>
|
||||
public sealed class MirrorBundleTests
|
||||
{
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_Default_HasExpectedValues()
|
||||
{
|
||||
var domains = new List<string> { "vex-advisories", "vulnerability-feeds" };
|
||||
var payload = MirrorBundlePayload.Default(domains);
|
||||
|
||||
Assert.Equal(domains, payload.Domains);
|
||||
Assert.Null(payload.StartTime);
|
||||
Assert.Null(payload.EndTime);
|
||||
Assert.Null(payload.TargetEnvironment);
|
||||
Assert.Null(payload.MaxStalenessSeconds);
|
||||
Assert.True(payload.IncludeProvenance);
|
||||
Assert.True(payload.IncludeAuditTrail);
|
||||
Assert.True(payload.SignBundle);
|
||||
Assert.Null(payload.SigningKeyId);
|
||||
Assert.Equal("gzip", payload.Compression);
|
||||
Assert.Null(payload.DestinationUri);
|
||||
Assert.True(payload.IncludeTimeAnchor);
|
||||
Assert.Null(payload.Options);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_ToJson_ProducesValidJson()
|
||||
{
|
||||
var payload = MirrorBundlePayload.Default(["vex-advisories"]);
|
||||
var json = payload.ToJson();
|
||||
|
||||
Assert.NotNull(json);
|
||||
Assert.Contains("\"domains\":", json);
|
||||
Assert.Contains("\"includeProvenance\":true", json);
|
||||
Assert.Contains("\"compression\":\"gzip\"", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_FromJson_Roundtrips()
|
||||
{
|
||||
var original = new MirrorBundlePayload(
|
||||
Domains: ["vex-advisories", "vulnerability-feeds"],
|
||||
StartTime: DateTimeOffset.Parse("2025-01-01T00:00:00Z"),
|
||||
EndTime: DateTimeOffset.Parse("2025-12-01T00:00:00Z"),
|
||||
TargetEnvironment: "air-gapped-prod",
|
||||
MaxStalenessSeconds: 86400,
|
||||
IncludeProvenance: true,
|
||||
IncludeAuditTrail: true,
|
||||
SignBundle: true,
|
||||
SigningKeyId: "key-123",
|
||||
Compression: "zstd",
|
||||
DestinationUri: "s3://bundles/export.tar.gz",
|
||||
IncludeTimeAnchor: true,
|
||||
Options: new Dictionary<string, string> { ["extra"] = "value" });
|
||||
|
||||
var json = original.ToJson();
|
||||
var restored = MirrorBundlePayload.FromJson(json);
|
||||
|
||||
Assert.NotNull(restored);
|
||||
Assert.Equal(original.Domains, restored.Domains);
|
||||
Assert.Equal(original.TargetEnvironment, restored.TargetEnvironment);
|
||||
Assert.Equal(original.MaxStalenessSeconds, restored.MaxStalenessSeconds);
|
||||
Assert.Equal(original.SigningKeyId, restored.SigningKeyId);
|
||||
Assert.Equal(original.Compression, restored.Compression);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_ComputeDigest_IsDeterministic()
|
||||
{
|
||||
var payload = MirrorBundlePayload.Default(["vex-advisories"]);
|
||||
|
||||
var digest1 = payload.ComputeDigest();
|
||||
var digest2 = payload.ComputeDigest();
|
||||
|
||||
Assert.Equal(digest1, digest2);
|
||||
Assert.StartsWith("sha256:", digest1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_ComputeDigest_DifferentPayloadsHaveDifferentDigests()
|
||||
{
|
||||
var payload1 = MirrorBundlePayload.Default(["vex-advisories"]);
|
||||
var payload2 = MirrorBundlePayload.Default(["vulnerability-feeds"]);
|
||||
|
||||
Assert.NotEqual(payload1.ComputeDigest(), payload2.ComputeDigest());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundlePayload_FromJson_ReturnsNullForInvalidJson()
|
||||
{
|
||||
Assert.Null(MirrorBundlePayload.FromJson("not valid json"));
|
||||
Assert.Null(MirrorBundlePayload.FromJson("{invalid}"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundleResult_ToJson_ProducesValidJson()
|
||||
{
|
||||
var result = new MirrorBundleResult(
|
||||
OutputUri: "s3://bundles/bundle-123.tar.gz",
|
||||
BundleDigest: "sha256:abc123",
|
||||
ManifestDigest: "sha256:def456",
|
||||
BundleSizeBytes: 1024000,
|
||||
IncludedDomains: ["vex-advisories"],
|
||||
Exports: [new ExportRecord(
|
||||
ExportId: Guid.NewGuid(),
|
||||
Key: "vex-advisories",
|
||||
Format: ExportFormat.OpenVex,
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
ArtifactDigest: "sha256:abc",
|
||||
RecordCount: 100)],
|
||||
ProvenanceUri: "s3://bundles/bundle-123.provenance.json",
|
||||
AuditTrailUri: "s3://bundles/bundle-123.audit.ndjson",
|
||||
AuditEntryCount: 50,
|
||||
TimeAnchor: new TimeAnchor(
|
||||
AnchorType: TimeAnchorType.Ntp,
|
||||
Timestamp: DateTimeOffset.UtcNow,
|
||||
Source: "pool.ntp.org",
|
||||
Uncertainty: 100,
|
||||
SignatureDigest: null,
|
||||
Verified: true),
|
||||
Compression: "gzip",
|
||||
SourceEnvironment: "prod",
|
||||
TargetEnvironment: "air-gapped",
|
||||
GeneratedAt: DateTimeOffset.UtcNow,
|
||||
DurationSeconds: 15.5,
|
||||
Signature: null);
|
||||
|
||||
var json = result.ToJson();
|
||||
|
||||
Assert.NotNull(json);
|
||||
Assert.Contains("\"outputUri\":", json);
|
||||
Assert.Contains("\"bundleDigest\":\"sha256:abc123\"", json);
|
||||
Assert.Contains("\"auditEntryCount\":50", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundleProgress_ProgressPercent_CalculatesCorrectly()
|
||||
{
|
||||
var progress = new MirrorBundleProgress(
|
||||
Phase: MirrorPhase.CollectingDomainData,
|
||||
DomainsProcessed: 2,
|
||||
TotalDomains: 4,
|
||||
RecordsProcessed: 100,
|
||||
BytesWritten: 10240,
|
||||
AuditEntriesCollected: 25,
|
||||
Message: "Processing vex-advisories");
|
||||
|
||||
Assert.Equal(50.0, progress.ProgressPercent);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundleProgress_ProgressPercent_ReturnsNullWhenTotalIsZero()
|
||||
{
|
||||
var progress = new MirrorBundleProgress(
|
||||
Phase: MirrorPhase.Initializing,
|
||||
DomainsProcessed: 0,
|
||||
TotalDomains: 0,
|
||||
RecordsProcessed: 0,
|
||||
BytesWritten: 0,
|
||||
AuditEntriesCollected: 0,
|
||||
Message: null);
|
||||
|
||||
Assert.Null(progress.ProgressPercent);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorAuditEntry_ComputeDigest_IsDeterministic()
|
||||
{
|
||||
var entry = new MirrorAuditEntry(
|
||||
EntryId: Guid.Parse("12345678-1234-1234-1234-123456789012"),
|
||||
EventType: "bundle.created",
|
||||
Timestamp: DateTimeOffset.Parse("2025-12-01T12:00:00Z"),
|
||||
Actor: "system",
|
||||
DomainId: "vex-advisories",
|
||||
EntityId: Guid.Parse("87654321-1234-1234-1234-123456789012"),
|
||||
Details: "Bundle created successfully",
|
||||
ContentHash: "sha256:abc",
|
||||
CorrelationId: "corr-123");
|
||||
|
||||
var digest1 = entry.ComputeDigest();
|
||||
var digest2 = entry.ComputeDigest();
|
||||
|
||||
Assert.Equal(digest1, digest2);
|
||||
Assert.StartsWith("sha256:", digest1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundleManifest_ComputeDigest_IsDeterministic()
|
||||
{
|
||||
var manifest = new MirrorBundleManifest(
|
||||
BundleId: Guid.NewGuid(),
|
||||
SchemaVersion: MirrorBundleManifest.CurrentSchemaVersion,
|
||||
SourceEnvironment: "prod",
|
||||
TargetEnvironment: "air-gapped",
|
||||
CreatedAt: DateTimeOffset.Parse("2025-12-01T12:00:00Z"),
|
||||
Domains: [new MirrorDomainEntry(
|
||||
DomainId: "vex-advisories",
|
||||
Format: ExportFormat.OpenVex,
|
||||
FilePath: "exports/vex-advisories.json",
|
||||
Digest: "sha256:abc",
|
||||
SizeBytes: 1024,
|
||||
RecordCount: 100,
|
||||
SourceTimestamp: DateTimeOffset.Parse("2025-12-01T00:00:00Z"),
|
||||
StalenessSeconds: 43200)],
|
||||
TimeAnchor: null,
|
||||
Provenance: new BundleProvenance(
|
||||
BundleId: Guid.NewGuid(),
|
||||
DomainId: "vex-advisories",
|
||||
ImportedAt: DateTimeOffset.UtcNow,
|
||||
SourceTimestamp: DateTimeOffset.UtcNow.AddHours(-12),
|
||||
SourceEnvironment: "prod",
|
||||
BundleDigest: "sha256:abc",
|
||||
ManifestDigest: "sha256:def",
|
||||
TimeAnchor: null,
|
||||
Exports: null,
|
||||
Metadata: null),
|
||||
AuditSummary: null,
|
||||
Metadata: null);
|
||||
|
||||
var digest1 = manifest.ComputeDigest();
|
||||
var digest2 = manifest.ComputeDigest();
|
||||
|
||||
Assert.Equal(digest1, digest2);
|
||||
Assert.StartsWith("sha256:", digest1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MirrorBundleManifest_CurrentSchemaVersion_Is1_0_0()
|
||||
{
|
||||
Assert.Equal("1.0.0", MirrorBundleManifest.CurrentSchemaVersion);
|
||||
}
|
||||
|
||||
[Theory]
[InlineData(MirrorPhase.Initializing, 0)]
[InlineData(MirrorPhase.ValidatingStaleness, 1)]
[InlineData(MirrorPhase.CollectingDomainData, 2)]
[InlineData(MirrorPhase.CollectingAuditTrail, 3)]
[InlineData(MirrorPhase.GeneratingProvenance, 4)]
[InlineData(MirrorPhase.CreatingTimeAnchor, 5)]
[InlineData(MirrorPhase.Compressing, 6)]
[InlineData(MirrorPhase.Signing, 7)]
[InlineData(MirrorPhase.Uploading, 8)]
[InlineData(MirrorPhase.Finalizing, 9)]
[InlineData(MirrorPhase.Completed, 10)]
public void MirrorPhase_HasExpectedValues(MirrorPhase phase, int expectedValue)
    // Numeric values are pinned because they are likely persisted/serialized;
    // reordering enum members would silently corrupt stored progress records.
    => Assert.Equal(expectedValue, (int)phase);
|
||||
|
||||
[Fact]
public void MirrorBundleSignature_StoresAllFields()
{
    // Arrange: a fully-populated signature record.
    var signedAt = DateTimeOffset.Parse("2025-12-01T12:00:00Z");
    var signature = new MirrorBundleSignature(
        Algorithm: "ECDSA-P256-SHA256",
        KeyId: "key-123",
        SignatureValue: "base64signature==",
        SignedAt: signedAt,
        PayloadType: "application/vnd.stellaops.bundle+json",
        EnvelopeUri: "s3://bundles/bundle.dsse");

    // Assert: every constructor argument round-trips through its property.
    Assert.Equal("ECDSA-P256-SHA256", signature.Algorithm);
    Assert.Equal("key-123", signature.KeyId);
    Assert.Equal("base64signature==", signature.SignatureValue);
    // Fix: SignedAt was populated but never verified, so the test name
    // ("StoresAllFields") was previously inaccurate.
    Assert.Equal(signedAt, signature.SignedAt);
    Assert.Equal("application/vnd.stellaops.bundle+json", signature.PayloadType);
    Assert.Equal("s3://bundles/bundle.dsse", signature.EnvelopeUri);
}
|
||||
|
||||
[Fact]
public void MirrorAuditSummary_StoresAllFields()
{
    // Arrange: a fully-populated audit summary.
    var earliest = DateTimeOffset.Parse("2025-11-01T00:00:00Z");
    var latest = DateTimeOffset.Parse("2025-12-01T00:00:00Z");
    var summary = new MirrorAuditSummary(
        TotalEntries: 100,
        FilePath: "audit/trail.ndjson",
        Digest: "sha256:abc123",
        SizeBytes: 51200,
        EarliestEntry: earliest,
        LatestEntry: latest,
        EventTypeCounts: new Dictionary<string, int>
        {
            ["bundle.created"] = 10,
            ["bundle.imported"] = 20,
            ["domain.updated"] = 70
        });

    // Assert: every constructor argument round-trips through its property.
    Assert.Equal(100, summary.TotalEntries);
    Assert.Equal("audit/trail.ndjson", summary.FilePath);
    Assert.Equal("sha256:abc123", summary.Digest);
    Assert.Equal(51200, summary.SizeBytes);
    // Fix: the entry-window timestamps were set but never verified.
    Assert.Equal(earliest, summary.EarliestEntry);
    Assert.Equal(latest, summary.LatestEntry);
    // Fix: verify the individual counts, not just the number of keys.
    Assert.Equal(3, summary.EventTypeCounts.Count);
    Assert.Equal(10, summary.EventTypeCounts["bundle.created"]);
    Assert.Equal(20, summary.EventTypeCounts["bundle.imported"]);
    Assert.Equal(70, summary.EventTypeCounts["domain.updated"]);
}
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
using StellaOps.Orchestrator.Core.Domain.Mirror;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Mirror;
|
||||
|
||||
/// <summary>
/// Tests for MirrorJobTypes constants and helpers.
/// Per ORCH-AIRGAP-57-001.
/// </summary>
public sealed class MirrorJobTypesTests
{
    [Fact]
    public void Prefix_HasExpectedValue()
        => Assert.Equal("mirror.", MirrorJobTypes.Prefix);

    [Fact]
    public void All_ContainsAllDefinedTypes()
    {
        var all = MirrorJobTypes.All;

        Assert.Contains(MirrorJobTypes.Bundle, all);
        Assert.Contains(MirrorJobTypes.Import, all);
        Assert.Contains(MirrorJobTypes.Verify, all);
        Assert.Contains(MirrorJobTypes.Sync, all);
        Assert.Contains(MirrorJobTypes.Diff, all);
    }

    [Fact]
    public void All_TypesStartWithPrefix()
        // Every registered mirror job type must share the "mirror." namespace.
        => Assert.All(MirrorJobTypes.All, jobType => Assert.StartsWith(MirrorJobTypes.Prefix, jobType));

    [Theory]
    [InlineData("mirror.bundle", true)]
    [InlineData("mirror.import", true)]
    [InlineData("mirror.verify", true)]
    [InlineData("mirror.sync", true)]
    [InlineData("mirror.diff", true)]
    [InlineData("mirror.custom", true)]
    [InlineData("MIRROR.BUNDLE", true)]
    [InlineData("export.ledger", false)]
    [InlineData("scan.image", false)]
    [InlineData("", false)]
    [InlineData(null, false)]
    public void IsMirrorJob_ReturnsCorrectResult(string? jobType, bool expected)
        // Note the MIRROR.BUNDLE case: the check is expected to be case-insensitive.
        => Assert.Equal(expected, MirrorJobTypes.IsMirrorJob(jobType));

    [Theory]
    [InlineData("mirror.bundle", "bundle")]
    [InlineData("mirror.import", "import")]
    [InlineData("mirror.verify", "verify")]
    [InlineData("mirror.sync", "sync")]
    [InlineData("mirror.diff", "diff")]
    [InlineData("mirror.custom-operation", "custom-operation")]
    public void GetMirrorOperation_ReturnsOperationForMirrorJob(string jobType, string expectedOperation)
        => Assert.Equal(expectedOperation, MirrorJobTypes.GetMirrorOperation(jobType));

    [Theory]
    [InlineData("export.ledger")]
    [InlineData("scan.image")]
    [InlineData("")]
    [InlineData(null)]
    public void GetMirrorOperation_ReturnsNullForNonMirrorJob(string? jobType)
        => Assert.Null(MirrorJobTypes.GetMirrorOperation(jobType));

    [Fact]
    public void GetMirrorOperation_ReturnsNullForPrefixOnly()
        // A bare prefix carries no operation name and must not yield "".
        => Assert.Null(MirrorJobTypes.GetMirrorOperation("mirror."));

    [Fact]
    public void JobTypes_HaveExpectedValues()
    {
        // These literals are wire/queue identifiers; pin them against accidental renames.
        Assert.Equal("mirror.bundle", MirrorJobTypes.Bundle);
        Assert.Equal("mirror.import", MirrorJobTypes.Import);
        Assert.Equal("mirror.verify", MirrorJobTypes.Verify);
        Assert.Equal("mirror.sync", MirrorJobTypes.Sync);
        Assert.Equal("mirror.diff", MirrorJobTypes.Diff);
    }
}
|
||||
@@ -0,0 +1,542 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Orchestrator.Core.Domain.Events;
|
||||
using StellaOps.Orchestrator.Core.Observability;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Observability;
|
||||
|
||||
/// <summary>
/// Tests for IncidentModeHooks.
/// Per ORCH-OBS-55-001: incident mode hooks with sampling overrides,
/// extended retention, debug spans, and automatic activation on SLO burn-rate breach.
/// </summary>
public class IncidentModeHooksTests
{
    private readonly TestIncidentModeEmitter _emitter = new();
    private readonly IncidentModeHooksOptions _options;
    private readonly IncidentModeHooks _hooks;

    public IncidentModeHooksTests()
    {
        // Explicit options (matching the documented defaults) so assertions below
        // do not depend on the production default values.
        _options = new IncidentModeHooksOptions
        {
            DefaultTtl = TimeSpan.FromHours(4),
            BurnRateActivationThreshold = 6.0,
            SamplingRateOverride = 1.0,
            RetentionOverride = TimeSpan.FromDays(30),
            NormalSamplingRate = 0.1,
            NormalRetention = TimeSpan.FromDays(7),
            EnableDebugSpans = true,
            ReactivationCooldown = TimeSpan.FromMinutes(15)
        };

        _hooks = new IncidentModeHooks(_emitter, NullLogger<IncidentModeHooks>.Instance, _options);
    }

    [Fact]
    public void Constructor_WithNullEmitter_ThrowsArgumentNullException()
        => Assert.Throws<ArgumentNullException>(
            () => new IncidentModeHooks(null!, NullLogger<IncidentModeHooks>.Instance));

    [Fact]
    public void Constructor_WithNullLogger_ThrowsArgumentNullException()
        => Assert.Throws<ArgumentNullException>(
            () => new IncidentModeHooks(_emitter, null!));

    [Fact]
    public void IsActive_WhenNotActivated_ReturnsFalse()
        => Assert.False(_hooks.IsActive("tenant-1"));

    [Fact]
    public async Task ActivateAsync_ActivatesIncidentMode()
    {
        var result = await _hooks.ActivateAsync("tenant-1", "test-user", "Manual activation for testing");

        Assert.True(result.Success);
        Assert.False(result.WasAlreadyActive);
        Assert.True(result.State.IsActive);
        Assert.Equal("test-user", result.State.ActivatedBy);
        Assert.Equal("Manual activation for testing", result.State.ActivationReason);
        Assert.Equal(IncidentModeSource.Manual, result.State.Source);
    }

    [Fact]
    public async Task ActivateAsync_WithApiActor_SetsApiSource()
    {
        // The "api:" actor prefix is expected to be classified as an API activation.
        var result = await _hooks.ActivateAsync("tenant-1", "api:automation-service", "API activation");

        Assert.Equal(IncidentModeSource.Api, result.State.Source);
    }

    [Fact]
    public async Task ActivateAsync_WithCliActor_SetsCliSource()
    {
        // The "cli:" actor prefix is expected to be classified as a CLI activation.
        var result = await _hooks.ActivateAsync("tenant-1", "cli:admin", "CLI activation");

        Assert.Equal(IncidentModeSource.Cli, result.State.Source);
    }

    [Fact]
    public async Task ActivateAsync_WithCustomTtl_SetsExpirationCorrectly()
    {
        var ttl = TimeSpan.FromHours(2);

        var result = await _hooks.ActivateAsync("tenant-1", "test", "reason", ttl);

        // Expiry must be derived from the activation timestamp plus the supplied TTL.
        Assert.True(result.State.ExpiresAt.HasValue);
        Assert.Equal(result.State.ActivatedAt!.Value + ttl, result.State.ExpiresAt.Value);
    }

    [Fact]
    public async Task ActivateAsync_WhenAlreadyActive_ReturnsAlreadyActive()
    {
        await _hooks.ActivateAsync("tenant-1", "first-user", "first activation");

        var result = await _hooks.ActivateAsync("tenant-1", "second-user", "second activation");

        // Re-activation is idempotent: still a success, flagged as already active.
        Assert.True(result.Success);
        Assert.True(result.WasAlreadyActive);
    }

    [Fact]
    public async Task ActivateAsync_EmitsTimelineEvent()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "reason");

        Assert.Contains(
            _emitter.EmittedEvents,
            e => e.TenantId == "tenant-1" && e.EventType == "orchestrator.incident_mode.activated");
    }

    [Fact]
    public async Task DeactivateAsync_DeactivatesIncidentMode()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "activation");

        var result = await _hooks.DeactivateAsync("tenant-1", "test", "issue resolved");

        Assert.True(result.Success);
        Assert.True(result.WasActive);
        Assert.False(_hooks.IsActive("tenant-1"));
    }

    [Fact]
    public async Task DeactivateAsync_WhenNotActive_ReturnsWasNotActive()
    {
        var result = await _hooks.DeactivateAsync("tenant-1", "test", "reason");

        // Deactivating an inactive tenant is a no-op, not an error.
        Assert.True(result.Success);
        Assert.False(result.WasActive);
    }

    [Fact]
    public async Task DeactivateAsync_EmitsTimelineEvent()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "activation");
        _emitter.Clear(); // discard the activation event so only deactivation is asserted

        await _hooks.DeactivateAsync("tenant-1", "test", "resolved");

        Assert.Contains(
            _emitter.EmittedEvents,
            e => e.TenantId == "tenant-1" && e.EventType == "orchestrator.incident_mode.deactivated");
    }

    [Fact]
    public async Task EvaluateBurnRateBreachAsync_BelowThreshold_DoesNotActivate()
    {
        // Burn rate 3.0 is below the 6.0 threshold.
        var result = await _hooks.EvaluateBurnRateBreachAsync("tenant-1", "test_slo", 3.0, 6.0);

        Assert.False(result.Success);
        Assert.False(_hooks.IsActive("tenant-1"));
    }

    [Fact]
    public async Task EvaluateBurnRateBreachAsync_AboveThreshold_ActivatesIncidentMode()
    {
        // Burn rate 10.0 exceeds the 6.0 threshold.
        var result = await _hooks.EvaluateBurnRateBreachAsync("tenant-1", "test_slo", 10.0, 6.0);

        Assert.True(result.Success);
        Assert.True(_hooks.IsActive("tenant-1"));
        Assert.Equal(IncidentModeSource.BurnRateAlert, result.State.Source);
    }

    [Fact]
    public async Task EvaluateBurnRateBreachAsync_DuringCooldown_DoesNotReactivate()
    {
        await _hooks.EvaluateBurnRateBreachAsync("tenant-1", "test_slo", 10.0, 6.0);
        await _hooks.DeactivateAsync("tenant-1", "system", "recovered");

        // Immediate re-breach falls inside the 15-minute reactivation cooldown.
        var result = await _hooks.EvaluateBurnRateBreachAsync("tenant-1", "test_slo", 10.0, 6.0);

        Assert.False(result.Success);
        Assert.Contains("Cooldown", result.ErrorMessage);
    }

    [Fact]
    public async Task GetState_ReturnsCurrentState()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "reason");

        var state = _hooks.GetState("tenant-1");

        Assert.True(state.IsActive);
        Assert.NotNull(state.ActivatedAt);
        Assert.NotNull(state.ExpiresAt);
    }

    [Fact]
    public void GetState_WhenNotActivated_ReturnsInactiveState()
    {
        var state = _hooks.GetState("tenant-1");

        Assert.False(state.IsActive);
        Assert.Equal(IncidentModeState.Inactive, state);
    }

    [Fact]
    public async Task GetEffectiveSamplingRate_WhenActive_ReturnsOverrideRate()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "reason");

        Assert.Equal(_options.SamplingRateOverride, _hooks.GetEffectiveSamplingRate("tenant-1"));
    }

    [Fact]
    public void GetEffectiveSamplingRate_WhenNotActive_ReturnsNormalRate()
        => Assert.Equal(_options.NormalSamplingRate, _hooks.GetEffectiveSamplingRate("tenant-1"));

    [Fact]
    public async Task GetEffectiveRetention_WhenActive_ReturnsOverrideRetention()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "reason");

        Assert.Equal(_options.RetentionOverride, _hooks.GetEffectiveRetention("tenant-1"));
    }

    [Fact]
    public void GetEffectiveRetention_WhenNotActive_ReturnsNormalRetention()
        => Assert.Equal(_options.NormalRetention, _hooks.GetEffectiveRetention("tenant-1"));

    [Fact]
    public async Task IsDebugSpansEnabled_WhenActive_ReturnsTrue()
    {
        await _hooks.ActivateAsync("tenant-1", "test", "reason");

        Assert.True(_hooks.IsDebugSpansEnabled("tenant-1"));
    }

    [Fact]
    public void IsDebugSpansEnabled_WhenNotActive_ReturnsFalse()
        => Assert.False(_hooks.IsDebugSpansEnabled("tenant-1"));
}
|
||||
|
||||
/// <summary>
/// Tests for the IncidentModeActivationResult factory methods.
/// </summary>
public class IncidentModeActivationResultTests
{
    /// <summary>Builds the active-state fixture shared by the factory tests.</summary>
    private static IncidentModeState CreateActiveState()
        => new(
            true, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow.AddHours(4),
            "test", "reason", IncidentModeSource.Manual,
            1.0, TimeSpan.FromDays(30), true);

    [Fact]
    public void Activated_CreatesSuccessResult()
    {
        var state = CreateActiveState();

        var result = IncidentModeActivationResult.Activated(state);

        Assert.True(result.Success);
        Assert.False(result.WasAlreadyActive);
        Assert.Equal(state, result.State);
    }

    [Fact]
    public void AlreadyActive_CreatesSuccessResultWithFlag()
    {
        var result = IncidentModeActivationResult.AlreadyActive(CreateActiveState());

        // "Already active" is still a success, just flagged for the caller.
        Assert.True(result.Success);
        Assert.True(result.WasAlreadyActive);
    }

    [Fact]
    public void Failed_CreatesErrorResult()
    {
        var result = IncidentModeActivationResult.Failed("test error");

        Assert.False(result.Success);
        Assert.Equal("test error", result.ErrorMessage);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for the IncidentModeDeactivationResult factory methods.
/// </summary>
public class IncidentModeDeactivationResultTests
{
    [Fact]
    public void Deactivated_CreatesSuccessResult()
    {
        var result = IncidentModeDeactivationResult.Deactivated();

        Assert.True(result.Success);
        Assert.True(result.WasActive);
    }

    [Fact]
    public void WasNotActive_CreatesSuccessResultWithFlag()
    {
        var result = IncidentModeDeactivationResult.WasNotActive();

        // Deactivating an inactive tenant is treated as a successful no-op.
        Assert.True(result.Success);
        Assert.False(result.WasActive);
    }

    [Fact]
    public void Failed_CreatesErrorResult()
    {
        var result = IncidentModeDeactivationResult.Failed("test error");

        Assert.False(result.Success);
        Assert.Equal("test error", result.ErrorMessage);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for the IncidentModeState.Inactive sentinel.
/// </summary>
public class IncidentModeStateTests
{
    [Fact]
    public void Inactive_HasCorrectDefaults()
    {
        var inactive = IncidentModeState.Inactive;

        // The sentinel represents "no incident": no timestamps, no actor,
        // no source, and every override at its zero value.
        Assert.False(inactive.IsActive);
        Assert.Null(inactive.ActivatedAt);
        Assert.Null(inactive.ExpiresAt);
        Assert.Null(inactive.ActivatedBy);
        Assert.Null(inactive.ActivationReason);
        Assert.Equal(IncidentModeSource.None, inactive.Source);
        Assert.Equal(0.0, inactive.SamplingRateOverride);
        Assert.Equal(TimeSpan.Zero, inactive.RetentionOverride);
        Assert.False(inactive.DebugSpansEnabled);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for IncidentModeHooksOptions defaults and configuration binding metadata.
/// </summary>
public class IncidentModeHooksOptionsTests
{
    [Fact]
    public void DefaultValues_AreCorrect()
    {
        var defaults = new IncidentModeHooksOptions();

        // Defaults are part of the operational contract (what a deployment gets
        // with no configuration), so each one is pinned explicitly.
        Assert.Equal(TimeSpan.FromHours(4), defaults.DefaultTtl);
        Assert.Equal(6.0, defaults.BurnRateActivationThreshold);
        Assert.Equal(1.0, defaults.SamplingRateOverride);
        Assert.Equal(TimeSpan.FromDays(30), defaults.RetentionOverride);
        Assert.Equal(0.1, defaults.NormalSamplingRate);
        Assert.Equal(TimeSpan.FromDays(7), defaults.NormalRetention);
        Assert.True(defaults.EnableDebugSpans);
        Assert.Equal(TimeSpan.FromMinutes(15), defaults.ReactivationCooldown);
    }

    [Fact]
    public void SectionName_IsCorrect()
        // The configuration key is consumed by appsettings binding; renaming it
        // would silently drop operator configuration.
        => Assert.Equal("Orchestrator:IncidentMode", IncidentModeHooksOptions.SectionName);
}
|
||||
|
||||
/// <summary>
/// Test implementation of ITimelineEventEmitter for incident mode tests.
/// Records a (tenantId, eventType) pair for every emit call so tests can
/// assert exactly which timeline events were produced, in order.
/// </summary>
internal sealed class TestIncidentModeEmitter : ITimelineEventEmitter
{
    private readonly List<(string TenantId, string EventType)> _emittedEvents = new();

    /// <summary>All recorded events, in emission order.</summary>
    public IReadOnlyList<(string TenantId, string EventType)> EmittedEvents => _emittedEvents;

    public Task<TimelineEmitResult> EmitAsync(TimelineEvent evt, CancellationToken cancellationToken = default)
    {
        _emittedEvents.Add((evt.TenantId, evt.EventType));
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    public Task<TimelineBatchEmitResult> EmitBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default)
    {
        // Fix: materialize the sequence once. The previous implementation
        // enumerated `events` twice (foreach + Count()), which re-runs lazy
        // sequences — potentially double-recording events and skewing the
        // reported success count.
        var batch = events as IReadOnlyCollection<TimelineEvent> ?? events.ToList();
        foreach (var evt in batch)
        {
            _emittedEvents.Add((evt.TenantId, evt.EventType));
        }

        return Task.FromResult(new TimelineBatchEmitResult(batch.Count, 0, 0, Array.Empty<string>()));
    }

    public Task<TimelineEmitResult> EmitJobEventAsync(
        string tenantId, Guid jobId, string eventType,
        object? payload = null, string? actor = null, string? correlationId = null,
        string? traceId = null, string? projectId = null,
        IReadOnlyDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        _emittedEvents.Add((tenantId, eventType));
        var evt = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, jobId: jobId);
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    public Task<TimelineEmitResult> EmitRunEventAsync(
        string tenantId, Guid runId, string eventType,
        object? payload = null, string? actor = null, string? correlationId = null,
        string? traceId = null, string? projectId = null,
        IReadOnlyDictionary<string, string>? attributes = null,
        CancellationToken cancellationToken = default)
    {
        _emittedEvents.Add((tenantId, eventType));
        var evt = TimelineEvent.Create(tenantId, eventType, "test", DateTimeOffset.UtcNow, runId: runId);
        return Task.FromResult(new TimelineEmitResult(true, evt, false, null));
    }

    /// <summary>Clears recorded events; used to isolate assertions between test phases.</summary>
    public void Clear() => _emittedEvents.Clear();
}
|
||||
@@ -0,0 +1,355 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Orchestrator.Infrastructure.Observability;
|
||||
using StellaOps.Telemetry.Core;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Observability;
|
||||
|
||||
/// <summary>
/// Tests for OrchestratorGoldenSignals.
/// Per ORCH-OBS-51-001.
/// </summary>
public class OrchestratorGoldenSignalsTests
{
    private readonly GoldenSignalMetrics _metrics;
    private readonly OrchestratorGoldenSignals _sut;

    public OrchestratorGoldenSignalsTests()
    {
        _metrics = new GoldenSignalMetrics(new GoldenSignalMetricsOptions(), null);
        _sut = new OrchestratorGoldenSignals(_metrics, NullLogger<OrchestratorGoldenSignals>.Instance);
    }

    [Fact]
    public void Constructor_WithNullMetrics_ThrowsArgumentNullException()
    {
        Assert.Throws<ArgumentNullException>(() =>
            new OrchestratorGoldenSignals(null!, NullLogger<OrchestratorGoldenSignals>.Instance));
    }

    [Fact]
    public void Constructor_WithNullLogger_ThrowsArgumentNullException()
    {
        Assert.Throws<ArgumentNullException>(() =>
            new OrchestratorGoldenSignals(_metrics, null!));
    }

    // The Record* methods emit metrics with no observable return value, so these
    // tests pin "does not throw" explicitly via Record.Exception instead of the
    // previous tautological Assert.True(true), which could never fail.

    [Fact]
    public void RecordSchedulingLatency_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordSchedulingLatency("tenant-1", "scan.image", 150.5));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordDispatchLatency_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordDispatchLatency("tenant-1", "scan.image", 250.0));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordJobLatency_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordJobLatency("tenant-1", "scan.image", 45.5));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordRequest_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordRequest("tenant-1", "/api/v1/orchestrator/jobs", "POST", 201));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordJobError_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordJobError("tenant-1", "scan.image", "timeout"));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordApiError_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordApiError("tenant-1", "/api/v1/orchestrator/jobs", "validation"));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordSchedulingError_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordSchedulingError("tenant-1", "scan.image", "quota_exceeded"));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordJobCreated_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordJobCreated("tenant-1", "scan.image"));

        Assert.Null(ex);
    }

    [Fact]
    public void RecordRunCreated_RecordsMetric()
    {
        var ex = Record.Exception(() => _sut.RecordRunCreated("tenant-1", "scheduled"));

        Assert.Null(ex);
    }

    [Fact]
    public void MeasureLatency_ReturnsDisposable()
    {
        using var scope = _sut.MeasureLatency("tenant-1", "test_operation");

        Assert.NotNull(scope);
    }

    [Fact]
    public void StartActivity_ReturnsActivity()
    {
        // The activity may be null when no ActivityListener is registered;
        // the contract under test is only that the call (and disposal) is safe.
        var ex = Record.Exception(() =>
        {
            using var activity = _sut.StartActivity("test_operation");
        });

        Assert.Null(ex);
    }

    [Fact]
    public void StartSchedulingActivity_SetsCorrectTags()
    {
        var tenantId = "tenant-1";
        var jobType = "scan.image";
        var jobId = Guid.NewGuid();

        using var activity = _sut.StartSchedulingActivity(tenantId, jobType, jobId);

        // Without a registered listener the activity may be null; when it exists,
        // it must carry the standard orchestrator tags.
        if (activity is not null)
        {
            Assert.Equal(tenantId, activity.GetTagItem("tenant_id"));
            Assert.Equal(jobType, activity.GetTagItem("job_type"));
            Assert.Equal(jobId.ToString(), activity.GetTagItem("job_id"));
        }
    }

    [Fact]
    public void StartDispatchActivity_SetsCorrectTags()
    {
        var tenantId = "tenant-1";
        var jobType = "scan.image";
        var jobId = Guid.NewGuid();

        using var activity = _sut.StartDispatchActivity(tenantId, jobType, jobId);

        // Without a registered listener the activity may be null; when it exists,
        // it must carry the standard orchestrator tags.
        if (activity is not null)
        {
            Assert.Equal(tenantId, activity.GetTagItem("tenant_id"));
            Assert.Equal(jobType, activity.GetTagItem("job_type"));
            Assert.Equal(jobId.ToString(), activity.GetTagItem("job_id"));
        }
    }

    [Fact]
    public void SetQueueSaturationProvider_RegistersProvider()
    {
        Func<double> provider = () => 0.75;

        var ex = Record.Exception(() => _sut.SetQueueSaturationProvider(provider));

        Assert.Null(ex);
    }

    [Fact]
    public void ActivitySource_HasCorrectName()
    {
        Assert.Equal("StellaOps.Orchestrator", OrchestratorGoldenSignals.ActivitySource.Name);
        Assert.Equal("1.0.0", OrchestratorGoldenSignals.ActivitySource.Version);
    }
}
|
||||
|
||||
/// <summary>
/// Tests pinning the OrchestratorSloDefinitions catalog: names, objectives,
/// windows, and thresholds are operational contracts consumed by alerting.
/// </summary>
public class OrchestratorSloDefinitionsTests
{
    [Fact]
    public void SchedulingLatency_HasCorrectValues()
    {
        var definition = OrchestratorSloDefinitions.SchedulingLatency;

        Assert.Equal("orchestrator_scheduling_latency", definition.Name);
        Assert.Equal(0.99, definition.Objective);
        Assert.Equal(TimeSpan.FromDays(7), definition.Window);
        Assert.Equal(5.0, definition.ThresholdSeconds);
    }

    [Fact]
    public void DispatchLatency_HasCorrectValues()
    {
        var definition = OrchestratorSloDefinitions.DispatchLatency;

        Assert.Equal("orchestrator_dispatch_latency", definition.Name);
        Assert.Equal(0.995, definition.Objective);
        Assert.Equal(TimeSpan.FromDays(7), definition.Window);
        Assert.Equal(10.0, definition.ThresholdSeconds);
    }

    [Fact]
    public void JobSuccessRate_HasCorrectValues()
    {
        var definition = OrchestratorSloDefinitions.JobSuccessRate;

        Assert.Equal("orchestrator_job_success_rate", definition.Name);
        Assert.Equal(0.99, definition.Objective);
        Assert.Equal(TimeSpan.FromDays(7), definition.Window);
        // A rate-based SLO has no latency threshold.
        Assert.Null(definition.ThresholdSeconds);
    }

    [Fact]
    public void ApiAvailability_HasCorrectValues()
    {
        var definition = OrchestratorSloDefinitions.ApiAvailability;

        Assert.Equal("orchestrator_api_availability", definition.Name);
        Assert.Equal(0.999, definition.Objective);
        Assert.Equal(TimeSpan.FromDays(7), definition.Window);
    }

    [Fact]
    public void All_ContainsAllDefinitions()
    {
        var catalog = OrchestratorSloDefinitions.All;

        Assert.Equal(4, catalog.Count);
        Assert.Contains(OrchestratorSloDefinitions.SchedulingLatency, catalog);
        Assert.Contains(OrchestratorSloDefinitions.DispatchLatency, catalog);
        Assert.Contains(OrchestratorSloDefinitions.JobSuccessRate, catalog);
        Assert.Contains(OrchestratorSloDefinitions.ApiAvailability, catalog);
    }
}
|
||||
|
||||
/// <summary>
/// Tests pinning the OrchestratorBurnRateAlerts thresholds, windows,
/// and generated multi-window alert rules.
/// </summary>
public class OrchestratorBurnRateAlertsTests
{
    [Fact]
    public void CriticalBurnRate_Is14()
        => Assert.Equal(14.0, OrchestratorBurnRateAlerts.CriticalBurnRate);

    [Fact]
    public void WarningBurnRate_Is6()
        => Assert.Equal(6.0, OrchestratorBurnRateAlerts.WarningBurnRate);

    [Fact]
    public void InfoBurnRate_Is1()
        => Assert.Equal(1.0, OrchestratorBurnRateAlerts.InfoBurnRate);

    [Fact]
    public void ShortWindow_Is5Minutes()
        => Assert.Equal(TimeSpan.FromMinutes(5), OrchestratorBurnRateAlerts.ShortWindow);

    [Fact]
    public void LongWindow_Is1Hour()
        => Assert.Equal(TimeSpan.FromHours(1), OrchestratorBurnRateAlerts.LongWindow);

    [Fact]
    public void GetAlertRules_GeneratesCriticalAndWarningRules()
    {
        var rules = OrchestratorBurnRateAlerts.GetAlertRules("test_slo", 0.99);

        // One critical and one warning rule per SLO, keyed by a derived name.
        Assert.Equal(2, rules.Count);
        Assert.Contains("test_slo_burn_rate_critical", rules.Keys);
        Assert.Contains("test_slo_burn_rate_warning", rules.Keys);
    }

    [Fact]
    public void GetAlertRules_CriticalRuleContainsBurnRateThreshold()
    {
        var rules = OrchestratorBurnRateAlerts.GetAlertRules("test_slo", 0.99);

        // The critical rule follows the multi-window pattern: 14x burn rate
        // over a 5m short window and a 1h long window.
        var criticalRule = rules["test_slo_burn_rate_critical"];
        Assert.Contains("14", criticalRule);
        Assert.Contains("5m", criticalRule);
        Assert.Contains("1h", criticalRule);
    }

    [Fact]
    public void GetAlertRules_WarningRuleContainsBurnRateThreshold()
    {
        var rules = OrchestratorBurnRateAlerts.GetAlertRules("test_slo", 0.99);

        // The warning rule follows the multi-window pattern: 6x burn rate
        // over a 30m short window and a 6h long window.
        var warningRule = rules["test_slo_burn_rate_warning"];
        Assert.Contains("6", warningRule);
        Assert.Contains("30m", warningRule);
        Assert.Contains("6h", warningRule);
    }
}
|
||||
// ----- concatenated-source boundary: start of PackRegistryContractTests file -----
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.WebService.Contracts;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRegistry;
|
||||
|
||||
/// <summary>
/// Contract tests for the pack-registry web-service response DTOs: mapping
/// from Core domain records via FromDomain, status casing, and record shape.
/// </summary>
public sealed class PackRegistryContractTests
{
    private const string TestTenantId = "tenant-test";
    private const string TestCreatedBy = "system";

    [Fact]
    public void PackResponse_FromDomain_MapsAllFields()
    {
        var packId = Guid.NewGuid();
        var now = DateTimeOffset.UtcNow;
        var publishedAt = now.AddDays(-1);

        // Fully-populated domain record: every optional field is set so the
        // mapping of each one can be asserted below.
        var pack = new Pack(
            PackId: packId,
            TenantId: TestTenantId,
            ProjectId: "proj-1",
            Name: "my-pack",
            DisplayName: "My Pack",
            Description: "Test description",
            Status: PackStatus.Published,
            CreatedBy: TestCreatedBy,
            CreatedAt: now,
            UpdatedAt: now.AddHours(1),
            UpdatedBy: "admin",
            Metadata: "{\"key\":\"value\"}",
            Tags: "security,scanning",
            IconUri: "https://example.com/icon.png",
            VersionCount: 3,
            LatestVersion: "2.1.0",
            PublishedAt: publishedAt,
            PublishedBy: "publisher");

        var response = PackResponse.FromDomain(pack);

        Assert.Equal(packId, response.PackId);
        Assert.Equal("my-pack", response.Name);
        Assert.Equal("My Pack", response.DisplayName);
        Assert.Equal("Test description", response.Description);
        Assert.Equal("proj-1", response.ProjectId);
        Assert.Equal("published", response.Status); // Should be lowercase
        Assert.Equal(TestCreatedBy, response.CreatedBy);
        Assert.Equal(now, response.CreatedAt);
        Assert.Equal(now.AddHours(1), response.UpdatedAt);
        Assert.Equal("admin", response.UpdatedBy);
        Assert.Equal("{\"key\":\"value\"}", response.Metadata);
        Assert.Equal("security,scanning", response.Tags);
        Assert.Equal("https://example.com/icon.png", response.IconUri);
        Assert.Equal(3, response.VersionCount);
        Assert.Equal("2.1.0", response.LatestVersion);
        Assert.Equal(publishedAt, response.PublishedAt);
        Assert.Equal("publisher", response.PublishedBy);
    }

    // The wire contract uses lowercase status strings regardless of the enum casing.
    [Theory]
    [InlineData(PackStatus.Draft, "draft")]
    [InlineData(PackStatus.Published, "published")]
    [InlineData(PackStatus.Deprecated, "deprecated")]
    [InlineData(PackStatus.Archived, "archived")]
    public void PackResponse_FromDomain_StatusIsLowercase(PackStatus status, string expectedStatus)
    {
        var pack = CreatePack(status);
        var response = PackResponse.FromDomain(pack);
        Assert.Equal(expectedStatus, response.Status);
    }

    [Fact]
    public void PackVersionResponse_FromDomain_MapsAllFields()
    {
        var packVersionId = Guid.NewGuid();
        var packId = Guid.NewGuid();
        var now = DateTimeOffset.UtcNow;
        var publishedAt = now.AddDays(-1);
        var deprecatedAt = now.AddHours(-2);
        var signedAt = now.AddHours(-3);

        // Fully-populated version record, including deprecation and signature
        // info, so every mapped field can be checked.
        var version = new PackVersion(
            PackVersionId: packVersionId,
            TenantId: TestTenantId,
            PackId: packId,
            Version: "2.1.0",
            SemVer: "2.1.0",
            Status: PackVersionStatus.Deprecated,
            ArtifactUri: "s3://bucket/pack/2.1.0/artifact.zip",
            ArtifactDigest: "sha256:abc123",
            ArtifactMimeType: "application/zip",
            ArtifactSizeBytes: 2048000,
            ManifestJson: "{\"pack\":\"manifest\"}",
            ManifestDigest: "sha256:manifest123",
            ReleaseNotes: "Bug fixes and improvements",
            MinEngineVersion: "3.0.0",
            Dependencies: "{\"dep1\":\"^2.0.0\"}",
            CreatedBy: TestCreatedBy,
            CreatedAt: now,
            UpdatedAt: now.AddHours(1),
            UpdatedBy: "admin",
            PublishedAt: publishedAt,
            PublishedBy: "publisher",
            DeprecatedAt: deprecatedAt,
            DeprecatedBy: "security-team",
            DeprecationReason: "Security vulnerability CVE-2024-1234",
            SignatureUri: "s3://bucket/pack/2.1.0/signature.sig",
            SignatureAlgorithm: "ecdsa-p256",
            SignedBy: "signer@example.com",
            SignedAt: signedAt,
            Metadata: "{\"build\":\"123\"}",
            DownloadCount: 15000);

        var response = PackVersionResponse.FromDomain(version);

        Assert.Equal(packVersionId, response.PackVersionId);
        Assert.Equal(packId, response.PackId);
        Assert.Equal("2.1.0", response.Version);
        Assert.Equal("2.1.0", response.SemVer);
        Assert.Equal("deprecated", response.Status); // Should be lowercase
        Assert.Equal("s3://bucket/pack/2.1.0/artifact.zip", response.ArtifactUri);
        Assert.Equal("sha256:abc123", response.ArtifactDigest);
        Assert.Equal("application/zip", response.ArtifactMimeType);
        Assert.Equal(2048000L, response.ArtifactSizeBytes);
        Assert.Equal("sha256:manifest123", response.ManifestDigest);
        Assert.Equal("Bug fixes and improvements", response.ReleaseNotes);
        Assert.Equal("3.0.0", response.MinEngineVersion);
        Assert.Equal("{\"dep1\":\"^2.0.0\"}", response.Dependencies);
        Assert.Equal(TestCreatedBy, response.CreatedBy);
        Assert.Equal(now, response.CreatedAt);
        Assert.Equal(now.AddHours(1), response.UpdatedAt);
        Assert.Equal("admin", response.UpdatedBy);
        Assert.Equal(publishedAt, response.PublishedAt);
        Assert.Equal("publisher", response.PublishedBy);
        Assert.Equal(deprecatedAt, response.DeprecatedAt);
        Assert.Equal("security-team", response.DeprecatedBy);
        Assert.Equal("Security vulnerability CVE-2024-1234", response.DeprecationReason);
        Assert.True(response.IsSigned);
        Assert.Equal("ecdsa-p256", response.SignatureAlgorithm);
        Assert.Equal(signedAt, response.SignedAt);
        Assert.Equal("{\"build\":\"123\"}", response.Metadata);
        Assert.Equal(15000, response.DownloadCount);
    }

    [Theory]
    [InlineData(PackVersionStatus.Draft, "draft")]
    [InlineData(PackVersionStatus.Published, "published")]
    [InlineData(PackVersionStatus.Deprecated, "deprecated")]
    [InlineData(PackVersionStatus.Archived, "archived")]
    public void PackVersionResponse_FromDomain_StatusIsLowercase(PackVersionStatus status, string expectedStatus)
    {
        var version = CreatePackVersion(status);
        var response = PackVersionResponse.FromDomain(version);
        Assert.Equal(expectedStatus, response.Status);
    }

    // IsSigned is derived purely from the presence of a SignatureUri.
    [Fact]
    public void PackVersionResponse_FromDomain_IsSigned_WhenHasSignatureUri()
    {
        var version = CreatePackVersion(PackVersionStatus.Published) with
        {
            SignatureUri = "s3://bucket/signature.sig"
        };

        var response = PackVersionResponse.FromDomain(version);
        Assert.True(response.IsSigned);
    }

    [Fact]
    public void PackVersionResponse_FromDomain_IsSigned_False_WhenNoSignatureUri()
    {
        var version = CreatePackVersion(PackVersionStatus.Published) with
        {
            SignatureUri = null
        };

        var response = PackVersionResponse.FromDomain(version);
        Assert.False(response.IsSigned);
    }

    [Fact]
    public void PackListResponse_HasCorrectStructure()
    {
        var packs = new List<PackResponse>
        {
            PackResponse.FromDomain(CreatePack(PackStatus.Published)),
            PackResponse.FromDomain(CreatePack(PackStatus.Draft))
        };

        // TotalCount is the server-side total, not the page size.
        var response = new PackListResponse(packs, 100, "next-cursor-123");

        Assert.Equal(2, response.Packs.Count);
        Assert.Equal(100, response.TotalCount);
        Assert.Equal("next-cursor-123", response.NextCursor);
    }

    [Fact]
    public void PackVersionListResponse_HasCorrectStructure()
    {
        var versions = new List<PackVersionResponse>
        {
            PackVersionResponse.FromDomain(CreatePackVersion(PackVersionStatus.Published)),
            PackVersionResponse.FromDomain(CreatePackVersion(PackVersionStatus.Draft))
        };

        var response = new PackVersionListResponse(versions, 50, "next-cursor-456");

        Assert.Equal(2, response.Versions.Count);
        Assert.Equal(50, response.TotalCount);
        Assert.Equal("next-cursor-456", response.NextCursor);
    }

    [Fact]
    public void PackRegistryStatsResponse_HasCorrectStructure()
    {
        var now = DateTimeOffset.UtcNow;
        var response = new PackRegistryStatsResponse(
            TotalPacks: 100,
            PublishedPacks: 75,
            TotalVersions: 500,
            PublishedVersions: 400,
            TotalDownloads: 1_000_000,
            LastUpdatedAt: now);

        Assert.Equal(100, response.TotalPacks);
        Assert.Equal(75, response.PublishedPacks);
        Assert.Equal(500, response.TotalVersions);
        Assert.Equal(400, response.PublishedVersions);
        Assert.Equal(1_000_000, response.TotalDownloads);
        Assert.Equal(now, response.LastUpdatedAt);
    }

    [Fact]
    public void PackSearchResponse_HasCorrectStructure()
    {
        var packs = new List<PackResponse>
        {
            PackResponse.FromDomain(CreatePack(PackStatus.Published))
        };

        // Search responses echo the original query string back to the caller.
        var response = new PackSearchResponse(packs, "security scanning");

        Assert.Single(response.Packs);
        Assert.Equal("security scanning", response.Query);
    }

    [Fact]
    public void PackVersionDownloadResponse_HasCorrectStructure()
    {
        var packVersionId = Guid.NewGuid();
        var response = new PackVersionDownloadResponse(
            PackVersionId: packVersionId,
            Version: "1.0.0",
            ArtifactUri: "s3://bucket/artifact.zip",
            ArtifactDigest: "sha256:abc123",
            ArtifactMimeType: "application/zip",
            ArtifactSizeBytes: 1024000,
            SignatureUri: "s3://bucket/signature.sig",
            SignatureAlgorithm: "ecdsa-p256");

        Assert.Equal(packVersionId, response.PackVersionId);
        Assert.Equal("1.0.0", response.Version);
        Assert.Equal("s3://bucket/artifact.zip", response.ArtifactUri);
        Assert.Equal("sha256:abc123", response.ArtifactDigest);
        Assert.Equal("application/zip", response.ArtifactMimeType);
        Assert.Equal(1024000L, response.ArtifactSizeBytes);
        Assert.Equal("s3://bucket/signature.sig", response.SignatureUri);
        Assert.Equal("ecdsa-p256", response.SignatureAlgorithm);
    }

    [Fact]
    public void PackRegistryErrorResponse_HasCorrectStructure()
    {
        var packId = Guid.NewGuid();
        var packVersionId = Guid.NewGuid();
        var response = new PackRegistryErrorResponse(
            Code: "not_found",
            Message: "Pack not found",
            PackId: packId,
            PackVersionId: packVersionId);

        Assert.Equal("not_found", response.Code);
        Assert.Equal("Pack not found", response.Message);
        Assert.Equal(packId, response.PackId);
        Assert.Equal(packVersionId, response.PackVersionId);
    }

    // Builds a minimal Pack in the given status; every optional field is null.
    private static Pack CreatePack(PackStatus status)
    {
        return new Pack(
            PackId: Guid.NewGuid(),
            TenantId: TestTenantId,
            ProjectId: null,
            Name: "test-pack",
            DisplayName: "Test Pack",
            Description: null,
            Status: status,
            CreatedBy: TestCreatedBy,
            CreatedAt: DateTimeOffset.UtcNow,
            UpdatedAt: DateTimeOffset.UtcNow,
            UpdatedBy: null,
            Metadata: null,
            Tags: null,
            IconUri: null,
            VersionCount: 0,
            LatestVersion: null,
            PublishedAt: null,
            PublishedBy: null);
    }

    // Builds a minimal PackVersion in the given status; unsigned, no metadata.
    private static PackVersion CreatePackVersion(PackVersionStatus status)
    {
        return new PackVersion(
            PackVersionId: Guid.NewGuid(),
            TenantId: TestTenantId,
            PackId: Guid.NewGuid(),
            Version: "1.0.0",
            SemVer: "1.0.0",
            Status: status,
            ArtifactUri: "s3://bucket/artifact.zip",
            ArtifactDigest: "sha256:abc123",
            ArtifactMimeType: "application/zip",
            ArtifactSizeBytes: 1024000,
            ManifestJson: null,
            ManifestDigest: null,
            ReleaseNotes: null,
            MinEngineVersion: null,
            Dependencies: null,
            CreatedBy: TestCreatedBy,
            CreatedAt: DateTimeOffset.UtcNow,
            UpdatedAt: DateTimeOffset.UtcNow,
            UpdatedBy: null,
            PublishedAt: null,
            PublishedBy: null,
            DeprecatedAt: null,
            DeprecatedBy: null,
            DeprecationReason: null,
            SignatureUri: null,
            SignatureAlgorithm: null,
            SignedBy: null,
            SignedAt: null,
            Metadata: null,
            DownloadCount: 0);
    }
}

// ----- concatenated-source boundary: start of PackTests file -----
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRegistry;
|
||||
|
||||
/// <summary>
/// Tests for the Pack domain record: factory defaults, name normalization,
/// status-dependent capability flags, and the With* transition helpers.
/// </summary>
public sealed class PackTests
{
    private const string TestTenantId = "tenant-test";
    private const string TestName = "my-pack";
    private const string TestDisplayName = "My Pack";
    private const string TestCreatedBy = "system";

    [Fact]
    public void Create_InitializesWithCorrectDefaults()
    {
        var packId = Guid.NewGuid();
        var now = DateTimeOffset.UtcNow;

        var pack = Pack.Create(
            packId: packId,
            tenantId: TestTenantId,
            projectId: "proj-1",
            name: TestName,
            displayName: TestDisplayName,
            description: "Test description",
            createdBy: TestCreatedBy,
            metadata: "{\"key\":\"value\"}",
            tags: "security,scanning",
            iconUri: "https://example.com/icon.png",
            createdAt: now);

        Assert.Equal(packId, pack.PackId);
        Assert.Equal(TestTenantId, pack.TenantId);
        Assert.Equal("proj-1", pack.ProjectId);
        Assert.Equal(TestName, pack.Name); // Name is stored lowercased; TestName is already lowercase
        Assert.Equal(TestDisplayName, pack.DisplayName);
        Assert.Equal("Test description", pack.Description);
        Assert.Equal(PackStatus.Draft, pack.Status);
        Assert.Equal(TestCreatedBy, pack.CreatedBy);
        Assert.Equal(now, pack.CreatedAt);
        Assert.Equal(now, pack.UpdatedAt);
        Assert.Null(pack.UpdatedBy);
        Assert.Equal("{\"key\":\"value\"}", pack.Metadata);
        Assert.Equal("security,scanning", pack.Tags);
        Assert.Equal("https://example.com/icon.png", pack.IconUri);
        Assert.Equal(0, pack.VersionCount);
        Assert.Null(pack.LatestVersion);
        Assert.Null(pack.PublishedAt);
        Assert.Null(pack.PublishedBy);
    }

    // Names are normalized to lowercase on creation.
    [Fact]
    public void Create_LowercasesName()
    {
        var pack = Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: TestTenantId,
            projectId: null,
            name: "My-PACK-Name",
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy);

        Assert.Equal("my-pack-name", pack.Name);
    }

    // Optional parameters omitted at the factory stay null on the record.
    [Fact]
    public void Create_WithMinimalParameters()
    {
        var pack = Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: TestTenantId,
            projectId: null,
            name: TestName,
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy);

        Assert.Null(pack.ProjectId);
        Assert.Null(pack.Description);
        Assert.Null(pack.Metadata);
        Assert.Null(pack.Tags);
        Assert.Null(pack.IconUri);
    }

    // Archived is the only terminal state for a pack.
    [Theory]
    [InlineData(PackStatus.Archived, true)]
    [InlineData(PackStatus.Draft, false)]
    [InlineData(PackStatus.Published, false)]
    [InlineData(PackStatus.Deprecated, false)]
    public void IsTerminal_ReturnsCorrectValue(PackStatus status, bool expectedIsTerminal)
    {
        var pack = CreatePackWithStatus(status);
        Assert.Equal(expectedIsTerminal, pack.IsTerminal);
    }

    [Theory]
    [InlineData(PackStatus.Draft, true)]
    [InlineData(PackStatus.Published, true)]
    [InlineData(PackStatus.Deprecated, false)]
    [InlineData(PackStatus.Archived, false)]
    public void CanAddVersion_ReturnsCorrectValue(PackStatus status, bool expectedCanAdd)
    {
        var pack = CreatePackWithStatus(status);
        Assert.Equal(expectedCanAdd, pack.CanAddVersion);
    }

    // Publishing requires Draft status AND at least one version.
    [Theory]
    [InlineData(PackStatus.Draft, 0, false)] // Draft with no versions cannot publish
    [InlineData(PackStatus.Draft, 1, true)] // Draft with versions can publish
    [InlineData(PackStatus.Published, 1, false)] // Already published
    [InlineData(PackStatus.Deprecated, 1, false)] // Deprecated cannot publish
    [InlineData(PackStatus.Archived, 1, false)] // Archived cannot publish
    public void CanPublish_ReturnsCorrectValue(PackStatus status, int versionCount, bool expectedCanPublish)
    {
        var pack = CreatePackWithStatusAndVersionCount(status, versionCount);
        Assert.Equal(expectedCanPublish, pack.CanPublish);
    }

    [Theory]
    [InlineData(PackStatus.Published, true)]
    [InlineData(PackStatus.Draft, false)]
    [InlineData(PackStatus.Deprecated, false)]
    [InlineData(PackStatus.Archived, false)]
    public void CanDeprecate_ReturnsCorrectValue(PackStatus status, bool expectedCanDeprecate)
    {
        var pack = CreatePackWithStatus(status);
        Assert.Equal(expectedCanDeprecate, pack.CanDeprecate);
    }

    [Theory]
    [InlineData(PackStatus.Draft, true)]
    [InlineData(PackStatus.Deprecated, true)]
    [InlineData(PackStatus.Published, false)]
    [InlineData(PackStatus.Archived, false)]
    public void CanArchive_ReturnsCorrectValue(PackStatus status, bool expectedCanArchive)
    {
        var pack = CreatePackWithStatus(status);
        Assert.Equal(expectedCanArchive, pack.CanArchive);
    }

    // Transitioning to Published also stamps PublishedAt / PublishedBy.
    [Fact]
    public void WithStatus_UpdatesStatusAndTimestamp()
    {
        var pack = CreatePackWithStatus(PackStatus.Draft);
        var now = DateTimeOffset.UtcNow;

        var updated = pack.WithStatus(PackStatus.Published, "admin", now);

        Assert.Equal(PackStatus.Published, updated.Status);
        Assert.Equal("admin", updated.UpdatedBy);
        Assert.Equal(now, updated.UpdatedAt);
        Assert.Equal(now, updated.PublishedAt);
        Assert.Equal("admin", updated.PublishedBy);
    }

    // Non-publish transitions leave the published-* fields untouched.
    [Fact]
    public void WithStatus_DoesNotUpdatePublishedInfo_WhenNotPublishing()
    {
        var pack = CreatePackWithStatusAndVersionCount(PackStatus.Draft, 1) with
        {
            PublishedAt = null,
            PublishedBy = null
        };
        var now = DateTimeOffset.UtcNow;

        var updated = pack.WithStatus(PackStatus.Archived, "admin", now);

        Assert.Equal(PackStatus.Archived, updated.Status);
        Assert.Null(updated.PublishedAt);
        Assert.Null(updated.PublishedBy);
    }

    [Fact]
    public void WithVersionAdded_IncrementsVersionCount()
    {
        var pack = CreatePackWithStatus(PackStatus.Draft);
        var now = DateTimeOffset.UtcNow;

        var updated = pack.WithVersionAdded("1.0.0", "developer", now);

        Assert.Equal(1, updated.VersionCount);
        Assert.Equal("1.0.0", updated.LatestVersion);
        Assert.Equal("developer", updated.UpdatedBy);
        Assert.Equal(now, updated.UpdatedAt);
    }

    // LatestVersion tracks the most recently added version string.
    [Fact]
    public void WithVersionAdded_MultipleTimes_IncrementsCorrectly()
    {
        var pack = CreatePackWithStatus(PackStatus.Draft);
        var now = DateTimeOffset.UtcNow;

        var updated1 = pack.WithVersionAdded("1.0.0", "dev1", now);
        var updated2 = updated1.WithVersionAdded("1.1.0", "dev2", now.AddHours(1));
        var updated3 = updated2.WithVersionAdded("2.0.0", "dev1", now.AddHours(2));

        Assert.Equal(3, updated3.VersionCount);
        Assert.Equal("2.0.0", updated3.LatestVersion);
    }

    // Empty / whitespace tenant ids are rejected; null has its own test below.
    [Theory]
    [InlineData("")]
    [InlineData(" ")]
    public void Create_ThrowsArgumentException_ForEmptyOrWhitespaceTenantId(string tenantId)
    {
        Assert.Throws<ArgumentException>(() => Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: tenantId,
            projectId: null,
            name: TestName,
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy));
    }

    [Fact]
    public void Create_ThrowsArgumentNullException_ForNullTenantId()
    {
        Assert.Throws<ArgumentNullException>(() => Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: null!,
            projectId: null,
            name: TestName,
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy));
    }

    [Theory]
    [InlineData("")]
    [InlineData(" ")]
    public void Create_ThrowsArgumentException_ForEmptyOrWhitespaceName(string name)
    {
        Assert.Throws<ArgumentException>(() => Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: TestTenantId,
            projectId: null,
            name: name,
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy));
    }

    [Fact]
    public void Create_ThrowsArgumentNullException_ForNullName()
    {
        Assert.Throws<ArgumentNullException>(() => Pack.Create(
            packId: Guid.NewGuid(),
            tenantId: TestTenantId,
            projectId: null,
            name: null!,
            displayName: TestDisplayName,
            description: null,
            createdBy: TestCreatedBy));
    }

    // Builds a pack directly (bypassing the factory) in the given status.
    private static Pack CreatePackWithStatus(PackStatus status)
    {
        return new Pack(
            PackId: Guid.NewGuid(),
            TenantId: TestTenantId,
            ProjectId: null,
            Name: TestName,
            DisplayName: TestDisplayName,
            Description: null,
            Status: status,
            CreatedBy: TestCreatedBy,
            CreatedAt: DateTimeOffset.UtcNow,
            UpdatedAt: DateTimeOffset.UtcNow,
            UpdatedBy: null,
            Metadata: null,
            Tags: null,
            IconUri: null,
            VersionCount: 0,
            LatestVersion: null,
            PublishedAt: null,
            PublishedBy: null);
    }

    // Same as above but with an explicit version count (for CanPublish tests).
    private static Pack CreatePackWithStatusAndVersionCount(PackStatus status, int versionCount)
    {
        return new Pack(
            PackId: Guid.NewGuid(),
            TenantId: TestTenantId,
            ProjectId: null,
            Name: TestName,
            DisplayName: TestDisplayName,
            Description: null,
            Status: status,
            CreatedBy: TestCreatedBy,
            CreatedAt: DateTimeOffset.UtcNow,
            UpdatedAt: DateTimeOffset.UtcNow,
            UpdatedBy: null,
            Metadata: null,
            Tags: null,
            IconUri: null,
            VersionCount: versionCount,
            LatestVersion: versionCount > 0 ? "1.0.0" : null,
            PublishedAt: null,
            PublishedBy: null);
    }
}

// ----- concatenated-source boundary: start of PackVersionTests file -----
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.PackRegistry;
|
||||
|
||||
public sealed class PackVersionTests
|
||||
{
|
||||
private const string TestTenantId = "tenant-test";
|
||||
private const string TestVersion = "1.0.0";
|
||||
private const string TestArtifactUri = "s3://bucket/pack/1.0.0/artifact.zip";
|
||||
private const string TestArtifactDigest = "sha256:abc123def456";
|
||||
private const string TestCreatedBy = "system";
|
||||
|
||||
[Fact]
public void Create_InitializesWithCorrectDefaults()
{
    var packVersionId = Guid.NewGuid();
    var packId = Guid.NewGuid();
    var now = DateTimeOffset.UtcNow;

    // Fully-populated factory call so every stored field can be asserted,
    // along with the Draft default status and null transition fields.
    var version = PackVersion.Create(
        packVersionId: packVersionId,
        tenantId: TestTenantId,
        packId: packId,
        version: TestVersion,
        semVer: "1.0.0",
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: "application/zip",
        artifactSizeBytes: 1024000,
        manifestJson: "{\"pack\":\"manifest\"}",
        manifestDigest: "sha256:manifest123",
        releaseNotes: "Initial release",
        minEngineVersion: "2.0.0",
        dependencies: "{\"dep1\":\"^1.0.0\"}",
        createdBy: TestCreatedBy,
        metadata: "{\"key\":\"value\"}",
        createdAt: now);

    Assert.Equal(packVersionId, version.PackVersionId);
    Assert.Equal(TestTenantId, version.TenantId);
    Assert.Equal(packId, version.PackId);
    Assert.Equal(TestVersion, version.Version);
    Assert.Equal("1.0.0", version.SemVer);
    Assert.Equal(PackVersionStatus.Draft, version.Status);
    Assert.Equal(TestArtifactUri, version.ArtifactUri);
    Assert.Equal(TestArtifactDigest, version.ArtifactDigest);
    Assert.Equal("application/zip", version.ArtifactMimeType);
    Assert.Equal(1024000L, version.ArtifactSizeBytes);
    Assert.Equal("{\"pack\":\"manifest\"}", version.ManifestJson);
    Assert.Equal("sha256:manifest123", version.ManifestDigest);
    Assert.Equal("Initial release", version.ReleaseNotes);
    Assert.Equal("2.0.0", version.MinEngineVersion);
    Assert.Equal("{\"dep1\":\"^1.0.0\"}", version.Dependencies);
    Assert.Equal(TestCreatedBy, version.CreatedBy);
    Assert.Equal(now, version.CreatedAt);
    Assert.Equal(now, version.UpdatedAt);
    Assert.Null(version.UpdatedBy);
    Assert.Null(version.PublishedAt);
    Assert.Null(version.PublishedBy);
    Assert.Null(version.DeprecatedAt);
    Assert.Null(version.DeprecatedBy);
    Assert.Null(version.DeprecationReason);
    Assert.Null(version.SignatureUri);
    Assert.Null(version.SignatureAlgorithm);
    Assert.Null(version.SignedBy);
    Assert.Null(version.SignedAt);
    Assert.Equal("{\"key\":\"value\"}", version.Metadata);
    Assert.Equal(0, version.DownloadCount);
}

[Fact]
public void Create_WithMinimalParameters()
{
    // Only the required fields are supplied; optional metadata stays null.
    var version = PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: TestTenantId,
        packId: Guid.NewGuid(),
        version: TestVersion,
        semVer: null,
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Null(version.SemVer);
    Assert.Null(version.ArtifactMimeType);
    Assert.Null(version.ArtifactSizeBytes);
    Assert.Null(version.ManifestJson);
    Assert.Null(version.ReleaseNotes);
    Assert.Null(version.Metadata);
}

// Archived is the only terminal state for a pack version.
[Theory]
[InlineData(PackVersionStatus.Archived, true)]
[InlineData(PackVersionStatus.Draft, false)]
[InlineData(PackVersionStatus.Published, false)]
[InlineData(PackVersionStatus.Deprecated, false)]
public void IsTerminal_ReturnsCorrectValue(PackVersionStatus status, bool expectedIsTerminal)
    => Assert.Equal(expectedIsTerminal, CreateVersionWithStatus(status).IsTerminal);

// Only Draft versions may be published.
[Theory]
[InlineData(PackVersionStatus.Draft, true)]
[InlineData(PackVersionStatus.Published, false)]
[InlineData(PackVersionStatus.Deprecated, false)]
[InlineData(PackVersionStatus.Archived, false)]
public void CanPublish_ReturnsCorrectValue(PackVersionStatus status, bool expectedCanPublish)
    => Assert.Equal(expectedCanPublish, CreateVersionWithStatus(status).CanPublish);

// Only Published versions may be deprecated.
[Theory]
[InlineData(PackVersionStatus.Published, true)]
[InlineData(PackVersionStatus.Draft, false)]
[InlineData(PackVersionStatus.Deprecated, false)]
[InlineData(PackVersionStatus.Archived, false)]
public void CanDeprecate_ReturnsCorrectValue(PackVersionStatus status, bool expectedCanDeprecate)
    => Assert.Equal(expectedCanDeprecate, CreateVersionWithStatus(status).CanDeprecate);

// Draft and Deprecated versions may be archived; Published and Archived may not.
[Theory]
[InlineData(PackVersionStatus.Draft, true)]
[InlineData(PackVersionStatus.Deprecated, true)]
[InlineData(PackVersionStatus.Published, false)]
[InlineData(PackVersionStatus.Archived, false)]
public void CanArchive_ReturnsCorrectValue(PackVersionStatus status, bool expectedCanArchive)
    => Assert.Equal(expectedCanArchive, CreateVersionWithStatus(status).CanArchive);

[Fact]
public void IsSigned_ReturnsFalse_WhenNoSignature()
    // A freshly built version has no SignatureUri, so it is not signed.
    => Assert.False(CreateVersionWithStatus(PackVersionStatus.Draft).IsSigned);

[Fact]
public void IsSigned_ReturnsTrue_WhenHasSignature()
{
    // Attaching a signature URI is sufficient for IsSigned to report true.
    var signed = CreateVersionWithStatus(PackVersionStatus.Draft) with
    {
        SignatureUri = "s3://bucket/pack/1.0.0/signature.sig"
    };

    Assert.True(signed.IsSigned);
}

[Fact]
public void WithStatus_UpdatesStatusAndTimestamp()
{
    var draft = CreateVersionWithStatus(PackVersionStatus.Draft);
    var timestamp = DateTimeOffset.UtcNow;

    var published = draft.WithStatus(PackVersionStatus.Published, "admin", timestamp);

    // Publishing stamps both the audit fields and the published-* fields.
    Assert.Equal(PackVersionStatus.Published, published.Status);
    Assert.Equal("admin", published.UpdatedBy);
    Assert.Equal(timestamp, published.UpdatedAt);
    Assert.Equal(timestamp, published.PublishedAt);
    Assert.Equal("admin", published.PublishedBy);
}

[Fact]
public void WithDeprecation_SetsDeprecationInfo()
{
    var published = CreateVersionWithStatus(PackVersionStatus.Published);
    var timestamp = DateTimeOffset.UtcNow;

    var deprecated = published.WithDeprecation("admin", "Security vulnerability found", timestamp);

    // Deprecation flips the status and records who, when, and why.
    Assert.Equal(PackVersionStatus.Deprecated, deprecated.Status);
    Assert.Equal("admin", deprecated.UpdatedBy);
    Assert.Equal(timestamp, deprecated.UpdatedAt);
    Assert.Equal(timestamp, deprecated.DeprecatedAt);
    Assert.Equal("admin", deprecated.DeprecatedBy);
    Assert.Equal("Security vulnerability found", deprecated.DeprecationReason);
}

[Fact]
public void WithSignature_SetsSignatureInfo()
{
    var draft = CreateVersionWithStatus(PackVersionStatus.Draft);
    var timestamp = DateTimeOffset.UtcNow;

    var signed = draft.WithSignature(
        "s3://bucket/pack/1.0.0/signature.sig",
        "ecdsa-p256",
        "signer@example.com",
        timestamp);

    // Signing records the signature details, updates the audit fields,
    // and makes IsSigned report true.
    Assert.Equal("s3://bucket/pack/1.0.0/signature.sig", signed.SignatureUri);
    Assert.Equal("ecdsa-p256", signed.SignatureAlgorithm);
    Assert.Equal("signer@example.com", signed.SignedBy);
    Assert.Equal(timestamp, signed.SignedAt);
    Assert.Equal(timestamp, signed.UpdatedAt);
    Assert.Equal("signer@example.com", signed.UpdatedBy);
    Assert.True(signed.IsSigned);
}

[Fact]
public void WithDownload_IncrementsDownloadCount()
{
    // Each WithDownload call yields a new instance with the counter bumped by one.
    var current = CreateVersionWithStatus(PackVersionStatus.Published);
    Assert.Equal(0, current.DownloadCount);

    for (var expected = 1; expected <= 3; expected++)
    {
        current = current.WithDownload();
        Assert.Equal(expected, current.DownloadCount);
    }
}
|
||||
|
||||
[Theory]
[InlineData("")]
[InlineData(" ")]
public void Create_ThrowsArgumentException_ForEmptyOrWhitespaceTenantId(string tenantId)
{
    // The factory must reject blank tenant identifiers with ArgumentException.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: tenantId,
        packId: Guid.NewGuid(),
        version: TestVersion,
        semVer: null,
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentException>(Act);
}
|
||||
|
||||
[Fact]
public void Create_ThrowsArgumentNullException_ForNullTenantId()
{
    // A null tenant id is a distinct failure mode: ArgumentNullException, not ArgumentException.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: null!,
        packId: Guid.NewGuid(),
        version: TestVersion,
        semVer: null,
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentNullException>(Act);
}
|
||||
|
||||
[Theory]
[InlineData("")]
[InlineData(" ")]
public void Create_ThrowsArgumentException_ForEmptyOrWhitespaceVersion(string versionString)
{
    // The factory must reject blank version strings with ArgumentException.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: TestTenantId,
        packId: Guid.NewGuid(),
        version: versionString,
        semVer: null,
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentException>(Act);
}
|
||||
|
||||
[Fact]
public void Create_ThrowsArgumentNullException_ForNullVersion()
{
    // A null version string must raise ArgumentNullException specifically.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: TestTenantId,
        packId: Guid.NewGuid(),
        version: null!,
        semVer: null,
        artifactUri: TestArtifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentNullException>(Act);
}
|
||||
|
||||
[Theory]
[InlineData("")]
[InlineData(" ")]
public void Create_ThrowsArgumentException_ForEmptyOrWhitespaceArtifactUri(string artifactUri)
{
    // The factory must reject blank artifact URIs with ArgumentException.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: TestTenantId,
        packId: Guid.NewGuid(),
        version: TestVersion,
        semVer: null,
        artifactUri: artifactUri,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentException>(Act);
}
|
||||
|
||||
[Fact]
public void Create_ThrowsArgumentNullException_ForNullArtifactUri()
{
    // A null artifact URI must raise ArgumentNullException specifically.
    void Act() => PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: TestTenantId,
        packId: Guid.NewGuid(),
        version: TestVersion,
        semVer: null,
        artifactUri: null!,
        artifactDigest: TestArtifactDigest,
        artifactMimeType: null,
        artifactSizeBytes: null,
        manifestJson: null,
        manifestDigest: null,
        releaseNotes: null,
        minEngineVersion: null,
        dependencies: null,
        createdBy: TestCreatedBy);

    Assert.Throws<ArgumentNullException>(Act);
}
|
||||
|
||||
/// <summary>
/// Shared arrange helper: builds a fully populated <see cref="PackVersion"/> in the
/// given <paramref name="status"/> with a fixed artifact, no signature, no deprecation
/// info, and a zero download count.
/// </summary>
private static PackVersion CreateVersionWithStatus(PackVersionStatus status)
{
    return new PackVersion(
        PackVersionId: Guid.NewGuid(),
        TenantId: TestTenantId,
        PackId: Guid.NewGuid(),
        Version: TestVersion,
        SemVer: TestVersion,
        Status: status,
        ArtifactUri: TestArtifactUri,
        ArtifactDigest: TestArtifactDigest,
        ArtifactMimeType: "application/zip",
        ArtifactSizeBytes: 1024000,
        ManifestJson: null,
        ManifestDigest: null,
        ReleaseNotes: null,
        MinEngineVersion: null,
        Dependencies: null,
        CreatedBy: TestCreatedBy,
        CreatedAt: DateTimeOffset.UtcNow,
        UpdatedAt: DateTimeOffset.UtcNow,
        UpdatedBy: null,
        PublishedAt: null,
        PublishedBy: null,
        DeprecatedAt: null,
        DeprecatedBy: null,
        DeprecationReason: null,
        SignatureUri: null,
        SignatureAlgorithm: null,
        SignedBy: null,
        SignedAt: null,
        Metadata: null,
        DownloadCount: 0);
}
|
||||
}
|
||||
@@ -0,0 +1,292 @@
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.WebService.Contracts;
|
||||
|
||||
// ========== Pack CRUD Requests/Responses ==========
|
||||
|
||||
/// <summary>
/// Request to create a new pack in the registry.
/// </summary>
/// <param name="Name">Unique pack name (lowercase, URL-safe).</param>
/// <param name="DisplayName">Display name for the pack.</param>
/// <param name="Description">Optional pack description.</param>
/// <param name="ProjectId">Optional project scope.</param>
/// <param name="Metadata">Optional metadata JSON.</param>
/// <param name="Tags">Optional comma-separated tags.</param>
/// <param name="IconUri">Optional icon URI.</param>
public sealed record CreatePackRequest(
    string Name,
    string DisplayName,
    string? Description,
    string? ProjectId,
    string? Metadata,
    string? Tags,
    string? IconUri);
|
||||
|
||||
/// <summary>
/// Response representing a pack. Field order mirrors the positional parameters of
/// <see cref="Pack"/> that are safe to expose over the wire.
/// </summary>
public sealed record PackResponse(
    Guid PackId,
    string Name,
    string DisplayName,
    string? Description,
    string? ProjectId,
    string Status,
    string CreatedBy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? UpdatedBy,
    string? Metadata,
    string? Tags,
    string? IconUri,
    int VersionCount,
    string? LatestVersion,
    DateTimeOffset? PublishedAt,
    string? PublishedBy)
{
    /// <summary>
    /// Maps a domain <see cref="Pack"/> to the wire contract. The enum status is
    /// serialized as its lowercase invariant name (e.g. "published").
    /// </summary>
    public static PackResponse FromDomain(Pack pack) => new(
        pack.PackId,
        pack.Name,
        pack.DisplayName,
        pack.Description,
        pack.ProjectId,
        pack.Status.ToString().ToLowerInvariant(),
        pack.CreatedBy,
        pack.CreatedAt,
        pack.UpdatedAt,
        pack.UpdatedBy,
        pack.Metadata,
        pack.Tags,
        pack.IconUri,
        pack.VersionCount,
        pack.LatestVersion,
        pack.PublishedAt,
        pack.PublishedBy);
}
|
||||
|
||||
/// <summary>
/// Response containing a paginated list of packs.
/// </summary>
/// <param name="Packs">The page of packs.</param>
/// <param name="TotalCount">Total matching packs across all pages.</param>
/// <param name="NextCursor">Opaque cursor for the next page, or null when exhausted.</param>
public sealed record PackListResponse(
    IReadOnlyList<PackResponse> Packs,
    int TotalCount,
    string? NextCursor);
|
||||
|
||||
/// <summary>
/// Request to update a pack. All fields are optional; a null field leaves the
/// corresponding pack property unchanged (PATCH semantics).
/// </summary>
/// <param name="DisplayName">Updated display name.</param>
/// <param name="Description">Updated description.</param>
/// <param name="Metadata">Updated metadata JSON.</param>
/// <param name="Tags">Updated comma-separated tags.</param>
/// <param name="IconUri">Updated icon URI.</param>
public sealed record UpdatePackRequest(
    string? DisplayName,
    string? Description,
    string? Metadata,
    string? Tags,
    string? IconUri);
|
||||
|
||||
/// <summary>
/// Request to update pack status (publish, deprecate, archive).
/// </summary>
/// <param name="Status">New status: draft, published, deprecated, archived.</param>
public sealed record UpdatePackStatusRequest(
    string Status);
|
||||
|
||||
// ========== Pack Version Requests/Responses ==========
|
||||
|
||||
/// <summary>
/// Request to create a new pack version.
/// </summary>
/// <param name="Version">Version string (e.g., "1.0.0", "2.0.0-beta.1").</param>
/// <param name="SemVer">Optional semantic version for sorting.</param>
/// <param name="ArtifactUri">Artifact storage URI.</param>
/// <param name="ArtifactDigest">Artifact content digest (SHA-256).</param>
/// <param name="ArtifactMimeType">Artifact MIME type.</param>
/// <param name="ArtifactSizeBytes">Artifact size in bytes.</param>
/// <param name="ManifestJson">Pack manifest JSON.</param>
/// <param name="ManifestDigest">Manifest digest for verification.</param>
/// <param name="ReleaseNotes">Release notes.</param>
/// <param name="MinEngineVersion">Minimum engine version required.</param>
/// <param name="Dependencies">Dependencies JSON.</param>
/// <param name="Metadata">Optional metadata JSON.</param>
public sealed record CreatePackVersionRequest(
    string Version,
    string? SemVer,
    string ArtifactUri,
    string ArtifactDigest,
    string? ArtifactMimeType,
    long? ArtifactSizeBytes,
    string? ManifestJson,
    string? ManifestDigest,
    string? ReleaseNotes,
    string? MinEngineVersion,
    string? Dependencies,
    string? Metadata);
|
||||
|
||||
/// <summary>
/// Response representing a pack version. Note that <c>SignatureUri</c> is deliberately
/// not exposed here; only the derived <c>IsSigned</c> flag, algorithm, and timestamp are.
/// </summary>
public sealed record PackVersionResponse(
    Guid PackVersionId,
    Guid PackId,
    string Version,
    string? SemVer,
    string Status,
    string ArtifactUri,
    string ArtifactDigest,
    string? ArtifactMimeType,
    long? ArtifactSizeBytes,
    string? ManifestDigest,
    string? ReleaseNotes,
    string? MinEngineVersion,
    string? Dependencies,
    string CreatedBy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? UpdatedBy,
    DateTimeOffset? PublishedAt,
    string? PublishedBy,
    DateTimeOffset? DeprecatedAt,
    string? DeprecatedBy,
    string? DeprecationReason,
    bool IsSigned,
    string? SignatureAlgorithm,
    DateTimeOffset? SignedAt,
    string? Metadata,
    int DownloadCount)
{
    /// <summary>
    /// Maps a domain <see cref="PackVersion"/> to the wire contract. The enum status is
    /// serialized as its lowercase invariant name (e.g. "published").
    /// </summary>
    public static PackVersionResponse FromDomain(PackVersion version) => new(
        version.PackVersionId,
        version.PackId,
        version.Version,
        version.SemVer,
        version.Status.ToString().ToLowerInvariant(),
        version.ArtifactUri,
        version.ArtifactDigest,
        version.ArtifactMimeType,
        version.ArtifactSizeBytes,
        version.ManifestDigest,
        version.ReleaseNotes,
        version.MinEngineVersion,
        version.Dependencies,
        version.CreatedBy,
        version.CreatedAt,
        version.UpdatedAt,
        version.UpdatedBy,
        version.PublishedAt,
        version.PublishedBy,
        version.DeprecatedAt,
        version.DeprecatedBy,
        version.DeprecationReason,
        version.IsSigned,
        version.SignatureAlgorithm,
        version.SignedAt,
        version.Metadata,
        version.DownloadCount);
}
|
||||
|
||||
/// <summary>
/// Response containing a paginated list of pack versions.
/// </summary>
/// <param name="Versions">The page of versions.</param>
/// <param name="TotalCount">Total matching versions across all pages.</param>
/// <param name="NextCursor">Opaque cursor for the next page, or null when exhausted.</param>
public sealed record PackVersionListResponse(
    IReadOnlyList<PackVersionResponse> Versions,
    int TotalCount,
    string? NextCursor);
|
||||
|
||||
/// <summary>
/// Request to update a pack version. Null fields leave the corresponding
/// property unchanged (PATCH semantics).
/// </summary>
/// <param name="ReleaseNotes">Updated release notes.</param>
/// <param name="Metadata">Updated metadata JSON.</param>
public sealed record UpdatePackVersionRequest(
    string? ReleaseNotes,
    string? Metadata);
|
||||
|
||||
/// <summary>
/// Request to update pack version status (publish, deprecate, archive).
/// </summary>
/// <param name="Status">New status: draft, published, deprecated, archived.</param>
/// <param name="DeprecationReason">Deprecation reason (required when status is deprecated).</param>
public sealed record UpdatePackVersionStatusRequest(
    string Status,
    string? DeprecationReason);
|
||||
|
||||
/// <summary>
/// Request to sign a pack version.
/// </summary>
/// <param name="SignatureUri">Signature storage URI.</param>
/// <param name="SignatureAlgorithm">Signature algorithm (e.g., "ecdsa-p256", "rsa-sha256").</param>
public sealed record SignPackVersionRequest(
    string SignatureUri,
    string SignatureAlgorithm);
|
||||
|
||||
/// <summary>
/// Response for a download request. Unlike <c>PackVersionResponse</c>, this contract
/// does expose <c>SignatureUri</c> so clients can verify the artifact after download.
/// </summary>
public sealed record PackVersionDownloadResponse(
    Guid PackVersionId,
    string Version,
    string ArtifactUri,
    string ArtifactDigest,
    string? ArtifactMimeType,
    long? ArtifactSizeBytes,
    string? SignatureUri,
    string? SignatureAlgorithm);
|
||||
|
||||
// ========== Search and Discovery ==========
|
||||
|
||||
/// <summary>
/// Response for pack search results.
/// </summary>
/// <param name="Packs">Matching packs.</param>
/// <param name="Query">The query string that produced these results (echoed back).</param>
public sealed record PackSearchResponse(
    IReadOnlyList<PackResponse> Packs,
    string Query);
|
||||
|
||||
/// <summary>
/// Response for registry statistics.
/// </summary>
/// <param name="TotalPacks">All packs in the tenant's registry, regardless of status.</param>
/// <param name="PublishedPacks">Packs currently in the published status.</param>
/// <param name="TotalVersions">All versions across all packs.</param>
/// <param name="PublishedVersions">Versions currently in the published status.</param>
/// <param name="TotalDownloads">Sum of download counts across all versions.</param>
/// <param name="LastUpdatedAt">Most recent update timestamp in the registry, if any.</param>
public sealed record PackRegistryStatsResponse(
    int TotalPacks,
    int PublishedPacks,
    int TotalVersions,
    int PublishedVersions,
    long TotalDownloads,
    DateTimeOffset? LastUpdatedAt);
|
||||
|
||||
// ========== Error Response ==========
|
||||
|
||||
/// <summary>
/// Error response for pack registry operations.
/// </summary>
/// <param name="Code">Stable machine-readable error code (e.g. "not_found", "duplicate_name").</param>
/// <param name="Message">Human-readable description of the failure.</param>
/// <param name="PackId">The pack involved, when known.</param>
/// <param name="PackVersionId">The pack version involved, when known.</param>
public sealed record PackRegistryErrorResponse(
    string Code,
    string Message,
    Guid? PackId,
    Guid? PackVersionId);
|
||||
@@ -0,0 +1,875 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
using StellaOps.Orchestrator.Infrastructure.Repositories;
|
||||
using StellaOps.Orchestrator.WebService.Contracts;
|
||||
using StellaOps.Orchestrator.WebService.Services;
|
||||
|
||||
namespace StellaOps.Orchestrator.WebService.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// Pack registry endpoints for pack management, versioning, and discovery.
|
||||
/// Per 150.B-PacksRegistry: Registry API for pack CRUD operations.
|
||||
/// </summary>
|
||||
public static class PackRegistryEndpoints
|
||||
{
|
||||
private const int DefaultLimit = 50;
|
||||
private const int MaxLimit = 100;
|
||||
|
||||
/// <summary>
/// Maps pack registry endpoints under /api/v1/orchestrator/registry/packs.
/// Registration order does not determine route precedence in minimal APIs: literal
/// segments (e.g. "versions/latest", "search", "stats") outrank parameter segments
/// (e.g. "versions/{version}", "{packId:guid}"), so the literal routes below are
/// matched first even though some are registered after the parameterized ones.
/// </summary>
public static RouteGroupBuilder MapPackRegistryEndpoints(this IEndpointRouteBuilder app)
{
    var group = app.MapGroup("/api/v1/orchestrator/registry/packs")
        .WithTags("Orchestrator Pack Registry");

    // Pack CRUD endpoints
    group.MapPost("", CreatePack)
        .WithName("Registry_CreatePack")
        .WithDescription("Create a new pack in the registry");

    group.MapGet("{packId:guid}", GetPackById)
        .WithName("Registry_GetPackById")
        .WithDescription("Get pack by ID");

    group.MapGet("by-name/{name}", GetPackByName)
        .WithName("Registry_GetPackByName")
        .WithDescription("Get pack by name");

    group.MapGet("", ListPacks)
        .WithName("Registry_ListPacks")
        .WithDescription("List packs with filters");

    group.MapPatch("{packId:guid}", UpdatePack)
        .WithName("Registry_UpdatePack")
        .WithDescription("Update pack metadata");

    // Status is modelled as a sub-resource POST rather than PATCH so that transition
    // validation stays separate from plain metadata updates.
    group.MapPost("{packId:guid}/status", UpdatePackStatus)
        .WithName("Registry_UpdatePackStatus")
        .WithDescription("Update pack status (publish, deprecate, archive)");

    group.MapDelete("{packId:guid}", DeletePack)
        .WithName("Registry_DeletePack")
        .WithDescription("Delete a draft pack with no versions");

    // Pack version endpoints
    group.MapPost("{packId:guid}/versions", CreatePackVersion)
        .WithName("Registry_CreatePackVersion")
        .WithDescription("Create a new version for a pack");

    group.MapGet("{packId:guid}/versions", ListVersions)
        .WithName("Registry_ListVersions")
        .WithDescription("List versions for a pack");

    group.MapGet("{packId:guid}/versions/{version}", GetVersion)
        .WithName("Registry_GetVersion")
        .WithDescription("Get a specific pack version");

    group.MapGet("{packId:guid}/versions/latest", GetLatestVersion)
        .WithName("Registry_GetLatestVersion")
        .WithDescription("Get the latest published version");

    group.MapPatch("{packId:guid}/versions/{packVersionId:guid}", UpdateVersion)
        .WithName("Registry_UpdateVersion")
        .WithDescription("Update version metadata");

    group.MapPost("{packId:guid}/versions/{packVersionId:guid}/status", UpdateVersionStatus)
        .WithName("Registry_UpdateVersionStatus")
        .WithDescription("Update version status (publish, deprecate, archive)");

    group.MapPost("{packId:guid}/versions/{packVersionId:guid}/sign", SignVersion)
        .WithName("Registry_SignVersion")
        .WithDescription("Sign a pack version");

    // POST (not GET) because downloading has a side effect: it increments the counter.
    group.MapPost("{packId:guid}/versions/{packVersionId:guid}/download", DownloadVersion)
        .WithName("Registry_DownloadVersion")
        .WithDescription("Get download info and increment download count");

    group.MapDelete("{packId:guid}/versions/{packVersionId:guid}", DeleteVersion)
        .WithName("Registry_DeleteVersion")
        .WithDescription("Delete a draft version");

    // Search and discovery endpoints
    group.MapGet("search", SearchPacks)
        .WithName("Registry_SearchPacks")
        .WithDescription("Search packs by name, description, or tags");

    group.MapGet("by-tag/{tag}", GetPacksByTag)
        .WithName("Registry_GetPacksByTag")
        .WithDescription("Get packs by tag");

    group.MapGet("popular", GetPopularPacks)
        .WithName("Registry_GetPopularPacks")
        .WithDescription("Get popular packs by download count");

    group.MapGet("recent", GetRecentPacks)
        .WithName("Registry_GetRecentPacks")
        .WithDescription("Get recently updated packs");

    // Statistics endpoint
    group.MapGet("stats", GetStats)
        .WithName("Registry_GetStats")
        .WithDescription("Get registry statistics");

    return group;
}
|
||||
|
||||
// ========== Pack CRUD Endpoints ==========
|
||||
|
||||
/// <summary>
/// Creates a new pack for the caller's tenant. Returns 400 on missing required
/// fields, 409 when a pack with the same (lowercased) name already exists, and
/// 201 with the created pack on success.
/// </summary>
private static async Task<IResult> CreatePack(
    HttpContext context,
    [FromBody] CreatePackRequest request,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    [FromServices] TimeProvider timeProvider,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(request.Name))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "Name is required", null, null));
    }

    if (string.IsNullOrWhiteSpace(request.DisplayName))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "DisplayName is required", null, null));
    }

    var tenantId = tenantResolver.Resolve(context);
    // Fall back to "system" for unauthenticated callers.
    var actor = context.User?.Identity?.Name ?? "system";
    var now = timeProvider.GetUtcNow();

    // Check for existing pack with same name.
    // NOTE(review): the duplicate lookup lowercases the name, but Pack.Create below
    // receives the raw request.Name — presumably Pack.Create normalizes it; confirm,
    // otherwise mixed-case names would be stored but never found by this check.
    // There is also a check-then-create window; a unique DB constraint should back this.
    var existing = await repository.GetPackByNameAsync(tenantId, request.Name.ToLowerInvariant(), cancellationToken);
    if (existing is not null)
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "duplicate_name", $"Pack with name '{request.Name}' already exists", existing.PackId, null));
    }

    var pack = Pack.Create(
        packId: Guid.NewGuid(),
        tenantId: tenantId,
        projectId: request.ProjectId,
        name: request.Name,
        displayName: request.DisplayName,
        description: request.Description,
        createdBy: actor,
        metadata: request.Metadata,
        tags: request.Tags,
        iconUri: request.IconUri,
        createdAt: now);

    await repository.CreatePackAsync(pack, cancellationToken);

    return Results.Created($"/api/v1/orchestrator/registry/packs/{pack.PackId}", PackResponse.FromDomain(pack));
}
|
||||
|
||||
/// <summary>
/// Looks up a pack by id within the caller's tenant; 404 when absent.
/// </summary>
private static async Task<IResult> GetPackById(
    HttpContext context,
    [FromRoute] Guid packId,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    CancellationToken cancellationToken)
{
    var tenantId = tenantResolver.Resolve(context);

    return await repository.GetPackByIdAsync(tenantId, packId, cancellationToken) is { } pack
        ? Results.Ok(PackResponse.FromDomain(pack))
        : Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack {packId} not found", packId, null));
}
|
||||
|
||||
/// <summary>
/// Looks up a pack by its (case-insensitive) name within the caller's tenant;
/// the name is lowercased before the repository lookup. 404 when absent.
/// </summary>
private static async Task<IResult> GetPackByName(
    HttpContext context,
    [FromRoute] string name,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    CancellationToken cancellationToken)
{
    var tenantId = tenantResolver.Resolve(context);
    var normalizedName = name.ToLowerInvariant();

    return await repository.GetPackByNameAsync(tenantId, normalizedName, cancellationToken) is { } pack
        ? Results.Ok(PackResponse.FromDomain(pack))
        : Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack '{name}' not found", null, null));
}
|
||||
|
||||
/// <summary>
/// Lists packs for the caller's tenant with optional project/status/search/tag filters
/// and offset pagination. An unrecognized status filter is silently ignored (no filter).
/// </summary>
private static async Task<IResult> ListPacks(
    HttpContext context,
    [FromQuery] string? projectId,
    [FromQuery] string? status,
    [FromQuery] string? search,
    [FromQuery] string? tag,
    [FromQuery] int? limit,
    [FromQuery] int? offset,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    CancellationToken cancellationToken)
{
    var tenantId = tenantResolver.Resolve(context);
    // Clamp rather than Math.Min: a negative ?limit= would otherwise slip past the
    // MaxLimit cap and reach the repository; likewise a negative offset.
    var effectiveLimit = Math.Clamp(limit ?? DefaultLimit, 1, MaxLimit);
    var effectiveOffset = Math.Max(offset ?? 0, 0);

    PackStatus? statusFilter = null;
    if (!string.IsNullOrEmpty(status) && Enum.TryParse<PackStatus>(status, true, out var parsed))
    {
        statusFilter = parsed;
    }

    var packs = await repository.ListPacksAsync(
        tenantId, projectId, statusFilter, search, tag,
        effectiveLimit, effectiveOffset, cancellationToken);

    var totalCount = await repository.CountPacksAsync(
        tenantId, projectId, statusFilter, search, tag, cancellationToken);

    var responses = packs.Select(PackResponse.FromDomain).ToList();
    // A full page implies there may be more results; the cursor is the next offset.
    var nextCursor = responses.Count == effectiveLimit
        ? (effectiveOffset + effectiveLimit).ToString()
        : null;

    return Results.Ok(new PackListResponse(responses, totalCount, nextCursor));
}
|
||||
|
||||
/// <summary>
/// Applies a partial update to a pack's mutable metadata. 404 when the pack does not
/// exist, 409 when it is in a terminal status. Null request fields are skipped.
/// NOTE(review): because of the null-coalescing merge below, callers can never clear
/// a field back to null via this endpoint — confirm that is the intended PATCH contract.
/// </summary>
private static async Task<IResult> UpdatePack(
    HttpContext context,
    [FromRoute] Guid packId,
    [FromBody] UpdatePackRequest request,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    [FromServices] TimeProvider timeProvider,
    CancellationToken cancellationToken)
{
    var tenantId = tenantResolver.Resolve(context);
    // Fall back to "system" for unauthenticated callers.
    var actor = context.User?.Identity?.Name ?? "system";
    var now = timeProvider.GetUtcNow();

    var pack = await repository.GetPackByIdAsync(tenantId, packId, cancellationToken);
    if (pack is null)
    {
        return Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack {packId} not found", packId, null));
    }

    if (pack.IsTerminal)
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "terminal_status", "Cannot update a pack in terminal status", packId, null));
    }

    // Merge: each null request field keeps the pack's current value.
    var updated = pack with
    {
        DisplayName = request.DisplayName ?? pack.DisplayName,
        Description = request.Description ?? pack.Description,
        Metadata = request.Metadata ?? pack.Metadata,
        Tags = request.Tags ?? pack.Tags,
        IconUri = request.IconUri ?? pack.IconUri,
        UpdatedAt = now,
        UpdatedBy = actor
    };

    await repository.UpdatePackAsync(updated, cancellationToken);

    return Results.Ok(PackResponse.FromDomain(updated));
}
|
||||
|
||||
/// <summary>
/// Transitions a pack between lifecycle statuses (publish, deprecate, archive).
/// Returns 400 for a missing or unknown status, 404 for a missing pack, and 409 for
/// a disallowed transition. Publishing stamps PublishedAt/PublishedBy.
/// </summary>
private static async Task<IResult> UpdatePackStatus(
    HttpContext context,
    [FromRoute] Guid packId,
    [FromBody] UpdatePackStatusRequest request,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    [FromServices] TimeProvider timeProvider,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(request.Status))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "Status is required", packId, null));
    }

    // Enum.TryParse alone would accept any numeric string (e.g. "42") and yield an
    // undefined PackStatus value, so additionally require a defined member.
    if (!Enum.TryParse<PackStatus>(request.Status, true, out var newStatus)
        || !Enum.IsDefined(newStatus))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_status", $"Invalid status: {request.Status}", packId, null));
    }

    var tenantId = tenantResolver.Resolve(context);
    // Fall back to "system" for unauthenticated callers.
    var actor = context.User?.Identity?.Name ?? "system";
    var now = timeProvider.GetUtcNow();

    var pack = await repository.GetPackByIdAsync(tenantId, packId, cancellationToken);
    if (pack is null)
    {
        return Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack {packId} not found", packId, null));
    }

    // Validate status transition against the domain's own capability flags.
    var canTransition = newStatus switch
    {
        PackStatus.Published => pack.CanPublish,
        PackStatus.Deprecated => pack.CanDeprecate,
        PackStatus.Archived => pack.CanArchive,
        PackStatus.Draft => false, // Cannot go back to draft
        _ => false
    };

    if (!canTransition)
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "invalid_transition", $"Cannot transition from {pack.Status} to {newStatus}", packId, null));
    }

    // Publishing stamps the publish metadata; other transitions preserve it.
    DateTimeOffset? publishedAt = newStatus == PackStatus.Published ? now : pack.PublishedAt;
    string? publishedBy = newStatus == PackStatus.Published ? actor : pack.PublishedBy;

    await repository.UpdatePackStatusAsync(
        tenantId, packId, newStatus, actor, publishedAt, publishedBy, cancellationToken);

    // NOTE(review): the response is built via pack.WithStatus rather than re-reading
    // the row — presumably WithStatus mirrors the repository update exactly; confirm.
    var updated = pack.WithStatus(newStatus, actor, now);
    return Results.Ok(PackResponse.FromDomain(updated));
}
|
||||
|
||||
/// <summary>
/// Deletes a pack, permitted only while it is an empty draft: 404 when missing,
/// 409 when not a draft, when it has versions, or when the repository delete fails.
/// </summary>
private static async Task<IResult> DeletePack(
    HttpContext context,
    [FromRoute] Guid packId,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    CancellationToken cancellationToken)
{
    var tenantId = tenantResolver.Resolve(context);
    var pack = await repository.GetPackByIdAsync(tenantId, packId, cancellationToken);

    if (pack is null)
    {
        return Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack {packId} not found", packId, null));
    }

    // Refuse anything that is not an empty draft; order matters for the error code
    // reported when both conditions hold (status is checked first, as before).
    var conflict = pack switch
    {
        { Status: not PackStatus.Draft } => new PackRegistryErrorResponse(
            "not_draft", "Only draft packs can be deleted", packId, null),
        { VersionCount: > 0 } => new PackRegistryErrorResponse(
            "has_versions", "Cannot delete pack with versions", packId, null),
        _ => null,
    };

    if (conflict is not null)
    {
        return Results.Conflict(conflict);
    }

    if (!await repository.DeletePackAsync(tenantId, packId, cancellationToken))
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "delete_failed", "Failed to delete pack", packId, null));
    }

    return Results.NoContent();
}
|
||||
|
||||
// ========== Pack Version Endpoints ==========
|
||||
|
||||
/// <summary>
/// Creates a new version under an existing pack. Validates required fields (400),
/// the pack's existence (404), its ability to accept versions and version uniqueness
/// (409), then persists the version and bumps the pack's version count.
/// NOTE(review): the version insert and the pack update are two separate repository
/// calls — a failure between them leaves the count stale; confirm whether the
/// repository wraps these in a transaction.
/// </summary>
private static async Task<IResult> CreatePackVersion(
    HttpContext context,
    [FromRoute] Guid packId,
    [FromBody] CreatePackVersionRequest request,
    [FromServices] TenantResolver tenantResolver,
    [FromServices] IPackRegistryRepository repository,
    [FromServices] TimeProvider timeProvider,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(request.Version))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "Version is required", packId, null));
    }

    if (string.IsNullOrWhiteSpace(request.ArtifactUri))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "ArtifactUri is required", packId, null));
    }

    if (string.IsNullOrWhiteSpace(request.ArtifactDigest))
    {
        return Results.BadRequest(new PackRegistryErrorResponse(
            "invalid_request", "ArtifactDigest is required", packId, null));
    }

    var tenantId = tenantResolver.Resolve(context);
    // Fall back to "system" for unauthenticated callers.
    var actor = context.User?.Identity?.Name ?? "system";
    var now = timeProvider.GetUtcNow();

    var pack = await repository.GetPackByIdAsync(tenantId, packId, cancellationToken);
    if (pack is null)
    {
        return Results.NotFound(new PackRegistryErrorResponse(
            "not_found", $"Pack {packId} not found", packId, null));
    }

    if (!pack.CanAddVersion)
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "cannot_add_version", $"Cannot add version to pack in {pack.Status} status", packId, null));
    }

    // Check for duplicate version (check-then-create; a unique constraint should back this).
    var existing = await repository.GetVersionAsync(tenantId, packId, request.Version, cancellationToken);
    if (existing is not null)
    {
        return Results.Conflict(new PackRegistryErrorResponse(
            "duplicate_version", $"Version {request.Version} already exists", packId, existing.PackVersionId));
    }

    var version = PackVersion.Create(
        packVersionId: Guid.NewGuid(),
        tenantId: tenantId,
        packId: packId,
        version: request.Version,
        semVer: request.SemVer,
        artifactUri: request.ArtifactUri,
        artifactDigest: request.ArtifactDigest,
        artifactMimeType: request.ArtifactMimeType,
        artifactSizeBytes: request.ArtifactSizeBytes,
        manifestJson: request.ManifestJson,
        manifestDigest: request.ManifestDigest,
        releaseNotes: request.ReleaseNotes,
        minEngineVersion: request.MinEngineVersion,
        dependencies: request.Dependencies,
        createdBy: actor,
        metadata: request.Metadata,
        createdAt: now);

    await repository.CreateVersionAsync(version, cancellationToken);

    // Update pack version count
    var updatedPack = pack.WithVersionAdded(request.Version, actor, now);
    await repository.UpdatePackAsync(updatedPack, cancellationToken);

    return Results.Created(
        $"/api/v1/orchestrator/registry/packs/{packId}/versions/{version.PackVersionId}",
        PackVersionResponse.FromDomain(version));
}
|
||||
|
||||
private static async Task<IResult> ListVersions(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromQuery] string? status,
|
||||
[FromQuery] int? limit,
|
||||
[FromQuery] int? offset,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? DefaultLimit, MaxLimit);
|
||||
var effectiveOffset = offset ?? 0;
|
||||
|
||||
PackVersionStatus? statusFilter = null;
|
||||
if (!string.IsNullOrEmpty(status) && Enum.TryParse<PackVersionStatus>(status, true, out var parsed))
|
||||
{
|
||||
statusFilter = parsed;
|
||||
}
|
||||
|
||||
var versions = await repository.ListVersionsAsync(
|
||||
tenantId, packId, statusFilter, effectiveLimit, effectiveOffset, cancellationToken);
|
||||
|
||||
var totalCount = await repository.CountVersionsAsync(
|
||||
tenantId, packId, statusFilter, cancellationToken);
|
||||
|
||||
var responses = versions.Select(PackVersionResponse.FromDomain).ToList();
|
||||
var nextCursor = responses.Count == effectiveLimit
|
||||
? (effectiveOffset + effectiveLimit).ToString()
|
||||
: null;
|
||||
|
||||
return Results.Ok(new PackVersionListResponse(responses, totalCount, nextCursor));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromRoute] string version,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var packVersion = await repository.GetVersionAsync(tenantId, packId, version, cancellationToken);
|
||||
|
||||
if (packVersion is null)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"Version {version} not found for pack {packId}", packId, null));
|
||||
}
|
||||
|
||||
return Results.Ok(PackVersionResponse.FromDomain(packVersion));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetLatestVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromQuery] bool? includePrerelease,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var version = await repository.GetLatestVersionAsync(
|
||||
tenantId, packId, includePrerelease ?? false, cancellationToken);
|
||||
|
||||
if (version is null)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"No published versions found for pack {packId}", packId, null));
|
||||
}
|
||||
|
||||
return Results.Ok(PackVersionResponse.FromDomain(version));
|
||||
}
|
||||
|
||||
private static async Task<IResult> UpdateVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromRoute] Guid packVersionId,
|
||||
[FromBody] UpdatePackVersionRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var actor = context.User?.Identity?.Name ?? "system";
|
||||
var now = timeProvider.GetUtcNow();
|
||||
|
||||
var version = await repository.GetVersionByIdAsync(tenantId, packVersionId, cancellationToken);
|
||||
if (version is null || version.PackId != packId)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"Version {packVersionId} not found", packId, packVersionId));
|
||||
}
|
||||
|
||||
if (version.IsTerminal)
|
||||
{
|
||||
return Results.Conflict(new PackRegistryErrorResponse(
|
||||
"terminal_status", "Cannot update version in terminal status", packId, packVersionId));
|
||||
}
|
||||
|
||||
var updated = version with
|
||||
{
|
||||
ReleaseNotes = request.ReleaseNotes ?? version.ReleaseNotes,
|
||||
Metadata = request.Metadata ?? version.Metadata,
|
||||
UpdatedAt = now,
|
||||
UpdatedBy = actor
|
||||
};
|
||||
|
||||
await repository.UpdateVersionAsync(updated, cancellationToken);
|
||||
|
||||
return Results.Ok(PackVersionResponse.FromDomain(updated));
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Transitions a pack version between lifecycle states (Published / Deprecated / Archived),
    /// validating the requested transition against the version's current status before persisting.
    /// Draft is never a valid transition target.
    /// </summary>
    private static async Task<IResult> UpdateVersionStatus(
        HttpContext context,
        [FromRoute] Guid packId,
        [FromRoute] Guid packVersionId,
        [FromBody] UpdatePackVersionStatusRequest request,
        [FromServices] TenantResolver tenantResolver,
        [FromServices] IPackRegistryRepository repository,
        [FromServices] TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        // Reject an absent status before attempting the enum parse so the caller gets
        // the more specific "invalid_request" code for missing input.
        if (string.IsNullOrWhiteSpace(request.Status))
        {
            return Results.BadRequest(new PackRegistryErrorResponse(
                "invalid_request", "Status is required", packId, packVersionId));
        }

        // Case-insensitive parse of the target lifecycle state.
        if (!Enum.TryParse<PackVersionStatus>(request.Status, true, out var newStatus))
        {
            return Results.BadRequest(new PackRegistryErrorResponse(
                "invalid_status", $"Invalid status: {request.Status}", packId, packVersionId));
        }

        var tenantId = tenantResolver.Resolve(context);
        // Falls back to "system" when the request carries no authenticated identity.
        var actor = context.User?.Identity?.Name ?? "system";
        var now = timeProvider.GetUtcNow();

        // Lookup is by version id; the PackId comparison guards against a version id
        // that exists but belongs to a different pack in the same tenant.
        var version = await repository.GetVersionByIdAsync(tenantId, packVersionId, cancellationToken);
        if (version is null || version.PackId != packId)
        {
            return Results.NotFound(new PackRegistryErrorResponse(
                "not_found", $"Version {packVersionId} not found", packId, packVersionId));
        }

        // Validate status transition
        // Legality of each transition is delegated to the domain model's Can* flags;
        // moving (back) to Draft is disallowed unconditionally.
        var canTransition = newStatus switch
        {
            PackVersionStatus.Published => version.CanPublish,
            PackVersionStatus.Deprecated => version.CanDeprecate,
            PackVersionStatus.Archived => version.CanArchive,
            PackVersionStatus.Draft => false,
            _ => false
        };

        if (!canTransition)
        {
            return Results.Conflict(new PackRegistryErrorResponse(
                "invalid_transition", $"Cannot transition from {version.Status} to {newStatus}", packId, packVersionId));
        }

        // Deprecation is the only transition that demands an explanatory reason.
        if (newStatus == PackVersionStatus.Deprecated && string.IsNullOrWhiteSpace(request.DeprecationReason))
        {
            return Results.BadRequest(new PackRegistryErrorResponse(
                "invalid_request", "DeprecationReason is required when deprecating", packId, packVersionId));
        }

        // Stamp publish/deprecate audit fields only for the matching transition;
        // otherwise carry the previously stored values through unchanged.
        DateTimeOffset? publishedAt = newStatus == PackVersionStatus.Published ? now : version.PublishedAt;
        string? publishedBy = newStatus == PackVersionStatus.Published ? actor : version.PublishedBy;
        DateTimeOffset? deprecatedAt = newStatus == PackVersionStatus.Deprecated ? now : version.DeprecatedAt;
        string? deprecatedBy = newStatus == PackVersionStatus.Deprecated ? actor : version.DeprecatedBy;

        await repository.UpdateVersionStatusAsync(
            tenantId, packVersionId, newStatus, actor,
            publishedAt, publishedBy,
            deprecatedAt, deprecatedBy, request.DeprecationReason,
            cancellationToken);

        // NOTE(review): the response object is rebuilt via domain helpers instead of being
        // re-read from the repository — confirm WithStatus also sets PublishedAt/PublishedBy
        // for the Published case so the response matches what was just persisted.
        var updated = newStatus == PackVersionStatus.Deprecated
            ? version.WithDeprecation(actor, request.DeprecationReason, now)
            : version.WithStatus(newStatus, actor, now);

        return Results.Ok(PackVersionResponse.FromDomain(updated));
    }
|
||||
|
||||
private static async Task<IResult> SignVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromRoute] Guid packVersionId,
|
||||
[FromBody] SignPackVersionRequest request,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
[FromServices] TimeProvider timeProvider,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(request.SignatureUri))
|
||||
{
|
||||
return Results.BadRequest(new PackRegistryErrorResponse(
|
||||
"invalid_request", "SignatureUri is required", packId, packVersionId));
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.SignatureAlgorithm))
|
||||
{
|
||||
return Results.BadRequest(new PackRegistryErrorResponse(
|
||||
"invalid_request", "SignatureAlgorithm is required", packId, packVersionId));
|
||||
}
|
||||
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var actor = context.User?.Identity?.Name ?? "system";
|
||||
var now = timeProvider.GetUtcNow();
|
||||
|
||||
var version = await repository.GetVersionByIdAsync(tenantId, packVersionId, cancellationToken);
|
||||
if (version is null || version.PackId != packId)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"Version {packVersionId} not found", packId, packVersionId));
|
||||
}
|
||||
|
||||
if (version.IsSigned)
|
||||
{
|
||||
return Results.Conflict(new PackRegistryErrorResponse(
|
||||
"already_signed", "Version is already signed", packId, packVersionId));
|
||||
}
|
||||
|
||||
await repository.UpdateVersionSignatureAsync(
|
||||
tenantId, packVersionId,
|
||||
request.SignatureUri, request.SignatureAlgorithm,
|
||||
actor, now,
|
||||
cancellationToken);
|
||||
|
||||
var signed = version.WithSignature(request.SignatureUri, request.SignatureAlgorithm, actor, now);
|
||||
return Results.Ok(PackVersionResponse.FromDomain(signed));
|
||||
}
|
||||
|
||||
private static async Task<IResult> DownloadVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromRoute] Guid packVersionId,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var version = await repository.GetVersionByIdAsync(tenantId, packVersionId, cancellationToken);
|
||||
if (version is null || version.PackId != packId)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"Version {packVersionId} not found", packId, packVersionId));
|
||||
}
|
||||
|
||||
if (version.Status != PackVersionStatus.Published)
|
||||
{
|
||||
return Results.Conflict(new PackRegistryErrorResponse(
|
||||
"not_published", "Only published versions can be downloaded", packId, packVersionId));
|
||||
}
|
||||
|
||||
// Increment download count
|
||||
await repository.IncrementDownloadCountAsync(tenantId, packVersionId, cancellationToken);
|
||||
|
||||
return Results.Ok(new PackVersionDownloadResponse(
|
||||
version.PackVersionId,
|
||||
version.Version,
|
||||
version.ArtifactUri,
|
||||
version.ArtifactDigest,
|
||||
version.ArtifactMimeType,
|
||||
version.ArtifactSizeBytes,
|
||||
version.SignatureUri,
|
||||
version.SignatureAlgorithm));
|
||||
}
|
||||
|
||||
private static async Task<IResult> DeleteVersion(
|
||||
HttpContext context,
|
||||
[FromRoute] Guid packId,
|
||||
[FromRoute] Guid packVersionId,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
|
||||
var version = await repository.GetVersionByIdAsync(tenantId, packVersionId, cancellationToken);
|
||||
if (version is null || version.PackId != packId)
|
||||
{
|
||||
return Results.NotFound(new PackRegistryErrorResponse(
|
||||
"not_found", $"Version {packVersionId} not found", packId, packVersionId));
|
||||
}
|
||||
|
||||
if (version.Status != PackVersionStatus.Draft)
|
||||
{
|
||||
return Results.Conflict(new PackRegistryErrorResponse(
|
||||
"not_draft", "Only draft versions can be deleted", packId, packVersionId));
|
||||
}
|
||||
|
||||
var deleted = await repository.DeleteVersionAsync(tenantId, packVersionId, cancellationToken);
|
||||
if (!deleted)
|
||||
{
|
||||
return Results.Conflict(new PackRegistryErrorResponse(
|
||||
"delete_failed", "Failed to delete version", packId, packVersionId));
|
||||
}
|
||||
|
||||
return Results.NoContent();
|
||||
}
|
||||
|
||||
// ========== Search and Discovery Endpoints ==========
|
||||
|
||||
private static async Task<IResult> SearchPacks(
|
||||
HttpContext context,
|
||||
[FromQuery] string query,
|
||||
[FromQuery] string? status,
|
||||
[FromQuery] int? limit,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(query))
|
||||
{
|
||||
return Results.BadRequest(new PackRegistryErrorResponse(
|
||||
"invalid_request", "Query is required", null, null));
|
||||
}
|
||||
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? DefaultLimit, MaxLimit);
|
||||
|
||||
PackStatus? statusFilter = null;
|
||||
if (!string.IsNullOrEmpty(status) && Enum.TryParse<PackStatus>(status, true, out var parsed))
|
||||
{
|
||||
statusFilter = parsed;
|
||||
}
|
||||
|
||||
var packs = await repository.SearchPacksAsync(
|
||||
tenantId, query, statusFilter, effectiveLimit, cancellationToken);
|
||||
|
||||
var responses = packs.Select(PackResponse.FromDomain).ToList();
|
||||
return Results.Ok(new PackSearchResponse(responses, query));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetPacksByTag(
|
||||
HttpContext context,
|
||||
[FromRoute] string tag,
|
||||
[FromQuery] int? limit,
|
||||
[FromQuery] int? offset,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? DefaultLimit, MaxLimit);
|
||||
var effectiveOffset = offset ?? 0;
|
||||
|
||||
var packs = await repository.GetPacksByTagAsync(
|
||||
tenantId, tag, effectiveLimit, effectiveOffset, cancellationToken);
|
||||
|
||||
var responses = packs.Select(PackResponse.FromDomain).ToList();
|
||||
return Results.Ok(new PackListResponse(responses, responses.Count, null));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetPopularPacks(
|
||||
HttpContext context,
|
||||
[FromQuery] int? limit,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? 10, 50);
|
||||
|
||||
var packs = await repository.GetPopularPacksAsync(tenantId, effectiveLimit, cancellationToken);
|
||||
|
||||
var responses = packs.Select(PackResponse.FromDomain).ToList();
|
||||
return Results.Ok(new PackListResponse(responses, responses.Count, null));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetRecentPacks(
|
||||
HttpContext context,
|
||||
[FromQuery] int? limit,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var effectiveLimit = Math.Min(limit ?? 10, 50);
|
||||
|
||||
var packs = await repository.GetRecentPacksAsync(tenantId, effectiveLimit, cancellationToken);
|
||||
|
||||
var responses = packs.Select(PackResponse.FromDomain).ToList();
|
||||
return Results.Ok(new PackListResponse(responses, responses.Count, null));
|
||||
}
|
||||
|
||||
private static async Task<IResult> GetStats(
|
||||
HttpContext context,
|
||||
[FromServices] TenantResolver tenantResolver,
|
||||
[FromServices] IPackRegistryRepository repository,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var tenantId = tenantResolver.Resolve(context);
|
||||
var stats = await repository.GetStatsAsync(tenantId, cancellationToken);
|
||||
|
||||
return Results.Ok(new PackRegistryStatsResponse(
|
||||
stats.TotalPacks,
|
||||
stats.PublishedPacks,
|
||||
stats.TotalVersions,
|
||||
stats.PublishedVersions,
|
||||
stats.TotalDownloads,
|
||||
stats.LastUpdatedAt));
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ using StellaOps.Orchestrator.Infrastructure;
|
||||
using StellaOps.Orchestrator.WebService.Endpoints;
|
||||
using StellaOps.Orchestrator.WebService.Services;
|
||||
using StellaOps.Orchestrator.WebService.Streaming;
|
||||
using StellaOps.Telemetry.Core;
|
||||
|
||||
var builder = WebApplication.CreateBuilder(args);
|
||||
|
||||
@@ -10,6 +11,36 @@ builder.Services.AddRouting(options => options.LowercaseUrls = true);
|
||||
builder.Services.AddEndpointsApiExplorer();
|
||||
builder.Services.AddOpenApi();
|
||||
|
||||
// Register StellaOps telemetry with OpenTelemetry integration
|
||||
// Per ORCH-OBS-50-001: Wire StellaOps.Telemetry.Core into orchestrator host
|
||||
builder.Services.AddStellaOpsTelemetry(
|
||||
builder.Configuration,
|
||||
serviceName: "StellaOps.Orchestrator",
|
||||
serviceVersion: "1.0.0",
|
||||
configureMetrics: meterBuilder =>
|
||||
{
|
||||
// Include the existing orchestrator metrics meter
|
||||
meterBuilder.AddMeter("StellaOps.Orchestrator");
|
||||
meterBuilder.AddMeter("StellaOps.GoldenSignals");
|
||||
},
|
||||
configureTracing: tracerBuilder =>
|
||||
{
|
||||
// Add orchestrator activity source for custom spans
|
||||
tracerBuilder.AddSource("StellaOps.Orchestrator");
|
||||
});
|
||||
|
||||
// Register telemetry context propagation
|
||||
builder.Services.AddTelemetryContextPropagation();
|
||||
|
||||
// Register golden signal metrics for scheduler instrumentation
|
||||
builder.Services.AddGoldenSignalMetrics();
|
||||
|
||||
// Register incident mode for enhanced telemetry during incidents
|
||||
builder.Services.AddIncidentMode(builder.Configuration);
|
||||
|
||||
// Register sealed-mode telemetry for air-gapped operation
|
||||
builder.Services.AddSealedModeTelemetry(builder.Configuration);
|
||||
|
||||
// Register Orchestrator infrastructure (Postgres repositories, data source)
|
||||
builder.Services.AddOrchestratorInfrastructure(builder.Configuration);
|
||||
|
||||
@@ -35,6 +66,10 @@ if (app.Environment.IsDevelopment())
|
||||
app.MapOpenApi();
|
||||
}
|
||||
|
||||
// Enable telemetry context propagation (extracts tenant/actor/correlation from headers)
|
||||
// Per ORCH-OBS-50-001
|
||||
app.UseStellaOpsTelemetryContext();
|
||||
|
||||
// Enable WebSocket support for streaming endpoints
|
||||
app.UseWebSockets();
|
||||
|
||||
@@ -53,6 +88,7 @@ app.MapRunEndpoints();
|
||||
app.MapJobEndpoints();
|
||||
app.MapDagEndpoints();
|
||||
app.MapPackRunEndpoints();
|
||||
app.MapPackRegistryEndpoints();
|
||||
|
||||
// Register streaming endpoints
|
||||
app.MapStreamEndpoints();
|
||||
|
||||
@@ -26,14 +26,15 @@
|
||||
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
|
||||
|
||||
|
||||
<ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/>
|
||||
|
||||
|
||||
|
||||
|
||||
<ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/>
|
||||
|
||||
|
||||
|
||||
<ProjectReference Include="..\..\..\Telemetry\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core\StellaOps.Telemetry.Core.csproj"/>
|
||||
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user