Merge branch 'main' of https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled

This commit is contained in:
master
2025-12-11 11:00:51 +02:00
596 changed files with 95428 additions and 15743 deletions

View File

@@ -1,8 +1,6 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.AirGap.Controller.Options;
using StellaOps.AirGap.Controller.Services;
using StellaOps.AirGap.Controller.Stores;
@@ -15,7 +13,6 @@ public static class AirGapControllerServiceCollectionExtensions
{
public static IServiceCollection AddAirGapController(this IServiceCollection services, IConfiguration configuration)
{
services.Configure<AirGapControllerMongoOptions>(configuration.GetSection("AirGap:Mongo"));
services.Configure<AirGapStartupOptions>(configuration.GetSection("AirGap:Startup"));
services.AddSingleton<AirGapTelemetry>();
@@ -28,19 +25,9 @@ public static class AirGapControllerServiceCollectionExtensions
services.AddSingleton<IAirGapStateStore>(sp =>
{
var opts = sp.GetRequiredService<IOptions<AirGapControllerMongoOptions>>().Value;
var logger = sp.GetRequiredService<ILogger<MongoAirGapStateStore>>();
if (string.IsNullOrWhiteSpace(opts.ConnectionString))
{
logger.LogInformation("AirGap controller using in-memory state store (Mongo connection string not configured).");
return new InMemoryAirGapStateStore();
}
var mongoClient = new MongoClient(opts.ConnectionString);
var database = mongoClient.GetDatabase(string.IsNullOrWhiteSpace(opts.Database) ? "stellaops_airgap" : opts.Database);
var collection = MongoAirGapStateStore.EnsureCollection(database);
logger.LogInformation("AirGap controller using Mongo state store (db={Database}, collection={Collection}).", opts.Database, opts.Collection);
return new MongoAirGapStateStore(collection);
var logger = sp.GetRequiredService<ILogger<InMemoryAirGapStateStore>>();
logger.LogWarning("AirGap controller using in-memory state store; state resets on process restart.");
return new InMemoryAirGapStateStore();
});
services.AddHostedService<AirGapStartupDiagnosticsHostedService>();
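
With the Mongo branch gone, the registration is unconditional. A minimal sketch of resolving the store under the new wiring (host setup and namespace imports are illustrative, not part of this commit):

    using System.Collections.Generic;
    using Microsoft.Extensions.Configuration;
    using Microsoft.Extensions.DependencyInjection;
    using StellaOps.AirGap.Controller.Stores;

    // Hypothetical wiring to show the post-commit behavior; nothing here
    // configures Mongo because the "AirGap:Mongo" section no longer exists.
    var configuration = new ConfigurationBuilder()
        .AddInMemoryCollection(new Dictionary<string, string?>())
        .Build();

    var services = new ServiceCollection();
    services.AddLogging();
    services.AddAirGapController(configuration);

    using var provider = services.BuildServiceProvider();

    // Every resolution now yields the in-memory store; state is process-local
    // and resets on restart, matching the warning the factory logs.
    var store = provider.GetRequiredService<IAirGapStateStore>();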

View File

@@ -1,22 +0,0 @@
namespace StellaOps.AirGap.Controller.Options;
/// <summary>
/// Mongo configuration for the air-gap controller state store.
/// </summary>
public sealed class AirGapControllerMongoOptions
{
/// <summary>
/// Mongo connection string; when missing, the controller falls back to the in-memory store.
/// </summary>
public string? ConnectionString { get; set; }
/// <summary>
/// Database name. Default: "stellaops_airgap".
/// </summary>
public string Database { get; set; } = "stellaops_airgap";
/// <summary>
/// Collection name for state documents. Default: "airgap_state".
/// </summary>
public string Collection { get; set; } = "airgap_state";
}

View File

@@ -9,7 +9,4 @@
<ProjectReference Include="../StellaOps.AirGap.Time/StellaOps.AirGap.Time.csproj" />
<ProjectReference Include="../StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
</Project>

View File

@@ -1,17 +1,18 @@
using System.Collections.Concurrent;
using StellaOps.AirGap.Controller.Domain;
namespace StellaOps.AirGap.Controller.Stores;
public sealed class InMemoryAirGapStateStore : IAirGapStateStore
{
private readonly Dictionary<string, AirGapState> _states = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<string, AirGapState> _states = new(StringComparer.Ordinal);
public Task<AirGapState> GetAsync(string tenantId, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
if (_states.TryGetValue(tenantId, out var state))
{
return Task.FromResult(state);
return Task.FromResult(state with { });
}
return Task.FromResult(new AirGapState { TenantId = tenantId });
@@ -20,7 +21,7 @@ public sealed class InMemoryAirGapStateStore : IAirGapStateStore
public Task SetAsync(AirGapState state, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
_states[state.TenantId] = state;
_states[state.TenantId] = state with { };
return Task.CompletedTask;
}
}
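
The `with { }` expressions are defensive shallow copies: assuming `AirGapState` is a C# record (consistent with the `with` syntax; its real shape has more members than used here), the store hands back a distinct instance on every read, so callers cannot mutate the stored snapshot through a shared reference. A sketch of the observable behavior:

    using StellaOps.AirGap.Controller.Domain;
    using StellaOps.AirGap.Controller.Stores;

    var store = new InMemoryAirGapStateStore();

    // Assumes AirGapState is a record with init-only members.
    var state = new AirGapState { TenantId = "tenant-a", Sealed = true };
    await store.SetAsync(state);

    var fetched = await store.GetAsync("tenant-a");
    Console.WriteLine(ReferenceEquals(state, fetched)); // False: the store returned a clone
    Console.WriteLine(state == fetched);                // True: record value equality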

View File

@@ -1,156 +0,0 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.AirGap.Controller.Domain;
using StellaOps.AirGap.Time.Models;
namespace StellaOps.AirGap.Controller.Stores;
/// <summary>
/// Mongo-backed air-gap state store; single document per tenant.
/// </summary>
internal sealed class MongoAirGapStateStore : IAirGapStateStore
{
private readonly IMongoCollection<AirGapStateDocument> _collection;
public MongoAirGapStateStore(IMongoCollection<AirGapStateDocument> collection)
{
_collection = collection;
}
public async Task<AirGapState> GetAsync(string tenantId, CancellationToken cancellationToken = default)
{
var filter = Builders<AirGapStateDocument>.Filter.And(
Builders<AirGapStateDocument>.Filter.Eq(x => x.TenantId, tenantId),
Builders<AirGapStateDocument>.Filter.Eq(x => x.Id, AirGapState.SingletonId));
var doc = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return doc?.ToDomain() ?? new AirGapState { TenantId = tenantId };
}
public async Task SetAsync(AirGapState state, CancellationToken cancellationToken = default)
{
var doc = AirGapStateDocument.FromDomain(state);
var filter = Builders<AirGapStateDocument>.Filter.And(
Builders<AirGapStateDocument>.Filter.Eq(x => x.TenantId, state.TenantId),
Builders<AirGapStateDocument>.Filter.Eq(x => x.Id, AirGapState.SingletonId));
var options = new ReplaceOptions { IsUpsert = true };
await _collection.ReplaceOneAsync(filter, doc, options, cancellationToken).ConfigureAwait(false);
}
internal static IMongoCollection<AirGapStateDocument> EnsureCollection(IMongoDatabase database)
{
var collectionName = "airgap_state";
var exists = database.ListCollectionNames().ToList().Contains(collectionName);
if (!exists)
{
database.CreateCollection(collectionName);
}
var collection = database.GetCollection<AirGapStateDocument>(collectionName);
var keys = Builders<AirGapStateDocument>.IndexKeys
.Ascending(x => x.TenantId)
.Ascending(x => x.Id);
var model = new CreateIndexModel<AirGapStateDocument>(keys, new CreateIndexOptions { Unique = true });
collection.Indexes.CreateOne(model);
return collection;
}
}
internal sealed class AirGapStateDocument
{
[BsonId]
public string Id { get; init; } = AirGapState.SingletonId;
[BsonElement("tenant_id")]
public string TenantId { get; init; } = "default";
[BsonElement("sealed")]
public bool Sealed { get; init; } = false;
[BsonElement("policy_hash")]
public string? PolicyHash { get; init; } = null;
[BsonElement("time_anchor")]
public AirGapTimeAnchorDocument TimeAnchor { get; init; } = new();
[BsonElement("staleness_budget")]
public StalenessBudgetDocument StalenessBudget { get; init; } = new();
[BsonElement("last_transition_at")]
public DateTimeOffset LastTransitionAt { get; init; } = DateTimeOffset.MinValue;
public AirGapState ToDomain() => new()
{
TenantId = TenantId,
Sealed = Sealed,
PolicyHash = PolicyHash,
TimeAnchor = TimeAnchor.ToDomain(),
StalenessBudget = StalenessBudget.ToDomain(),
LastTransitionAt = LastTransitionAt
};
public static AirGapStateDocument FromDomain(AirGapState state) => new()
{
TenantId = state.TenantId,
Sealed = state.Sealed,
PolicyHash = state.PolicyHash,
TimeAnchor = AirGapTimeAnchorDocument.FromDomain(state.TimeAnchor),
StalenessBudget = StalenessBudgetDocument.FromDomain(state.StalenessBudget),
LastTransitionAt = state.LastTransitionAt
};
}
internal sealed class AirGapTimeAnchorDocument
{
[BsonElement("anchor_time")]
public DateTimeOffset AnchorTime { get; init; } = DateTimeOffset.MinValue;
[BsonElement("source")]
public string Source { get; init; } = "unknown";
[BsonElement("format")]
public string Format { get; init; } = "unknown";
[BsonElement("signature_fp")]
public string SignatureFingerprint { get; init; } = string.Empty;
[BsonElement("token_digest")]
public string TokenDigest { get; init; } = string.Empty;
public StellaOps.AirGap.Time.Models.TimeAnchor ToDomain() =>
new(AnchorTime, Source, Format, SignatureFingerprint, TokenDigest);
public static AirGapTimeAnchorDocument FromDomain(StellaOps.AirGap.Time.Models.TimeAnchor anchor) => new()
{
AnchorTime = anchor.AnchorTime,
Source = anchor.Source,
Format = anchor.Format,
SignatureFingerprint = anchor.SignatureFingerprint,
TokenDigest = anchor.TokenDigest
};
}
internal sealed class StalenessBudgetDocument
{
[BsonElement("warning_seconds")]
public long WarningSeconds { get; init; } = StalenessBudget.Default.WarningSeconds;
[BsonElement("breach_seconds")]
public long BreachSeconds { get; init; } = StalenessBudget.Default.BreachSeconds;
public StalenessBudget ToDomain() => new(WarningSeconds, BreachSeconds);
public static StalenessBudgetDocument FromDomain(StalenessBudget budget) => new()
{
WarningSeconds = budget.WarningSeconds,
BreachSeconds = budget.BreachSeconds
};
}

View File

@@ -15,3 +15,6 @@
| AIRGAP-IMP-56-002 | DONE | Root rotation policy (dual approval) + trust store; integrated into import validator; tests passing. | 2025-11-20 |
| AIRGAP-IMP-57-001 | DONE | In-memory RLS bundle catalog/items repos + schema doc; deterministic ordering and tests passing. | 2025-11-20 |
| AIRGAP-TIME-57-001 | DONE | Staleness calc, loader/fixtures, TimeStatusService/store, sealed validator, Ed25519 Roughtime + RFC3161 SignedCms verification, APIs + config sample delivered; awaiting final trust roots. | 2025-11-20 |
| MR-T10.6.1 | DONE | Removed Mongo-backed air-gap state store; controller now uses in-memory store only. | 2025-12-11 |
| MR-T10.6.2 | DONE | DI simplified to register in-memory air-gap state store (no Mongo options or client). | 2025-12-11 |
| MR-T10.6.3 | DONE | Converted controller tests to in-memory store; dropped Mongo2Go dependency. | 2025-12-11 |
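
MR-T10.6.3's test conversion reduces to constructing the store directly; no Mongo2Go fixture or connection string is involved. A hypothetical xUnit shape (assertion details assumed):

    using System.Threading.Tasks;
    using StellaOps.AirGap.Controller.Domain;
    using StellaOps.AirGap.Controller.Stores;
    using Xunit;

    public sealed class InMemoryAirGapStateStoreTests
    {
        [Fact]
        public async Task SetAsync_then_GetAsync_round_trips_state()
        {
            var store = new InMemoryAirGapStateStore(); // no fixture to spin up

            await store.SetAsync(new AirGapState { TenantId = "tenant-a", Sealed = true });
            var fetched = await store.GetAsync("tenant-a");

            Assert.Equal("tenant-a", fetched.TenantId);
            Assert.True(fetched.Sealed);
        }
    }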

View File

@@ -0,0 +1,58 @@
using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Bulk;
namespace StellaOps.Attestor.Infrastructure.Bulk;
internal sealed class InMemoryBulkVerificationJobStore : IBulkVerificationJobStore
{
private readonly ConcurrentQueue<BulkVerificationJob> _queue = new();
private readonly ConcurrentDictionary<string, BulkVerificationJob> _jobs = new(StringComparer.OrdinalIgnoreCase);
public Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
_jobs[job.Id] = job;
_queue.Enqueue(job);
return Task.FromResult(job);
}
public Task<BulkVerificationJob?> GetAsync(string jobId, CancellationToken cancellationToken = default)
{
_jobs.TryGetValue(jobId, out var job);
return Task.FromResult(job);
}
public Task<BulkVerificationJob?> TryAcquireAsync(CancellationToken cancellationToken = default)
{
while (_queue.TryDequeue(out var job))
{
if (job.Status != BulkVerificationJobStatus.Queued)
{
continue;
}
job.Status = BulkVerificationJobStatus.Running;
job.StartedAt ??= DateTimeOffset.UtcNow;
return Task.FromResult<BulkVerificationJob?>(job);
}
return Task.FromResult<BulkVerificationJob?>(null);
}
public Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
_jobs[job.Id] = job;
return Task.FromResult(true);
}
public Task<int> CountQueuedAsync(CancellationToken cancellationToken = default)
{
var count = _jobs.Values.Count(j => j.Status == BulkVerificationJobStatus.Queued);
return Task.FromResult(count);
}
}
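
A hedged sketch of how a worker would consume this queue: `TryAcquireAsync` dequeues FIFO and flips the job to Running in place, and completion is reported back through `TryUpdateAsync`. The loop below is illustrative; the enum value `Completed` and the hosted-service shape are assumptions, not shown in this diff.

    using System;
    using System.Threading;
    using System.Threading.Tasks;
    using StellaOps.Attestor.Core.Bulk;

    // Hypothetical drain loop; only IBulkVerificationJobStore's members come
    // from this diff.
    static async Task DrainAsync(IBulkVerificationJobStore store, CancellationToken ct)
    {
        while (!ct.IsCancellationRequested)
        {
            var job = await store.TryAcquireAsync(ct);
            if (job is null)
            {
                await Task.Delay(TimeSpan.FromSeconds(1), ct); // queue is empty
                continue;
            }

            // ... run the verification items here ...

            job.Status = BulkVerificationJobStatus.Completed; // enum value assumed
            job.CompletedAt = DateTimeOffset.UtcNow;
            await store.TryUpdateAsync(job, ct);
        }
    }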

View File

@@ -1,343 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Bulk;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Infrastructure.Bulk;
internal sealed class MongoBulkVerificationJobStore : IBulkVerificationJobStore
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
private readonly IMongoCollection<JobDocument> _collection;
public MongoBulkVerificationJobStore(IMongoCollection<JobDocument> collection)
{
_collection = collection ?? throw new ArgumentNullException(nameof(collection));
}
public async Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
job.Version = 0;
var document = JobDocument.FromDomain(job, SerializerOptions);
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
job.Version = document.Version;
return job;
}
public async Task<BulkVerificationJob?> GetAsync(string jobId, CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(jobId))
{
return null;
}
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Id, jobId);
var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain(SerializerOptions);
}
public async Task<BulkVerificationJob?> TryAcquireAsync(CancellationToken cancellationToken = default)
{
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued);
var update = Builders<JobDocument>.Update
.Set(doc => doc.Status, BulkVerificationJobStatus.Running)
.Set(doc => doc.StartedAt, DateTimeOffset.UtcNow.UtcDateTime)
.Inc(doc => doc.Version, 1);
var options = new FindOneAndUpdateOptions<JobDocument>
{
Sort = Builders<JobDocument>.Sort.Ascending(doc => doc.CreatedAt),
ReturnDocument = ReturnDocument.After
};
var document = await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false);
return document?.ToDomain(SerializerOptions);
}
public async Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var currentVersion = job.Version;
var replacement = JobDocument.FromDomain(job, SerializerOptions);
replacement.Version = currentVersion + 1;
var filter = Builders<JobDocument>.Filter.Where(doc => doc.Id == job.Id && doc.Version == currentVersion);
var result = await _collection.ReplaceOneAsync(filter, replacement, cancellationToken: cancellationToken).ConfigureAwait(false);
if (result.ModifiedCount == 0)
{
return false;
}
job.Version = replacement.Version;
return true;
}
public async Task<int> CountQueuedAsync(CancellationToken cancellationToken = default)
{
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued);
var count = await _collection.CountDocumentsAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false);
return Convert.ToInt32(count);
}
internal sealed class JobDocument
{
[BsonId]
[BsonElement("_id")]
public string Id { get; set; } = string.Empty;
[BsonElement("version")]
public int Version { get; set; }
[BsonElement("status")]
[BsonRepresentation(BsonType.String)]
public BulkVerificationJobStatus Status { get; set; }
[BsonElement("createdAt")]
public DateTime CreatedAt { get; set; }
[BsonElement("startedAt")]
[BsonIgnoreIfNull]
public DateTime? StartedAt { get; set; }
[BsonElement("completedAt")]
[BsonIgnoreIfNull]
public DateTime? CompletedAt { get; set; }
[BsonElement("context")]
public JobContextDocument Context { get; set; } = new();
[BsonElement("items")]
public List<JobItemDocument> Items { get; set; } = new();
[BsonElement("processed")]
public int ProcessedCount { get; set; }
[BsonElement("succeeded")]
public int SucceededCount { get; set; }
[BsonElement("failed")]
public int FailedCount { get; set; }
[BsonElement("failureReason")]
[BsonIgnoreIfNull]
public string? FailureReason { get; set; }
public static JobDocument FromDomain(BulkVerificationJob job, JsonSerializerOptions serializerOptions)
{
return new JobDocument
{
Id = job.Id,
Version = job.Version,
Status = job.Status,
CreatedAt = job.CreatedAt.UtcDateTime,
StartedAt = job.StartedAt?.UtcDateTime,
CompletedAt = job.CompletedAt?.UtcDateTime,
Context = JobContextDocument.FromDomain(job.Context),
Items = JobItemDocument.FromDomain(job.Items, serializerOptions),
ProcessedCount = job.ProcessedCount,
SucceededCount = job.SucceededCount,
FailedCount = job.FailedCount,
FailureReason = job.FailureReason
};
}
public BulkVerificationJob ToDomain(JsonSerializerOptions serializerOptions)
{
return new BulkVerificationJob
{
Id = Id,
Version = Version,
Status = Status,
CreatedAt = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc),
StartedAt = StartedAt is null ? null : DateTime.SpecifyKind(StartedAt.Value, DateTimeKind.Utc),
CompletedAt = CompletedAt is null ? null : DateTime.SpecifyKind(CompletedAt.Value, DateTimeKind.Utc),
Context = Context.ToDomain(),
Items = JobItemDocument.ToDomain(Items, serializerOptions),
ProcessedCount = ProcessedCount,
SucceededCount = SucceededCount,
FailedCount = FailedCount,
FailureReason = FailureReason
};
}
}
internal sealed class JobContextDocument
{
[BsonElement("tenant")]
[BsonIgnoreIfNull]
public string? Tenant { get; set; }
[BsonElement("requestedBy")]
[BsonIgnoreIfNull]
public string? RequestedBy { get; set; }
[BsonElement("clientId")]
[BsonIgnoreIfNull]
public string? ClientId { get; set; }
[BsonElement("scopes")]
public List<string> Scopes { get; set; } = new();
public static JobContextDocument FromDomain(BulkVerificationJobContext context)
{
return new JobContextDocument
{
Tenant = context.Tenant,
RequestedBy = context.RequestedBy,
ClientId = context.ClientId,
Scopes = new List<string>(context.Scopes)
};
}
public BulkVerificationJobContext ToDomain()
{
return new BulkVerificationJobContext
{
Tenant = Tenant,
RequestedBy = RequestedBy,
ClientId = ClientId,
Scopes = new List<string>(Scopes ?? new List<string>())
};
}
}
internal sealed class JobItemDocument
{
[BsonElement("index")]
public int Index { get; set; }
[BsonElement("request")]
public ItemRequestDocument Request { get; set; } = new();
[BsonElement("status")]
[BsonRepresentation(BsonType.String)]
public BulkVerificationItemStatus Status { get; set; }
[BsonElement("startedAt")]
[BsonIgnoreIfNull]
public DateTime? StartedAt { get; set; }
[BsonElement("completedAt")]
[BsonIgnoreIfNull]
public DateTime? CompletedAt { get; set; }
[BsonElement("result")]
[BsonIgnoreIfNull]
public string? ResultJson { get; set; }
[BsonElement("error")]
[BsonIgnoreIfNull]
public string? Error { get; set; }
public static List<JobItemDocument> FromDomain(IEnumerable<BulkVerificationJobItem> items, JsonSerializerOptions serializerOptions)
{
var list = new List<JobItemDocument>();
foreach (var item in items)
{
list.Add(new JobItemDocument
{
Index = item.Index,
Request = ItemRequestDocument.FromDomain(item.Request),
Status = item.Status,
StartedAt = item.StartedAt?.UtcDateTime,
CompletedAt = item.CompletedAt?.UtcDateTime,
ResultJson = item.Result is null ? null : JsonSerializer.Serialize(item.Result, serializerOptions),
Error = item.Error
});
}
return list;
}
public static IList<BulkVerificationJobItem> ToDomain(IEnumerable<JobItemDocument> documents, JsonSerializerOptions serializerOptions)
{
var list = new List<BulkVerificationJobItem>();
foreach (var document in documents)
{
AttestorVerificationResult? result = null;
if (!string.IsNullOrWhiteSpace(document.ResultJson))
{
result = JsonSerializer.Deserialize<AttestorVerificationResult>(document.ResultJson, serializerOptions);
}
list.Add(new BulkVerificationJobItem
{
Index = document.Index,
Request = document.Request.ToDomain(),
Status = document.Status,
StartedAt = document.StartedAt is null ? null : DateTime.SpecifyKind(document.StartedAt.Value, DateTimeKind.Utc),
CompletedAt = document.CompletedAt is null ? null : DateTime.SpecifyKind(document.CompletedAt.Value, DateTimeKind.Utc),
Result = result,
Error = document.Error
});
}
return list;
}
}
internal sealed class ItemRequestDocument
{
[BsonElement("uuid")]
[BsonIgnoreIfNull]
public string? Uuid { get; set; }
[BsonElement("artifactSha256")]
[BsonIgnoreIfNull]
public string? ArtifactSha256 { get; set; }
[BsonElement("subject")]
[BsonIgnoreIfNull]
public string? Subject { get; set; }
[BsonElement("envelopeId")]
[BsonIgnoreIfNull]
public string? EnvelopeId { get; set; }
[BsonElement("policyVersion")]
[BsonIgnoreIfNull]
public string? PolicyVersion { get; set; }
[BsonElement("refreshProof")]
public bool RefreshProof { get; set; }
public static ItemRequestDocument FromDomain(BulkVerificationItemRequest request)
{
return new ItemRequestDocument
{
Uuid = request.Uuid,
ArtifactSha256 = request.ArtifactSha256,
Subject = request.Subject,
EnvelopeId = request.EnvelopeId,
PolicyVersion = request.PolicyVersion,
RefreshProof = request.RefreshProof
};
}
public BulkVerificationItemRequest ToDomain()
{
return new BulkVerificationItemRequest
{
Uuid = Uuid,
ArtifactSha256 = ArtifactSha256,
Subject = Subject,
EnvelopeId = EnvelopeId,
PolicyVersion = PolicyVersion,
RefreshProof = RefreshProof
};
}
}
}
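
One behavioral difference worth noting: the removed store implemented optimistic concurrency (a version-checked replace that returns false on a lost race), while the new in-memory store's `TryUpdateAsync` overwrites unconditionally and always returns true. If that contract ever matters again, a hedged in-memory equivalent could look like this sketch (it assumes the store holds snapshots rather than the live instances callers mutate):

    // Sketch only: restores the removed optimistic-concurrency contract.
    public Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(job);

        // Mirrors the removed Mongo filter: Id == job.Id && Version == currentVersion.
        if (!_jobs.TryGetValue(job.Id, out var current) || current.Version != job.Version)
        {
            return Task.FromResult(false);
        }

        job.Version = current.Version + 1;
        var swapped = _jobs.TryUpdate(job.Id, job, current); // compare-and-swap
        if (!swapped)
        {
            job.Version = current.Version; // lost the race; restore the version
        }

        return Task.FromResult(swapped);
    }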

View File

@@ -1,11 +1,10 @@
using System;
using System;
using Amazon.Runtime;
using Amazon.S3;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StackExchange.Redis;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Observability;
@@ -19,25 +18,26 @@ using StellaOps.Attestor.Infrastructure.Storage;
using StellaOps.Attestor.Infrastructure.Submission;
using StellaOps.Attestor.Infrastructure.Transparency;
using StellaOps.Attestor.Infrastructure.Verification;
namespace StellaOps.Attestor.Infrastructure;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
{
using StellaOps.Attestor.Infrastructure.Bulk;
namespace StellaOps.Attestor.Infrastructure;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
{
services.AddMemoryCache();
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddSingleton(sp =>
{
var canonicalizer = sp.GetRequiredService<IDsseCanonicalizer>();
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
return new AttestorSubmissionValidator(canonicalizer, options.Security.SignerIdentity.Mode);
});
services.AddSingleton<AttestorMetrics>();
services.AddSingleton<IAttestorSubmissionService, AttestorSubmissionService>();
services.AddSingleton<IAttestorVerificationService, AttestorVerificationService>();
services.AddSingleton(sp =>
{
var canonicalizer = sp.GetRequiredService<IDsseCanonicalizer>();
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
return new AttestorSubmissionValidator(canonicalizer, options.Security.SignerIdentity.Mode);
});
services.AddSingleton<AttestorMetrics>();
services.AddSingleton<IAttestorSubmissionService, AttestorSubmissionService>();
services.AddSingleton<IAttestorVerificationService, AttestorVerificationService>();
services.AddHttpClient<HttpRekorClient>(client =>
{
client.Timeout = TimeSpan.FromSeconds(30);
@@ -66,86 +66,55 @@ public static class ServiceCollectionExtensions
return sp.GetRequiredService<HttpTransparencyWitnessClient>();
});
services.AddSingleton<IMongoClient>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Mongo.Uri))
{
throw new InvalidOperationException("Attestor MongoDB connection string is not configured.");
}
return new MongoClient(options.Mongo.Uri);
});
services.AddSingleton(sp =>
{
var opts = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
var client = sp.GetRequiredService<IMongoClient>();
var databaseName = MongoUrl.Create(opts.Mongo.Uri).DatabaseName ?? opts.Mongo.Database;
return client.GetDatabase(databaseName);
});
services.AddSingleton(sp =>
{
var opts = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
var database = sp.GetRequiredService<IMongoDatabase>();
return database.GetCollection<MongoAttestorEntryRepository.AttestorEntryDocument>(opts.Mongo.EntriesCollection);
});
services.AddSingleton(sp =>
{
var opts = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
var database = sp.GetRequiredService<IMongoDatabase>();
return database.GetCollection<MongoAttestorAuditSink.AttestorAuditDocument>(opts.Mongo.AuditCollection);
});
services.AddSingleton<IAttestorEntryRepository, MongoAttestorEntryRepository>();
services.AddSingleton<IAttestorAuditSink, MongoAttestorAuditSink>();
services.AddSingleton<IAttestorDedupeStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Redis.Url))
{
return new InMemoryAttestorDedupeStore();
}
var multiplexer = sp.GetRequiredService<IConnectionMultiplexer>();
return new RedisAttestorDedupeStore(multiplexer, sp.GetRequiredService<IOptions<AttestorOptions>>());
});
services.AddSingleton<IConnectionMultiplexer>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Redis.Url))
{
throw new InvalidOperationException("Redis connection string is required when redis dedupe is enabled.");
}
return ConnectionMultiplexer.Connect(options.Redis.Url);
});
services.AddSingleton<IAttestorArchiveStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (options.S3.Enabled && !string.IsNullOrWhiteSpace(options.S3.Endpoint) && !string.IsNullOrWhiteSpace(options.S3.Bucket))
{
var config = new AmazonS3Config
{
ServiceURL = options.S3.Endpoint,
ForcePathStyle = true,
UseHttp = !options.S3.UseTls
};
var client = new AmazonS3Client(FallbackCredentialsFactory.GetCredentials(), config);
return new S3AttestorArchiveStore(client, sp.GetRequiredService<IOptions<AttestorOptions>>(), sp.GetRequiredService<ILogger<S3AttestorArchiveStore>>());
}
return new NullAttestorArchiveStore(sp.GetRequiredService<ILogger<NullAttestorArchiveStore>>());
});
return services;
}
}
services.AddSingleton<IAttestorEntryRepository, InMemoryAttestorEntryRepository>();
services.AddSingleton<IAttestorAuditSink, InMemoryAttestorAuditSink>();
services.AddSingleton<IAttestorDedupeStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Redis.Url))
{
return new InMemoryAttestorDedupeStore();
}
var multiplexer = sp.GetRequiredService<IConnectionMultiplexer>();
return new RedisAttestorDedupeStore(multiplexer, sp.GetRequiredService<IOptions<AttestorOptions>>());
});
services.AddSingleton<IConnectionMultiplexer>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Redis.Url))
{
throw new InvalidOperationException("Redis connection string is required when redis dedupe is enabled.");
}
return ConnectionMultiplexer.Connect(options.Redis.Url);
});
services.AddSingleton<IAttestorArchiveStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (options.S3.Enabled && !string.IsNullOrWhiteSpace(options.S3.Endpoint) && !string.IsNullOrWhiteSpace(options.S3.Bucket))
{
var config = new AmazonS3Config
{
ServiceURL = options.S3.Endpoint,
ForcePathStyle = true,
UseHttp = !options.S3.UseTls
};
var client = new AmazonS3Client(FallbackCredentialsFactory.GetCredentials(), config);
return new S3AttestorArchiveStore(client, sp.GetRequiredService<IOptions<AttestorOptions>>(), sp.GetRequiredService<ILogger<S3AttestorArchiveStore>>());
}
return new NullAttestorArchiveStore(sp.GetRequiredService<ILogger<NullAttestorArchiveStore>>());
});
services.AddSingleton<IBulkVerificationJobStore, InMemoryBulkVerificationJobStore>();
return services;
}
}
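
With Mongo removed, the only external dependencies left behind option flags are Redis (dedupe) and S3 (archive); everything else is in-memory or no-op. A hedged wiring sketch showing the decision points (values are illustrative; only the option paths `Redis.Url` and `S3.*` appear in this diff):

    using Microsoft.Extensions.DependencyInjection;
    using StellaOps.Attestor.Core.Options;
    using StellaOps.Attestor.Infrastructure;

    var services = new ServiceCollection();
    services.AddLogging();

    services.Configure<AttestorOptions>(opts =>
    {
        opts.Redis.Url = "";     // empty => InMemoryAttestorDedupeStore
        opts.S3.Enabled = false; // disabled => NullAttestorArchiveStore
    });

    services.AddAttestorInfrastructure();
    // IAttestorEntryRepository, IAttestorAuditSink, and IBulkVerificationJobStore
    // now unconditionally resolve to the in-memory implementations.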

View File

@@ -22,7 +22,6 @@
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="AWSSDK.S3" Version="4.0.2" />
</ItemGroup>

View File

@@ -0,0 +1,18 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Audit;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class InMemoryAttestorAuditSink : IAttestorAuditSink
{
private readonly object _sync = new();
public List<AttestorAuditRecord> Records { get; } = new();
public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default)
{
// The sink is registered as a singleton and written from concurrent
// request paths; guard the shared list.
lock (_sync)
{
Records.Add(record);
}
return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,170 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class InMemoryAttestorEntryRepository : IAttestorEntryRepository
{
private readonly ConcurrentDictionary<string, AttestorEntry> _entries = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, string> _bundleIndex = new(StringComparer.OrdinalIgnoreCase);
private readonly object _sync = new();
public Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
string? uuid;
lock (_sync)
{
_bundleIndex.TryGetValue(bundleSha256, out uuid);
}
if (uuid is not null && _entries.TryGetValue(uuid, out var entry))
{
return Task.FromResult<AttestorEntry?>(entry);
}
return Task.FromResult<AttestorEntry?>(null);
}
public Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default)
{
_entries.TryGetValue(rekorUuid, out var entry);
return Task.FromResult(entry);
}
public Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default)
{
List<AttestorEntry> snapshot;
lock (_sync)
{
snapshot = _entries.Values.ToList();
}
var entries = snapshot
.Where(e => string.Equals(e.Artifact.Sha256, artifactSha256, StringComparison.OrdinalIgnoreCase))
.OrderBy(e => e.CreatedAt)
.ToList();
return Task.FromResult<IReadOnlyList<AttestorEntry>>(entries);
}
public Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(entry);
lock (_sync)
{
if (_bundleIndex.TryGetValue(entry.BundleSha256, out var existingUuid) &&
!string.Equals(existingUuid, entry.RekorUuid, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"Bundle SHA '{entry.BundleSha256}' already exists.");
}
if (_entries.TryGetValue(entry.RekorUuid, out var existing) &&
!string.Equals(existing.BundleSha256, entry.BundleSha256, StringComparison.OrdinalIgnoreCase))
{
_bundleIndex.Remove(existing.BundleSha256);
}
_entries[entry.RekorUuid] = entry;
_bundleIndex[entry.BundleSha256] = entry.RekorUuid;
}
return Task.CompletedTask;
}
public Task<AttestorEntryQueryResult> QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(query);
var pageSize = query.PageSize <= 0 ? 50 : Math.Min(query.PageSize, 200);
List<AttestorEntry> snapshot;
lock (_sync)
{
snapshot = _entries.Values.ToList();
}
IEnumerable<AttestorEntry> sequence = snapshot;
if (!string.IsNullOrWhiteSpace(query.Subject))
{
var subject = query.Subject;
sequence = sequence.Where(e =>
string.Equals(e.Artifact.Sha256, subject, StringComparison.OrdinalIgnoreCase) ||
string.Equals(e.Artifact.ImageDigest, subject, StringComparison.OrdinalIgnoreCase) ||
string.Equals(e.Artifact.SubjectUri, subject, StringComparison.OrdinalIgnoreCase));
}
if (!string.IsNullOrWhiteSpace(query.Type))
{
sequence = sequence.Where(e => string.Equals(e.Artifact.Kind, query.Type, StringComparison.OrdinalIgnoreCase));
}
if (!string.IsNullOrWhiteSpace(query.Issuer))
{
sequence = sequence.Where(e => string.Equals(e.SignerIdentity.SubjectAlternativeName, query.Issuer, StringComparison.OrdinalIgnoreCase));
}
if (!string.IsNullOrWhiteSpace(query.Scope))
{
sequence = sequence.Where(e => string.Equals(e.SignerIdentity.Issuer, query.Scope, StringComparison.OrdinalIgnoreCase));
}
if (query.CreatedAfter is { } createdAfter)
{
sequence = sequence.Where(e => e.CreatedAt >= createdAfter);
}
if (query.CreatedBefore is { } createdBefore)
{
sequence = sequence.Where(e => e.CreatedAt <= createdBefore);
}
if (!string.IsNullOrWhiteSpace(query.ContinuationToken))
{
var continuation = AttestorEntryContinuationToken.Parse(query.ContinuationToken);
sequence = sequence.Where(e =>
{
var createdAt = e.CreatedAt;
if (createdAt < continuation.CreatedAt)
{
return true;
}
if (createdAt > continuation.CreatedAt)
{
return false;
}
return string.CompareOrdinal(e.RekorUuid, continuation.RekorUuid) >= 0;
});
}
var ordered = sequence
.OrderByDescending(e => e.CreatedAt)
.ThenBy(e => e.RekorUuid, StringComparer.Ordinal);
var page = ordered.Take(pageSize + 1).ToList();
AttestorEntry? next = null;
if (page.Count > pageSize)
{
next = page[^1];
page.RemoveAt(page.Count - 1);
}
var result = new AttestorEntryQueryResult
{
Items = page,
ContinuationToken = next is null
? null
: AttestorEntryContinuationToken.Encode(next.CreatedAt, next.RekorUuid)
};
return Task.FromResult(result);
}
}
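
Pagination against this repository behaves like the removed Mongo version: results are ordered by CreatedAt descending, then RekorUuid, and the continuation token points at the first entry of the next page. A hedged drain loop (the query and result property names come from this diff; the `repository` instance and the subject value are illustrative):

    // Drain all entries for one subject, page by page.
    string? token = null;
    do
    {
        var result = await repository.QueryAsync(new AttestorEntryQuery
        {
            Subject = "sha256:deadbeef", // artifact sha256, image digest, or subject URI
            PageSize = 50,
            ContinuationToken = token
        });

        foreach (var entry in result.Items)
        {
            Console.WriteLine($"{entry.CreatedAt:O} {entry.RekorUuid}");
        }

        token = result.ContinuationToken;
    } while (token is not null);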

View File

@@ -1,131 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Audit;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorAuditSink : IAttestorAuditSink
{
private readonly IMongoCollection<AttestorAuditDocument> _collection;
private static int _indexesInitialized;
public MongoAttestorAuditSink(IMongoCollection<AttestorAuditDocument> collection)
{
_collection = collection;
EnsureIndexes();
}
public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default)
{
var document = AttestorAuditDocument.FromRecord(record);
return _collection.InsertOneAsync(document, cancellationToken: cancellationToken);
}
private void EnsureIndexes()
{
if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1)
{
return;
}
var index = new CreateIndexModel<AttestorAuditDocument>(
Builders<AttestorAuditDocument>.IndexKeys.Descending(x => x.Timestamp),
new CreateIndexOptions { Name = "ts_desc" });
_collection.Indexes.CreateOne(index);
}
internal sealed class AttestorAuditDocument
{
[BsonId]
public ObjectId Id { get; set; }
[BsonElement("ts")]
public BsonDateTime Timestamp { get; set; } = BsonDateTime.Create(DateTime.UtcNow);
[BsonElement("action")]
public string Action { get; set; } = string.Empty;
[BsonElement("result")]
public string Result { get; set; } = string.Empty;
[BsonElement("rekorUuid")]
public string? RekorUuid { get; set; }
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("artifactSha256")]
public string ArtifactSha256 { get; set; } = string.Empty;
[BsonElement("bundleSha256")]
public string BundleSha256 { get; set; } = string.Empty;
[BsonElement("backend")]
public string Backend { get; set; } = string.Empty;
[BsonElement("latencyMs")]
public long LatencyMs { get; set; }
[BsonElement("caller")]
public CallerDocument Caller { get; set; } = new();
[BsonElement("metadata")]
public BsonDocument Metadata { get; set; } = new();
public static AttestorAuditDocument FromRecord(AttestorAuditRecord record)
{
var metadata = new BsonDocument();
foreach (var kvp in record.Metadata)
{
metadata[kvp.Key] = kvp.Value;
}
return new AttestorAuditDocument
{
Id = ObjectId.GenerateNewId(),
Timestamp = BsonDateTime.Create(record.Timestamp.UtcDateTime),
Action = record.Action,
Result = record.Result,
RekorUuid = record.RekorUuid,
Index = record.Index,
ArtifactSha256 = record.ArtifactSha256,
BundleSha256 = record.BundleSha256,
Backend = record.Backend,
LatencyMs = record.LatencyMs,
Caller = new CallerDocument
{
Subject = record.Caller.Subject,
Audience = record.Caller.Audience,
ClientId = record.Caller.ClientId,
MtlsThumbprint = record.Caller.MtlsThumbprint,
Tenant = record.Caller.Tenant
},
Metadata = metadata
};
}
internal sealed class CallerDocument
{
[BsonElement("subject")]
public string? Subject { get; set; }
[BsonElement("audience")]
public string? Audience { get; set; }
[BsonElement("clientId")]
public string? ClientId { get; set; }
[BsonElement("mtlsThumbprint")]
public string? MtlsThumbprint { get; set; }
[BsonElement("tenant")]
public string? Tenant { get; set; }
}
}
}

View File

@@ -1,111 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorDedupeStore : IAttestorDedupeStore
{
private readonly IMongoCollection<AttestorDedupeDocument> _collection;
private readonly TimeProvider _timeProvider;
private static int _indexesInitialized;
public MongoAttestorDedupeStore(
IMongoCollection<AttestorDedupeDocument> collection,
TimeProvider timeProvider)
{
_collection = collection;
_timeProvider = timeProvider;
EnsureIndexes();
}
public async Task<string?> TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
var key = BuildKey(bundleSha256);
var now = _timeProvider.GetUtcNow().UtcDateTime;
var filter = Builders<AttestorDedupeDocument>.Filter.Eq(x => x.Key, key);
var document = await _collection
.Find(filter)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (document is null)
{
return null;
}
if (document.TtlAt <= now)
{
await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
return null;
}
return document.RekorUuid;
}
public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow().UtcDateTime;
var expiresAt = now.Add(ttl);
var key = BuildKey(bundleSha256);
var filter = Builders<AttestorDedupeDocument>.Filter.Eq(x => x.Key, key);
var update = Builders<AttestorDedupeDocument>.Update
.SetOnInsert(x => x.Key, key)
.Set(x => x.RekorUuid, rekorUuid)
.Set(x => x.CreatedAt, now)
.Set(x => x.TtlAt, expiresAt);
return _collection.UpdateOneAsync(
filter,
update,
new UpdateOptions { IsUpsert = true },
cancellationToken);
}
private static string BuildKey(string bundleSha256) => $"bundle:{bundleSha256}";
private void EnsureIndexes()
{
if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1)
{
return;
}
var indexes = new[]
{
new CreateIndexModel<AttestorDedupeDocument>(
Builders<AttestorDedupeDocument>.IndexKeys.Ascending(x => x.Key),
new CreateIndexOptions { Unique = true, Name = "dedupe_key_unique" }),
new CreateIndexModel<AttestorDedupeDocument>(
Builders<AttestorDedupeDocument>.IndexKeys.Ascending(x => x.TtlAt),
new CreateIndexOptions { ExpireAfter = TimeSpan.Zero, Name = "dedupe_ttl" })
};
_collection.Indexes.CreateMany(indexes);
}
[BsonIgnoreExtraElements]
internal sealed class AttestorDedupeDocument
{
[BsonId]
public ObjectId Id { get; set; }
[BsonElement("key")]
public string Key { get; set; } = string.Empty;
[BsonElement("rekorUuid")]
public string RekorUuid { get; set; } = string.Empty;
[BsonElement("createdAt")]
public DateTime CreatedAt { get; set; }
[BsonElement("ttlAt")]
public DateTime TtlAt { get; set; }
}
}

View File

@@ -1,609 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
{
private const int DefaultPageSize = 50;
private const int MaxPageSize = 200;
private readonly IMongoCollection<AttestorEntryDocument> _entries;
public MongoAttestorEntryRepository(IMongoCollection<AttestorEntryDocument> entries)
{
_entries = entries ?? throw new ArgumentNullException(nameof(entries));
EnsureIndexes();
}
public async Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.BundleSha256, bundleSha256);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, rekorUuid);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Artifact.Sha256, artifactSha256);
var documents = await _entries.Find(filter)
.Sort(Builders<AttestorEntryDocument>.Sort.Descending(x => x.CreatedAt))
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.ConvertAll(static doc => doc.ToDomain());
}
public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(entry);
var document = AttestorEntryDocument.FromDomain(entry);
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, document.Id);
await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
public async Task<AttestorEntryQueryResult> QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(query);
var pageSize = query.PageSize <= 0 ? DefaultPageSize : Math.Min(query.PageSize, MaxPageSize);
var filterBuilder = Builders<AttestorEntryDocument>.Filter;
var filter = filterBuilder.Empty;
if (!string.IsNullOrWhiteSpace(query.Subject))
{
var subject = query.Subject;
var subjectFilter = filterBuilder.Or(
filterBuilder.Eq(x => x.Artifact.Sha256, subject),
filterBuilder.Eq(x => x.Artifact.ImageDigest, subject),
filterBuilder.Eq(x => x.Artifact.SubjectUri, subject));
filter &= subjectFilter;
}
if (!string.IsNullOrWhiteSpace(query.Type))
{
filter &= filterBuilder.Eq(x => x.Artifact.Kind, query.Type);
}
if (!string.IsNullOrWhiteSpace(query.Issuer))
{
filter &= filterBuilder.Eq(x => x.SignerIdentity.SubjectAlternativeName, query.Issuer);
}
if (!string.IsNullOrWhiteSpace(query.Scope))
{
filter &= filterBuilder.Eq(x => x.SignerIdentity.Issuer, query.Scope);
}
if (query.CreatedAfter is { } createdAfter)
{
filter &= filterBuilder.Gte(x => x.CreatedAt, createdAfter.UtcDateTime);
}
if (query.CreatedBefore is { } createdBefore)
{
filter &= filterBuilder.Lte(x => x.CreatedAt, createdBefore.UtcDateTime);
}
if (!string.IsNullOrWhiteSpace(query.ContinuationToken))
{
if (!AttestorEntryContinuationToken.TryParse(query.ContinuationToken, out var cursor))
{
throw new FormatException("Invalid continuation token.");
}
var cursorInstant = cursor.CreatedAt.UtcDateTime;
var continuationFilter = filterBuilder.Or(
filterBuilder.Lt(x => x.CreatedAt, cursorInstant),
filterBuilder.And(
filterBuilder.Eq(x => x.CreatedAt, cursorInstant),
filterBuilder.Gt(x => x.Id, cursor.RekorUuid)));
filter &= continuationFilter;
}
var sort = Builders<AttestorEntryDocument>.Sort
.Descending(x => x.CreatedAt)
.Ascending(x => x.Id);
var documents = await _entries.Find(filter)
.Sort(sort)
.Limit(pageSize + 1)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
string? continuation = null;
if (documents.Count > pageSize)
{
var cursorDocument = documents[pageSize];
var nextCreatedAt = DateTime.SpecifyKind(cursorDocument.CreatedAt, DateTimeKind.Utc);
continuation = AttestorEntryContinuationToken.Encode(new DateTimeOffset(nextCreatedAt), cursorDocument.Id);
documents.RemoveRange(pageSize, documents.Count - pageSize);
}
var items = documents.ConvertAll(static doc => doc.ToDomain());
return new AttestorEntryQueryResult
{
Items = items,
ContinuationToken = continuation
};
}
private void EnsureIndexes()
{
var keys = Builders<AttestorEntryDocument>.IndexKeys;
var models = new[]
{
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.BundleSha256),
new CreateIndexOptions { Name = "bundle_sha_unique", Unique = true }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Descending(x => x.CreatedAt).Ascending(x => x.Id),
new CreateIndexOptions { Name = "created_at_uuid" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.Sha256),
new CreateIndexOptions { Name = "artifact_sha" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.ImageDigest),
new CreateIndexOptions { Name = "artifact_image_digest" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.SubjectUri),
new CreateIndexOptions { Name = "artifact_subject_uri" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.SignerIdentity.Issuer)
.Ascending(x => x.Artifact.Kind)
.Descending(x => x.CreatedAt)
.Ascending(x => x.Id),
new CreateIndexOptions { Name = "scope_kind_created_at" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.SignerIdentity.SubjectAlternativeName),
new CreateIndexOptions { Name = "issuer_san" })
};
_entries.Indexes.CreateMany(models);
}
[BsonIgnoreExtraElements]
internal sealed class AttestorEntryDocument
{
[BsonId]
public string Id { get; set; } = string.Empty;
[BsonElement("artifact")]
public ArtifactDocument Artifact { get; set; } = new();
[BsonElement("bundleSha256")]
public string BundleSha256 { get; set; } = string.Empty;
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("witness")]
public WitnessDocument? Witness { get; set; }
[BsonElement("log")]
public LogDocument Log { get; set; } = new();
[BsonElement("createdAt")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime CreatedAt { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("signer")]
public SignerIdentityDocument SignerIdentity { get; set; } = new();
[BsonElement("mirror")]
public MirrorDocument? Mirror { get; set; }
public static AttestorEntryDocument FromDomain(AttestorEntry entry)
{
ArgumentNullException.ThrowIfNull(entry);
return new AttestorEntryDocument
{
Id = entry.RekorUuid,
Artifact = ArtifactDocument.FromDomain(entry.Artifact),
BundleSha256 = entry.BundleSha256,
Index = entry.Index,
Proof = ProofDocument.FromDomain(entry.Proof),
Witness = WitnessDocument.FromDomain(entry.Witness),
Log = LogDocument.FromDomain(entry.Log),
CreatedAt = entry.CreatedAt.UtcDateTime,
Status = entry.Status,
SignerIdentity = SignerIdentityDocument.FromDomain(entry.SignerIdentity),
Mirror = MirrorDocument.FromDomain(entry.Mirror)
};
}
public AttestorEntry ToDomain()
{
var createdAtUtc = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc);
return new AttestorEntry
{
RekorUuid = Id,
Artifact = Artifact.ToDomain(),
BundleSha256 = BundleSha256,
Index = Index,
Proof = Proof?.ToDomain(),
Witness = Witness?.ToDomain(),
Log = Log.ToDomain(),
CreatedAt = new DateTimeOffset(createdAtUtc),
Status = Status,
SignerIdentity = SignerIdentity.ToDomain(),
Mirror = Mirror?.ToDomain()
};
}
}
internal sealed class ArtifactDocument
{
[BsonElement("sha256")]
public string Sha256 { get; set; } = string.Empty;
[BsonElement("kind")]
public string Kind { get; set; } = string.Empty;
[BsonElement("imageDigest")]
public string? ImageDigest { get; set; }
[BsonElement("subjectUri")]
public string? SubjectUri { get; set; }
public static ArtifactDocument FromDomain(AttestorEntry.ArtifactDescriptor artifact)
{
ArgumentNullException.ThrowIfNull(artifact);
return new ArtifactDocument
{
Sha256 = artifact.Sha256,
Kind = artifact.Kind,
ImageDigest = artifact.ImageDigest,
SubjectUri = artifact.SubjectUri
};
}
public AttestorEntry.ArtifactDescriptor ToDomain()
{
return new AttestorEntry.ArtifactDescriptor
{
Sha256 = Sha256,
Kind = Kind,
ImageDigest = ImageDigest,
SubjectUri = SubjectUri
};
}
}
internal sealed class ProofDocument
{
[BsonElement("checkpoint")]
public CheckpointDocument? Checkpoint { get; set; }
[BsonElement("inclusion")]
public InclusionDocument? Inclusion { get; set; }
public static ProofDocument? FromDomain(AttestorEntry.ProofDescriptor? proof)
{
if (proof is null)
{
return null;
}
return new ProofDocument
{
Checkpoint = CheckpointDocument.FromDomain(proof.Checkpoint),
Inclusion = InclusionDocument.FromDomain(proof.Inclusion)
};
}
public AttestorEntry.ProofDescriptor ToDomain()
{
return new AttestorEntry.ProofDescriptor
{
Checkpoint = Checkpoint?.ToDomain(),
Inclusion = Inclusion?.ToDomain()
};
}
}
internal sealed class WitnessDocument
{
[BsonElement("aggregator")]
public string? Aggregator { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "unknown";
[BsonElement("rootHash")]
public string? RootHash { get; set; }
[BsonElement("retrievedAt")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime RetrievedAt { get; set; }
[BsonElement("statement")]
public string? Statement { get; set; }
[BsonElement("signature")]
public string? Signature { get; set; }
[BsonElement("keyId")]
public string? KeyId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
public static WitnessDocument? FromDomain(AttestorEntry.WitnessDescriptor? witness)
{
if (witness is null)
{
return null;
}
return new WitnessDocument
{
Aggregator = witness.Aggregator,
Status = witness.Status,
RootHash = witness.RootHash,
RetrievedAt = witness.RetrievedAt.UtcDateTime,
Statement = witness.Statement,
Signature = witness.Signature,
KeyId = witness.KeyId,
Error = witness.Error
};
}
public AttestorEntry.WitnessDescriptor ToDomain()
{
return new AttestorEntry.WitnessDescriptor
{
Aggregator = Aggregator ?? string.Empty,
Status = string.IsNullOrWhiteSpace(Status) ? "unknown" : Status,
RootHash = RootHash,
RetrievedAt = new DateTimeOffset(DateTime.SpecifyKind(RetrievedAt, DateTimeKind.Utc)),
Statement = Statement,
Signature = Signature,
KeyId = KeyId,
Error = Error
};
}
}
internal sealed class CheckpointDocument
{
[BsonElement("origin")]
public string? Origin { get; set; }
[BsonElement("size")]
public long Size { get; set; }
[BsonElement("rootHash")]
public string? RootHash { get; set; }
[BsonElement("timestamp")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime? Timestamp { get; set; }
public static CheckpointDocument? FromDomain(AttestorEntry.CheckpointDescriptor? checkpoint)
{
if (checkpoint is null)
{
return null;
}
return new CheckpointDocument
{
Origin = checkpoint.Origin,
Size = checkpoint.Size,
RootHash = checkpoint.RootHash,
Timestamp = checkpoint.Timestamp?.UtcDateTime
};
}
public AttestorEntry.CheckpointDescriptor ToDomain()
{
return new AttestorEntry.CheckpointDescriptor
{
Origin = Origin,
Size = Size,
RootHash = RootHash,
Timestamp = Timestamp is null ? null : new DateTimeOffset(DateTime.SpecifyKind(Timestamp.Value, DateTimeKind.Utc))
};
}
}
internal sealed class InclusionDocument
{
[BsonElement("leafHash")]
public string? LeafHash { get; set; }
[BsonElement("path")]
public IReadOnlyList<string> Path { get; set; } = Array.Empty<string>();
public static InclusionDocument? FromDomain(AttestorEntry.InclusionDescriptor? inclusion)
{
if (inclusion is null)
{
return null;
}
return new InclusionDocument
{
LeafHash = inclusion.LeafHash,
Path = inclusion.Path
};
}
public AttestorEntry.InclusionDescriptor ToDomain()
{
return new AttestorEntry.InclusionDescriptor
{
LeafHash = LeafHash,
Path = Path
};
}
}
internal sealed class LogDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = "primary";
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("logId")]
public string? LogId { get; set; }
public static LogDocument FromDomain(AttestorEntry.LogDescriptor log)
{
ArgumentNullException.ThrowIfNull(log);
return new LogDocument
{
Backend = log.Backend,
Url = log.Url,
LogId = log.LogId
};
}
public AttestorEntry.LogDescriptor ToDomain()
{
return new AttestorEntry.LogDescriptor
{
Backend = Backend,
Url = Url,
LogId = LogId
};
}
}
internal sealed class SignerIdentityDocument
{
[BsonElement("mode")]
public string Mode { get; set; } = string.Empty;
[BsonElement("issuer")]
public string? Issuer { get; set; }
[BsonElement("san")]
public string? SubjectAlternativeName { get; set; }
[BsonElement("kid")]
public string? KeyId { get; set; }
public static SignerIdentityDocument FromDomain(AttestorEntry.SignerIdentityDescriptor signer)
{
ArgumentNullException.ThrowIfNull(signer);
return new SignerIdentityDocument
{
Mode = signer.Mode,
Issuer = signer.Issuer,
SubjectAlternativeName = signer.SubjectAlternativeName,
KeyId = signer.KeyId
};
}
public AttestorEntry.SignerIdentityDescriptor ToDomain()
{
return new AttestorEntry.SignerIdentityDescriptor
{
Mode = Mode,
Issuer = Issuer,
SubjectAlternativeName = SubjectAlternativeName,
KeyId = KeyId
};
}
}
internal sealed class MirrorDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = string.Empty;
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("uuid")]
public string? Uuid { get; set; }
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("witness")]
public WitnessDocument? Witness { get; set; }
[BsonElement("logId")]
public string? LogId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
public static MirrorDocument? FromDomain(AttestorEntry.LogReplicaDescriptor? mirror)
{
if (mirror is null)
{
return null;
}
return new MirrorDocument
{
Backend = mirror.Backend,
Url = mirror.Url,
Uuid = mirror.Uuid,
Index = mirror.Index,
Status = mirror.Status,
Proof = ProofDocument.FromDomain(mirror.Proof),
Witness = WitnessDocument.FromDomain(mirror.Witness),
LogId = mirror.LogId,
Error = mirror.Error
};
}
public AttestorEntry.LogReplicaDescriptor ToDomain()
{
return new AttestorEntry.LogReplicaDescriptor
{
Backend = Backend,
Url = Url,
Uuid = Uuid,
Index = Index,
Status = Status,
Proof = Proof?.ToDomain(),
Witness = Witness?.ToDomain(),
LogId = LogId,
Error = Error
};
}
}
}

View File

@@ -22,7 +22,6 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Options;
using Microsoft.AspNetCore.TestHost;
using MongoDB.Driver;
using StackExchange.Redis;
using StellaOps.Attestor.Core.Offline;
using StellaOps.Attestor.Core.Storage;

View File

@@ -1,9 +1,8 @@
#if false
using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StackExchange.Redis;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Infrastructure.Storage;
@@ -15,54 +14,6 @@ public sealed class LiveDedupeStoreTests
{
private const string Category = "LiveTTL";
[Fact]
[Trait("Category", Category)]
public async Task Mongo_dedupe_document_expires_via_ttl_index()
{
var mongoUri = Environment.GetEnvironmentVariable("ATTESTOR_LIVE_MONGO_URI");
if (string.IsNullOrWhiteSpace(mongoUri))
{
return;
}
var mongoUrl = new MongoUrl(mongoUri);
var client = new MongoClient(mongoUrl);
var databaseName = $"{(string.IsNullOrWhiteSpace(mongoUrl.DatabaseName) ? "attestor_live_ttl" : mongoUrl.DatabaseName)}_{Guid.NewGuid():N}";
var database = client.GetDatabase(databaseName);
var collection = database.GetCollection<MongoAttestorDedupeStore.AttestorDedupeDocument>("dedupe");
try
{
var store = new MongoAttestorDedupeStore(collection, TimeProvider.System);
var indexes = await (await collection.Indexes.ListAsync()).ToListAsync();
Assert.Contains(indexes, doc => doc.TryGetElement("name", out var element) && element.Value == "dedupe_ttl");
var bundle = Guid.NewGuid().ToString("N");
var ttl = TimeSpan.FromSeconds(20);
await store.SetAsync(bundle, "rekor-live", ttl);
var filter = Builders<MongoAttestorDedupeStore.AttestorDedupeDocument>.Filter.Eq(x => x.Key, $"bundle:{bundle}");
Assert.True(await collection.Find(filter).AnyAsync(), "Seed document was not written.");
var deadline = DateTime.UtcNow + ttl + TimeSpan.FromMinutes(2);
while (DateTime.UtcNow < deadline)
{
if (!await collection.Find(filter).AnyAsync())
{
return;
}
await Task.Delay(TimeSpan.FromSeconds(5));
}
throw new TimeoutException("TTL document remained in MongoDB after waiting for expiry.");
}
finally
{
await client.DropDatabaseAsync(databaseName);
}
}
[Fact]
[Trait("Category", Category)]
public async Task Redis_dedupe_entry_sets_time_to_live()
@@ -106,5 +57,5 @@ public sealed class LiveDedupeStoreTests
await multiplexer.DisposeAsync();
}
}
}
#endif

View File

@@ -9,7 +9,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0" />
@@ -28,4 +27,4 @@
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />
</ItemGroup>
</Project>

View File

@@ -57,6 +57,7 @@ internal static class CommandFactory
root.Add(BuildVulnCommand(services, verboseOption, cancellationToken));
root.Add(BuildVexCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildCryptoCommand(services, verboseOption, cancellationToken));
root.Add(BuildExportCommand(services, verboseOption, cancellationToken));
root.Add(BuildAttestCommand(services, verboseOption, cancellationToken));
root.Add(BuildRiskProfileCommand(verboseOption, cancellationToken));
root.Add(BuildAdvisoryCommand(services, verboseOption, cancellationToken));
@@ -8713,6 +8714,261 @@ internal static class CommandFactory
return sbom;
}
private static Command BuildExportCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
var export = new Command("export", "Manage export profiles and runs.");
var jsonOption = new Option<bool>("--json")
{
Description = "Emit output in JSON."
};
var profiles = new Command("profiles", "Manage export profiles.");
var profilesList = new Command("list", "List export profiles.");
var profileLimitOption = new Option<int?>("--limit")
{
Description = "Maximum number of profiles to return."
};
var profileCursorOption = new Option<string?>("--cursor")
{
Description = "Pagination cursor."
};
profilesList.Add(profileLimitOption);
profilesList.Add(profileCursorOption);
profilesList.Add(jsonOption);
profilesList.Add(verboseOption);
profilesList.SetAction((parseResult, _) =>
{
var limit = parseResult.GetValue(profileLimitOption);
var cursor = parseResult.GetValue(profileCursorOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportProfilesListAsync(
services,
limit,
cursor,
json,
verbose,
cancellationToken);
});
var profilesShow = new Command("show", "Show export profile details.");
var profileIdArg = new Argument<string>("profile-id")
{
Description = "Export profile identifier."
};
profilesShow.Add(profileIdArg);
profilesShow.Add(jsonOption);
profilesShow.Add(verboseOption);
profilesShow.SetAction((parseResult, _) =>
{
var profileId = parseResult.GetValue(profileIdArg) ?? string.Empty;
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportProfileShowAsync(
services,
profileId,
json,
verbose,
cancellationToken);
});
profiles.Add(profilesList);
profiles.Add(profilesShow);
export.Add(profiles);
var runs = new Command("runs", "Manage export runs.");
var runsList = new Command("list", "List export runs.");
var runProfileOption = new Option<string?>("--profile-id")
{
Description = "Filter runs by profile ID."
};
var runLimitOption = new Option<int?>("--limit")
{
Description = "Maximum number of runs to return."
};
var runCursorOption = new Option<string?>("--cursor")
{
Description = "Pagination cursor."
};
runsList.Add(runProfileOption);
runsList.Add(runLimitOption);
runsList.Add(runCursorOption);
runsList.Add(jsonOption);
runsList.Add(verboseOption);
runsList.SetAction((parseResult, _) =>
{
var profileId = parseResult.GetValue(runProfileOption);
var limit = parseResult.GetValue(runLimitOption);
var cursor = parseResult.GetValue(runCursorOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportRunsListAsync(
services,
profileId,
limit,
cursor,
json,
verbose,
cancellationToken);
});
var runIdArg = new Argument<string>("run-id")
{
Description = "Export run identifier."
};
var runsShow = new Command("show", "Show export run details.");
runsShow.Add(runIdArg);
runsShow.Add(jsonOption);
runsShow.Add(verboseOption);
runsShow.SetAction((parseResult, _) =>
{
var runId = parseResult.GetValue(runIdArg) ?? string.Empty;
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportRunShowAsync(
services,
runId,
json,
verbose,
cancellationToken);
});
var runsDownload = new Command("download", "Download an export bundle for a run.");
runsDownload.Add(runIdArg);
var runOutputOption = new Option<string>("--output", new[] { "-o" })
{
Description = "Path to write the export bundle.",
IsRequired = true
};
var runOverwriteOption = new Option<bool>("--overwrite")
{
Description = "Overwrite output file if it exists."
};
var runVerifyHashOption = new Option<string?>("--verify-hash")
{
Description = "Optional SHA256 hash to verify after download."
};
var runTypeOption = new Option<string>("--type")
{
Description = "Run type: evidence (default) or attestation."
};
runTypeOption.SetDefaultValue("evidence");
runsDownload.Add(runOutputOption);
runsDownload.Add(runOverwriteOption);
runsDownload.Add(runVerifyHashOption);
runsDownload.Add(runTypeOption);
runsDownload.Add(verboseOption);
runsDownload.SetAction((parseResult, _) =>
{
var runId = parseResult.GetValue(runIdArg) ?? string.Empty;
var output = parseResult.GetValue(runOutputOption) ?? string.Empty;
var overwrite = parseResult.GetValue(runOverwriteOption);
var verifyHash = parseResult.GetValue(runVerifyHashOption);
var runType = parseResult.GetValue(runTypeOption) ?? "evidence";
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportRunDownloadAsync(
services,
runId,
output,
overwrite,
verifyHash,
runType,
verbose,
cancellationToken);
});
runs.Add(runsList);
runs.Add(runsShow);
runs.Add(runsDownload);
export.Add(runs);
var start = new Command("start", "Start export jobs.");
var startProfileOption = new Option<string>("--profile-id")
{
Description = "Export profile identifier.",
IsRequired = true
};
var startSelectorOption = new Option<string[]?>("--selector", new[] { "-s" })
{
Description = "Selector key=value filters (repeatable).",
AllowMultipleArgumentsPerToken = true
};
var startCallbackOption = new Option<string?>("--callback-url")
{
Description = "Optional callback URL for completion notifications."
};
var startEvidence = new Command("evidence", "Start an evidence export run.");
startEvidence.Add(startProfileOption);
startEvidence.Add(startSelectorOption);
startEvidence.Add(startCallbackOption);
startEvidence.Add(jsonOption);
startEvidence.Add(verboseOption);
startEvidence.SetAction((parseResult, _) =>
{
var profileId = parseResult.GetValue(startProfileOption) ?? string.Empty;
var selectors = parseResult.GetValue(startSelectorOption);
var callback = parseResult.GetValue(startCallbackOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportStartEvidenceAsync(
services,
profileId,
selectors,
callback,
json,
verbose,
cancellationToken);
});
var startAttestation = new Command("attestation", "Start an attestation export run.");
startAttestation.Add(startProfileOption);
startAttestation.Add(startSelectorOption);
var startTransparencyOption = new Option<bool>("--include-transparency")
{
Description = "Include transparency log entries."
};
startAttestation.Add(startTransparencyOption);
startAttestation.Add(startCallbackOption);
startAttestation.Add(jsonOption);
startAttestation.Add(verboseOption);
startAttestation.SetAction((parseResult, _) =>
{
var profileId = parseResult.GetValue(startProfileOption) ?? string.Empty;
var selectors = parseResult.GetValue(startSelectorOption);
var includeTransparency = parseResult.GetValue(startTransparencyOption);
var callback = parseResult.GetValue(startCallbackOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExportStartAttestationAsync(
services,
profileId,
selectors,
includeTransparency,
callback,
json,
verbose,
cancellationToken);
});
start.Add(startEvidence);
start.Add(startAttestation);
export.Add(start);
return export;
}
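// Rough sketch of the command surface wired above, using the `stella` binary name
// that appears elsewhere in this changeset; flag spellings come from the option
// definitions in BuildExportCommand, everything else is illustrative:
//   stella export profiles list --limit 20
//   stella export runs list --profile-id prof-123 --json
//   stella export runs download run-456 --output ./bundle.tar.gz --overwrite --verify-hash <sha256>
//   stella export start attestation --profile-id prof-123 -s env=prod --include-transparency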
// CLI-PARITY-41-002: Notify command group
private static Command BuildNotifyCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
@@ -9038,6 +9294,79 @@ internal static class CommandFactory
notify.Add(deliveries);
// notify simulate
var simulate = new Command("simulate", "Simulate notification rules against events.");
var simulateEventsFileOption = new Option<string?>("--events-file")
{
Description = "Path to JSON file containing events array for simulation."
};
var simulateRulesFileOption = new Option<string?>("--rules-file")
{
Description = "Optional JSON file containing rules array to evaluate (overrides server rules)."
};
var simulateEnabledOnlyOption = new Option<bool>("--enabled-only")
{
Description = "Only evaluate enabled rules."
};
var simulateLookbackOption = new Option<int?>("--lookback-minutes")
{
Description = "Historical lookback window for events."
};
var simulateMaxEventsOption = new Option<int?>("--max-events")
{
Description = "Maximum events to evaluate."
};
var simulateEventKindOption = new Option<string?>("--event-kind")
{
Description = "Filter simulation to a specific event kind."
};
var simulateIncludeNonMatchesOption = new Option<bool>("--include-non-matches")
{
Description = "Include non-match explanations."
};
simulate.Add(tenantOption);
simulate.Add(simulateEventsFileOption);
simulate.Add(simulateRulesFileOption);
simulate.Add(simulateEnabledOnlyOption);
simulate.Add(simulateLookbackOption);
simulate.Add(simulateMaxEventsOption);
simulate.Add(simulateEventKindOption);
simulate.Add(simulateIncludeNonMatchesOption);
simulate.Add(jsonOption);
simulate.Add(verboseOption);
simulate.SetAction((parseResult, _) =>
{
var tenant = parseResult.GetValue(tenantOption);
var eventsFile = parseResult.GetValue(simulateEventsFileOption);
var rulesFile = parseResult.GetValue(simulateRulesFileOption);
var enabledOnly = parseResult.GetValue(simulateEnabledOnlyOption);
var lookback = parseResult.GetValue(simulateLookbackOption);
var maxEvents = parseResult.GetValue(simulateMaxEventsOption);
var eventKind = parseResult.GetValue(simulateEventKindOption);
var includeNonMatches = parseResult.GetValue(simulateIncludeNonMatchesOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleNotifySimulateAsync(
services,
tenant,
eventsFile,
rulesFile,
enabledOnly,
lookback,
maxEvents,
eventKind,
includeNonMatches,
json,
verbose,
cancellationToken);
});
notify.Add(simulate);
// notify send
var send = new Command("send", "Send a notification.");
@@ -9112,6 +9441,61 @@ internal static class CommandFactory
notify.Add(send);
// notify ack
var ack = new Command("ack", "Acknowledge a notification or incident.");
var ackTenantOption = new Option<string?>("--tenant")
{
Description = "Tenant identifier (header)."
};
var ackIncidentOption = new Option<string?>("--incident-id")
{
Description = "Incident identifier to acknowledge."
};
var ackTokenOption = new Option<string?>("--token")
{
Description = "Signed acknowledgment token."
};
var ackByOption = new Option<string?>("--by")
{
Description = "Actor performing the acknowledgment."
};
var ackCommentOption = new Option<string?>("--comment")
{
Description = "Optional acknowledgment comment."
};
ack.Add(ackTenantOption);
ack.Add(ackIncidentOption);
ack.Add(ackTokenOption);
ack.Add(ackByOption);
ack.Add(ackCommentOption);
ack.Add(jsonOption);
ack.Add(verboseOption);
ack.SetAction((parseResult, _) =>
{
var tenant = parseResult.GetValue(ackTenantOption);
var incidentId = parseResult.GetValue(ackIncidentOption);
var token = parseResult.GetValue(ackTokenOption);
var by = parseResult.GetValue(ackByOption);
var comment = parseResult.GetValue(ackCommentOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleNotifyAckAsync(
services,
tenant,
incidentId,
token,
by,
comment,
json,
verbose,
cancellationToken);
});
notify.Add(ack);
return notify;
}
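// Sketch of the corresponding invocations; option names come from the definitions
// above, while the token format is an assumption:
//   stella notify simulate --tenant acme --events-file events.json --enabled-only --include-non-matches --json
//   stella notify ack --tenant acme --incident-id inc-42 --by ops@example.com --comment "paged on-call"
//   stella notify ack --token <signed-ack-token>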
@@ -10682,4 +11066,3 @@ internal static class CommandFactory
return devportal;
}
}

View File

@@ -23,6 +23,8 @@ using Microsoft.Extensions.Options;
using Spectre.Console;
using Spectre.Console.Rendering;
using StellaOps.Auth.Client;
using StellaOps.ExportCenter.Client;
using StellaOps.ExportCenter.Client.Models;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Output;
using StellaOps.Cli.Prompts;
@@ -24774,8 +24776,485 @@ stella policy test {policyName}.stella
#endregion
#region Export Handlers (CLI-EXPORT-35-037)
internal static async Task<int> HandleExportProfilesListAsync(
IServiceProvider services,
int? limit,
string? cursor,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var response = await client.ListProfilesAsync(cursor, limit, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(response, JsonOptions));
return 0;
}
if (response.Profiles.Count == 0)
{
AnsiConsole.MarkupLine("[yellow]No export profiles found.[/]");
return 0;
}
var table = new Table();
table.AddColumn("Profile ID");
table.AddColumn("Name");
table.AddColumn("Adapter");
table.AddColumn("Format");
table.AddColumn("Signing");
table.AddColumn("Created");
table.AddColumn("Updated");
foreach (var profile in response.Profiles)
{
table.AddRow(
Markup.Escape(profile.ProfileId),
Markup.Escape(profile.Name),
Markup.Escape(profile.Adapter),
Markup.Escape(profile.OutputFormat),
profile.SigningEnabled ? "[green]Yes[/]" : "[grey]No[/]",
profile.CreatedAt.ToString("u", CultureInfo.InvariantCulture),
profile.UpdatedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]");
}
AnsiConsole.Write(table);
return 0;
}
internal static async Task<int> HandleExportProfileShowAsync(
IServiceProvider services,
string profileId,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var profile = await client.GetProfileAsync(profileId, cancellationToken).ConfigureAwait(false);
if (profile is null)
{
AnsiConsole.MarkupLine($"[red]Profile not found:[/] {Markup.Escape(profileId)}");
return 1;
}
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(profile, JsonOptions));
return 0;
}
var profileTable = new Table { Border = TableBorder.Rounded };
profileTable.AddColumn("Field");
profileTable.AddColumn("Value");
profileTable.AddRow("Profile ID", Markup.Escape(profile.ProfileId));
profileTable.AddRow("Name", Markup.Escape(profile.Name));
profileTable.AddRow("Description", string.IsNullOrWhiteSpace(profile.Description) ? "[grey]-[/]" : Markup.Escape(profile.Description));
profileTable.AddRow("Adapter", Markup.Escape(profile.Adapter));
profileTable.AddRow("Format", Markup.Escape(profile.OutputFormat));
profileTable.AddRow("Signing", profile.SigningEnabled ? "[green]Enabled[/]" : "[grey]Disabled[/]");
profileTable.AddRow("Created", profile.CreatedAt.ToString("u", CultureInfo.InvariantCulture));
profileTable.AddRow("Updated", profile.UpdatedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]");
if (profile.Selectors is { Count: > 0 })
{
var selectorTable = new Table { Title = new TableTitle("Selectors") };
selectorTable.AddColumn("Key");
selectorTable.AddColumn("Value");
foreach (var selector in profile.Selectors)
{
selectorTable.AddRow(Markup.Escape(selector.Key), Markup.Escape(selector.Value));
}
AnsiConsole.Write(profileTable);
AnsiConsole.WriteLine();
AnsiConsole.Write(selectorTable);
}
else
{
AnsiConsole.Write(profileTable);
}
return 0;
}
internal static async Task<int> HandleExportRunsListAsync(
IServiceProvider services,
string? profileId,
int? limit,
string? cursor,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var response = await client.ListRunsAsync(profileId, cursor, limit, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(response, JsonOptions));
return 0;
}
if (response.Runs.Count == 0)
{
AnsiConsole.MarkupLine("[yellow]No export runs found.[/]");
return 0;
}
var table = new Table();
table.AddColumn("Run ID");
table.AddColumn("Profile");
table.AddColumn("Status");
table.AddColumn("Progress");
table.AddColumn("Started");
table.AddColumn("Completed");
table.AddColumn("Bundle");
foreach (var run in response.Runs)
{
table.AddRow(
Markup.Escape(run.RunId),
Markup.Escape(run.ProfileId),
Markup.Escape(run.Status),
run.Progress.HasValue ? $"{run.Progress.Value}%" : "[grey]-[/]",
run.StartedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]",
run.CompletedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]",
string.IsNullOrWhiteSpace(run.BundleHash) ? "[grey]-[/]" : Markup.Escape(run.BundleHash));
}
AnsiConsole.Write(table);
if (response.HasMore && !string.IsNullOrWhiteSpace(response.ContinuationToken))
{
AnsiConsole.MarkupLine($"[yellow]More available. Use --cursor {Markup.Escape(response.ContinuationToken)}[/]");
}
return 0;
}
internal static async Task<int> HandleExportRunShowAsync(
IServiceProvider services,
string runId,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var run = await client.GetRunAsync(runId, cancellationToken).ConfigureAwait(false);
if (run is null)
{
AnsiConsole.MarkupLine($"[red]Run not found:[/] {Markup.Escape(runId)}");
return 1;
}
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(run, JsonOptions));
return 0;
}
var table = new Table { Border = TableBorder.Rounded };
table.AddColumn("Field");
table.AddColumn("Value");
table.AddRow("Run ID", Markup.Escape(run.RunId));
table.AddRow("Profile ID", Markup.Escape(run.ProfileId));
table.AddRow("Status", Markup.Escape(run.Status));
table.AddRow("Progress", run.Progress.HasValue ? $"{run.Progress.Value}%" : "[grey]-[/]");
table.AddRow("Started", run.StartedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]");
table.AddRow("Completed", run.CompletedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "[grey]-[/]");
table.AddRow("Bundle Hash", string.IsNullOrWhiteSpace(run.BundleHash) ? "[grey]-[/]" : Markup.Escape(run.BundleHash));
table.AddRow("Bundle URL", string.IsNullOrWhiteSpace(run.BundleUrl) ? "[grey]-[/]" : Markup.Escape(run.BundleUrl));
table.AddRow("Error Code", string.IsNullOrWhiteSpace(run.ErrorCode) ? "[grey]-[/]" : Markup.Escape(run.ErrorCode));
table.AddRow("Error Message", string.IsNullOrWhiteSpace(run.ErrorMessage) ? "[grey]-[/]" : Markup.Escape(run.ErrorMessage));
AnsiConsole.Write(table);
return 0;
}
internal static async Task<int> HandleExportRunDownloadAsync(
IServiceProvider services,
string runId,
string outputPath,
bool overwrite,
string? verifyHash,
string runType,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
if (File.Exists(outputPath) && !overwrite)
{
AnsiConsole.MarkupLine($"[red]Output file already exists:[/] {Markup.Escape(outputPath)} (use --overwrite to replace)");
return 1;
}
Directory.CreateDirectory(Path.GetDirectoryName(Path.GetFullPath(outputPath)) ?? ".");
Stream? stream = null;
if (string.Equals(runType, "attestation", StringComparison.OrdinalIgnoreCase))
{
stream = await client.DownloadAttestationExportAsync(runId, cancellationToken).ConfigureAwait(false);
}
else
{
stream = await client.DownloadEvidenceExportAsync(runId, cancellationToken).ConfigureAwait(false);
}
if (stream is null)
{
AnsiConsole.MarkupLine($"[red]Export bundle not available for run:[/] {Markup.Escape(runId)}");
return 1;
}
await using (stream)
await using (var fileStream = File.Create(outputPath))
{
await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
}
if (!string.IsNullOrWhiteSpace(verifyHash))
{
await using var file = File.OpenRead(outputPath);
var hash = await SHA256.HashDataAsync(file, cancellationToken).ConfigureAwait(false);
var hashString = Convert.ToHexString(hash).ToLowerInvariant();
if (!string.Equals(hashString, verifyHash.Trim(), StringComparison.OrdinalIgnoreCase))
{
AnsiConsole.MarkupLine($"[red]Hash verification failed.[/] expected={Markup.Escape(verifyHash)}, actual={hashString}");
return 1;
}
}
AnsiConsole.MarkupLine($"[green]Bundle written to[/] {Markup.Escape(outputPath)}");
return 0;
}
internal static async Task<int> HandleExportStartEvidenceAsync(
IServiceProvider services,
string profileId,
string[]? selectors,
string? callbackUrl,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var selectorMap = ParseSelectorMap(selectors);
var request = new CreateEvidenceExportRequest(profileId, selectorMap, callbackUrl);
var response = await client.CreateEvidenceExportAsync(request, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(response, JsonOptions));
return 0;
}
AnsiConsole.MarkupLine($"[green]Export started.[/] runId={Markup.Escape(response.RunId)} status={Markup.Escape(response.Status)}");
return 0;
}
internal static async Task<int> HandleExportStartAttestationAsync(
IServiceProvider services,
string profileId,
string[]? selectors,
bool includeTransparencyLog,
string? callbackUrl,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<IExportCenterClient>();
var selectorMap = ParseSelectorMap(selectors);
var request = new CreateAttestationExportRequest(profileId, selectorMap, includeTransparencyLog, callbackUrl);
var response = await client.CreateAttestationExportAsync(request, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(response, JsonOptions));
return 0;
}
AnsiConsole.MarkupLine($"[green]Attestation export started.[/] runId={Markup.Escape(response.RunId)} status={Markup.Escape(response.Status)}");
return 0;
}
private static IReadOnlyDictionary<string, string>? ParseSelectorMap(string[]? selectors)
{
if (selectors is null || selectors.Length == 0)
{
return null;
}
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
foreach (var selector in selectors)
{
if (string.IsNullOrWhiteSpace(selector))
{
continue;
}
var parts = selector.Split('=', 2, StringSplitOptions.TrimEntries);
if (parts.Length != 2 || string.IsNullOrWhiteSpace(parts[0]) || string.IsNullOrWhiteSpace(parts[1]))
{
AnsiConsole.MarkupLine($"[yellow]Ignoring selector with invalid format (expected key=value):[/] {Markup.Escape(selector)}");
continue;
}
result[parts[0]] = parts[1];
}
return result.Count == 0 ? null : result;
}
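// Example: ParseSelectorMap(new[] { "env=prod", " team = sec ", "bad" }) yields
// { "env" -> "prod", "team" -> "sec" } and prints a warning for "bad".
// Keys are case-insensitive, so a later "ENV=stage" replaces the "env" entry.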
#endregion
#region Notify Handlers (CLI-PARITY-41-002)
internal static async Task<int> HandleNotifySimulateAsync(
IServiceProvider services,
string? tenant,
string? eventsFile,
string? rulesFile,
bool enabledOnly,
int? lookbackMinutes,
int? maxEvents,
string? eventKind,
bool includeNonMatches,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<INotifyClient>();
var eventsPayload = LoadJsonElement(eventsFile);
var rulesPayload = LoadJsonElement(rulesFile);
var request = new NotifySimulationRequest
{
TenantId = tenant,
Events = eventsPayload,
Rules = rulesPayload,
EnabledRulesOnly = enabledOnly,
HistoricalLookbackMinutes = lookbackMinutes,
MaxEvents = maxEvents,
EventKindFilter = eventKind,
IncludeNonMatches = includeNonMatches
};
var result = await client.SimulateAsync(request, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(result, JsonOptions));
return 0;
}
AnsiConsole.MarkupLine(result.SimulationId is null
? "[yellow]Simulation completed.[/]"
: $"[green]Simulation {Markup.Escape(result.SimulationId)} completed.[/]");
var table = new Table();
table.AddColumn("Total Events");
table.AddColumn("Total Rules");
table.AddColumn("Matched Events");
table.AddColumn("Actions");
table.AddColumn("Duration (ms)");
table.AddRow(
(result.TotalEvents ?? 0).ToString(CultureInfo.InvariantCulture),
(result.TotalRules ?? 0).ToString(CultureInfo.InvariantCulture),
(result.MatchedEvents ?? 0).ToString(CultureInfo.InvariantCulture),
(result.TotalActionsTriggered ?? 0).ToString(CultureInfo.InvariantCulture),
result.DurationMs?.ToString("0.00", CultureInfo.InvariantCulture) ?? "-");
AnsiConsole.Write(table);
return 0;
}
internal static async Task<int> HandleNotifyAckAsync(
IServiceProvider services,
string? tenant,
string? incidentId,
string? token,
string? acknowledgedBy,
string? comment,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
SetVerbosity(services, verbose);
var client = services.GetRequiredService<INotifyClient>();
if (string.IsNullOrWhiteSpace(token) && string.IsNullOrWhiteSpace(incidentId))
{
AnsiConsole.MarkupLine("[red]Either --token or --incident-id is required.[/]");
return 1;
}
var request = new NotifyAckRequest
{
TenantId = tenant,
IncidentId = incidentId,
Token = token,
AcknowledgedBy = acknowledgedBy,
Comment = comment
};
var result = await client.AckAsync(request, cancellationToken).ConfigureAwait(false);
if (json)
{
AnsiConsole.WriteLine(JsonSerializer.Serialize(result, JsonOptions));
return 0;
}
if (!result.Success)
{
AnsiConsole.MarkupLine($"[red]Acknowledge failed:[/] {Markup.Escape(result.Error ?? "unknown error")}");
return 1;
}
AnsiConsole.MarkupLine($"[green]Acknowledged.[/] incidentId={Markup.Escape(result.IncidentId ?? incidentId ?? "n/a")}");
return 0;
}
private static JsonElement? LoadJsonElement(string? filePath)
{
if (string.IsNullOrWhiteSpace(filePath))
{
return null;
}
try
{
var content = File.ReadAllText(filePath);
using var doc = JsonDocument.Parse(content);
return doc.RootElement.Clone();
}
catch (Exception ex)
{
AnsiConsole.MarkupLine($"[yellow]Failed to load JSON from {Markup.Escape(filePath)}:[/] {Markup.Escape(ex.Message)}");
return null;
}
}
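// Illustrative --events-file payload; the concrete event schema is an assumption,
// since LoadJsonElement forwards whatever JSON it parses to the simulation API verbatim:
// [
//   { "kind": "scanner.report.ready", "tenant": "acme", "payload": { "severity": "high" } }
// ]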
internal static async Task<int> HandleNotifyChannelsListAsync(
IServiceProvider services,
string? tenant,

View File

@@ -15,6 +15,7 @@ using StellaOps.Cli.Telemetry;
using StellaOps.AirGap.Policy;
using StellaOps.Configuration;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.ExportCenter.Client;
namespace StellaOps.Cli;
@@ -124,6 +125,16 @@ internal static class Program
}
}).AddEgressPolicyGuard("stellaops-cli", "backend-api");
services.AddHttpClient<IExportCenterClient, ExportCenterClient>(client =>
{
client.Timeout = TimeSpan.FromMinutes(10);
if (!string.IsNullOrWhiteSpace(options.BackendUrl) &&
Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var exportCenterUri))
{
client.BaseAddress = exportCenterUri;
}
}).AddEgressPolicyGuard("stellaops-cli", "export-center-api");
services.AddHttpClient<IConcelierObservationsClient, ConcelierObservationsClient>(client =>
{
client.Timeout = TimeSpan.FromSeconds(30);

View File

@@ -67,4 +67,18 @@ internal interface INotifyClient
Task<NotifySendResult> SendAsync(
NotifySendRequest request,
CancellationToken cancellationToken);
/// <summary>
/// Simulate rule evaluation.
/// </summary>
Task<NotifySimulationResult> SimulateAsync(
NotifySimulationRequest request,
CancellationToken cancellationToken);
/// <summary>
/// Acknowledge an incident or signed token.
/// </summary>
Task<NotifyAckResult> AckAsync(
NotifyAckRequest request,
CancellationToken cancellationToken);
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Cli.Services.Models;
@@ -610,3 +611,83 @@ internal sealed class NotifySendResult
[JsonPropertyName("idempotencyKey")]
public string? IdempotencyKey { get; init; }
}
internal sealed class NotifySimulationRequest
{
[JsonPropertyName("tenantId")]
public string? TenantId { get; init; }
[JsonPropertyName("events")]
public JsonElement? Events { get; init; }
[JsonPropertyName("rules")]
public JsonElement? Rules { get; init; }
[JsonPropertyName("enabledRulesOnly")]
public bool? EnabledRulesOnly { get; init; }
[JsonPropertyName("historicalLookbackMinutes")]
public int? HistoricalLookbackMinutes { get; init; }
[JsonPropertyName("maxEvents")]
public int? MaxEvents { get; init; }
[JsonPropertyName("eventKindFilter")]
public string? EventKindFilter { get; init; }
[JsonPropertyName("includeNonMatches")]
public bool? IncludeNonMatches { get; init; }
}
internal sealed class NotifySimulationResult
{
[JsonPropertyName("simulationId")]
public string? SimulationId { get; init; }
[JsonPropertyName("totalEvents")]
public int? TotalEvents { get; init; }
[JsonPropertyName("totalRules")]
public int? TotalRules { get; init; }
[JsonPropertyName("matchedEvents")]
public int? MatchedEvents { get; init; }
[JsonPropertyName("totalActionsTriggered")]
public int? TotalActionsTriggered { get; init; }
[JsonPropertyName("durationMs")]
public double? DurationMs { get; init; }
}
internal sealed class NotifyAckRequest
{
[JsonPropertyName("tenantId")]
public string? TenantId { get; init; }
[JsonPropertyName("incidentId")]
public string? IncidentId { get; init; }
[JsonPropertyName("acknowledgedBy")]
public string? AcknowledgedBy { get; init; }
[JsonPropertyName("comment")]
public string? Comment { get; init; }
[JsonPropertyName("token")]
public string? Token { get; init; }
}
internal sealed class NotifyAckResult
{
[JsonPropertyName("success")]
public bool Success { get; init; }
[JsonPropertyName("incidentId")]
public string? IncidentId { get; init; }
[JsonPropertyName("error")]
public string? Error { get; init; }
[JsonPropertyName("message")]
public string? Message { get; init; }
}

View File

@@ -569,6 +569,131 @@ internal sealed class NotifyClient : INotifyClient
}
}
public async Task<NotifySimulationResult> SimulateAsync(
NotifySimulationRequest request,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
try
{
EnsureConfigured();
var json = JsonSerializer.Serialize(request, SerializerOptions);
using var content = new StringContent(json, Encoding.UTF8, "application/json");
using var httpRequest = new HttpRequestMessage(HttpMethod.Post, "/api/v2/simulate")
{
Content = content
};
if (!string.IsNullOrWhiteSpace(request.TenantId))
{
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", request.TenantId);
}
await AuthorizeRequestAsync(httpRequest, "notify.simulate", cancellationToken).ConfigureAwait(false);
using var response = await httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
logger.LogError(
"Failed to simulate notify rules (status {StatusCode}). Response: {Payload}",
(int)response.StatusCode,
string.IsNullOrWhiteSpace(payload) ? "<empty>" : payload);
return new NotifySimulationResult { SimulationId = null, TotalEvents = 0, TotalRules = 0 };
}
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
var result = await JsonSerializer
.DeserializeAsync<NotifySimulationResult>(stream, SerializerOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? new NotifySimulationResult { SimulationId = null, TotalEvents = 0, TotalRules = 0 };
}
catch (HttpRequestException ex)
{
logger.LogError(ex, "HTTP error while simulating notify rules");
return new NotifySimulationResult { SimulationId = null, TotalEvents = 0, TotalRules = 0 };
}
catch (TaskCanceledException ex) when (!cancellationToken.IsCancellationRequested)
{
logger.LogError(ex, "Request timed out while simulating notify rules");
return new NotifySimulationResult { SimulationId = null, TotalEvents = 0, TotalRules = 0 };
}
}
public async Task<NotifyAckResult> AckAsync(
NotifyAckRequest request,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
try
{
EnsureConfigured();
var hasToken = !string.IsNullOrWhiteSpace(request.Token);
using var httpRequest = hasToken
? new HttpRequestMessage(HttpMethod.Get, $"/api/v2/ack?token={Uri.EscapeDataString(request.Token!)}")
: new HttpRequestMessage(HttpMethod.Post, "/api/v2/ack")
{
Content = new StringContent(JsonSerializer.Serialize(new AckApiRequestBody
{
TenantId = request.TenantId,
IncidentId = request.IncidentId,
AcknowledgedBy = request.AcknowledgedBy,
Comment = request.Comment
}, SerializerOptions), Encoding.UTF8, "application/json")
};
if (!string.IsNullOrWhiteSpace(request.TenantId))
{
httpRequest.Headers.TryAddWithoutValidation("X-Tenant-Id", request.TenantId);
}
await AuthorizeRequestAsync(httpRequest, "notify.write", cancellationToken).ConfigureAwait(false);
using var response = await httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
logger.LogError(
"Failed to acknowledge notification (status {StatusCode}). Response: {Payload}",
(int)response.StatusCode,
string.IsNullOrWhiteSpace(payload) ? "<empty>" : payload);
return new NotifyAckResult { Success = false, IncidentId = request.IncidentId, Error = payload };
}
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
var result = await JsonSerializer
.DeserializeAsync<NotifyAckResult>(stream, SerializerOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? new NotifyAckResult { Success = true, IncidentId = request.IncidentId };
}
catch (HttpRequestException ex)
{
logger.LogError(ex, "HTTP error while acknowledging notification");
return new NotifyAckResult { Success = false, IncidentId = request.IncidentId, Error = ex.Message };
}
catch (TaskCanceledException ex) when (!cancellationToken.IsCancellationRequested)
{
logger.LogError(ex, "Request timed out while acknowledging notification");
return new NotifyAckResult { Success = false, IncidentId = request.IncidentId, Error = "Request timed out" };
}
}
private sealed record AckApiRequestBody
{
public string? TenantId { get; init; }
public string? IncidentId { get; init; }
public string? AcknowledgedBy { get; init; }
public string? Comment { get; init; }
}
private static string BuildChannelListUri(NotifyChannelListRequest request)
{
var queryParams = new List<string>();

View File

@@ -70,6 +70,7 @@
<ProjectReference Include="../../Notify/__Libraries/StellaOps.Notify.Storage.Postgres/StellaOps.Notify.Storage.Postgres.csproj" />
<ProjectReference Include="../../Excititor/__Libraries/StellaOps.Excititor.Storage.Postgres/StellaOps.Excititor.Storage.Postgres.csproj" />
<ProjectReference Include="../../Policy/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
<ProjectReference Include="../../ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Client/StellaOps.ExportCenter.Client.csproj" />
</ItemGroup>
<ItemGroup Condition="'$(StellaOpsEnableCryptoPro)' == 'true'">

View File

@@ -46,7 +46,7 @@
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.0" />
<Compile Include="$(ConcelierSharedTestsPath)AssemblyInfo.cs" Link="Shared\AssemblyInfo.cs" Condition="'$(ConcelierSharedTestsPath)' != ''" />
<Compile Include="$(ConcelierSharedTestsPath)MongoFixtureCollection.cs" Link="Shared\MongoFixtureCollection.cs" Condition="'$(ConcelierSharedTestsPath)' != ''" />
<ProjectReference Include="$(ConcelierTestingPath)StellaOps.Concelier.Testing.csproj" Condition="'$(ConcelierTestingPath)' != ''" />

View File

@@ -1,7 +1,9 @@
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core.Evidence;
using StellaOps.Excititor.Core.Storage;
using StellaOps.Excititor.WebService.Services;
using static Program;
@@ -9,16 +11,22 @@ using static Program;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Attestation API endpoints (temporarily disabled while Mongo is removed and Postgres storage is adopted).
/// Attestation API endpoints for listing and retrieving DSSE attestations.
/// </summary>
public static class AttestationEndpoints
{
public static void MapAttestationEndpoints(this WebApplication app)
{
// GET /attestations/vex/list
app.MapGet("/attestations/vex/list", (
app.MapGet("/attestations/vex/list", async (
HttpContext context,
IOptions<VexStorageOptions> storageOptions) =>
[FromQuery] string? since,
[FromQuery] string? until,
[FromQuery] int? limit,
[FromQuery] int? offset,
IOptions<VexStorageOptions> storageOptions,
[FromServices] IVexAttestationStore? attestationStore,
CancellationToken cancellationToken) =>
{
var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
if (scopeResult is not null)
@@ -26,22 +34,55 @@ public static class AttestationEndpoints
return scopeResult;
}
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: false, out _, out var tenantError))
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: false, out var tenant, out var tenantError))
{
return tenantError;
}
return Results.Problem(
detail: "Attestation listing is temporarily unavailable during Postgres migration (Mongo/BSON removed).",
statusCode: StatusCodes.Status503ServiceUnavailable,
title: "Service unavailable");
if (attestationStore is null)
{
return Results.Problem(
detail: "Attestation store is not configured.",
statusCode: StatusCodes.Status503ServiceUnavailable,
title: "Service unavailable");
}
var parsedSince = ParseTimestamp(since);
var parsedUntil = ParseTimestamp(until);
var query = new VexAttestationQuery(
tenant!,
parsedSince,
parsedUntil,
limit ?? 100,
offset ?? 0);
var result = await attestationStore.ListAsync(query, cancellationToken).ConfigureAwait(false);
var items = result.Items
.Select(a => new AttestationListItemDto(
a.AttestationId,
a.ManifestId,
a.MerkleRoot,
a.ItemCount,
a.AttestedAt))
.ToList();
var response = new AttestationListResponse(
items,
result.TotalCount,
result.HasMore);
return Results.Ok(response);
}).WithName("ListVexAttestations");
// GET /attestations/vex/{attestationId}
app.MapGet("/attestations/vex/{attestationId}", (
app.MapGet("/attestations/vex/{attestationId}", async (
HttpContext context,
string attestationId,
IOptions<VexStorageOptions> storageOptions) =>
IOptions<VexStorageOptions> storageOptions,
[FromServices] IVexAttestationStore? attestationStore,
CancellationToken cancellationToken) =>
{
var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
if (scopeResult is not null)
@@ -49,7 +90,7 @@ public static class AttestationEndpoints
return scopeResult;
}
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: false, out _, out var tenantError))
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: false, out var tenant, out var tenantError))
{
return tenantError;
}
@@ -62,10 +103,69 @@ public static class AttestationEndpoints
title: "Validation error");
}
return Results.Problem(
detail: "Attestation retrieval is temporarily unavailable during Postgres migration (Mongo/BSON removed).",
statusCode: StatusCodes.Status503ServiceUnavailable,
title: "Service unavailable");
if (attestationStore is null)
{
return Results.Problem(
detail: "Attestation store is not configured.",
statusCode: StatusCodes.Status503ServiceUnavailable,
title: "Service unavailable");
}
var attestation = await attestationStore.FindByIdAsync(tenant!, attestationId, cancellationToken).ConfigureAwait(false);
if (attestation is null)
{
return Results.NotFound(new
{
error = new { code = "ERR_ATTESTATION_NOT_FOUND", message = $"Attestation '{attestationId}' not found" }
});
}
var response = new AttestationDetailResponse(
attestation.AttestationId,
attestation.Tenant,
attestation.ManifestId,
attestation.MerkleRoot,
attestation.DsseEnvelopeJson,
attestation.DsseEnvelopeHash,
attestation.ItemCount,
attestation.AttestedAt,
attestation.Metadata);
return Results.Ok(response);
}).WithName("GetVexAttestation");
}
private static DateTimeOffset? ParseTimestamp(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
// Parse invariantly (assuming UTC when no offset is supplied) so query strings
// behave the same on every host locale.
return DateTimeOffset.TryParse(value, System.Globalization.CultureInfo.InvariantCulture, System.Globalization.DateTimeStyles.AssumeUniversal, out var parsed) ? parsed : null;
}
}
// Response DTOs
public sealed record AttestationListItemDto(
[property: JsonPropertyName("attestationId")] string AttestationId,
[property: JsonPropertyName("manifestId")] string ManifestId,
[property: JsonPropertyName("merkleRoot")] string MerkleRoot,
[property: JsonPropertyName("itemCount")] int ItemCount,
[property: JsonPropertyName("attestedAt")] DateTimeOffset AttestedAt);
public sealed record AttestationListResponse(
[property: JsonPropertyName("items")] IReadOnlyList<AttestationListItemDto> Items,
[property: JsonPropertyName("totalCount")] int TotalCount,
[property: JsonPropertyName("hasMore")] bool HasMore);
public sealed record AttestationDetailResponse(
[property: JsonPropertyName("attestationId")] string AttestationId,
[property: JsonPropertyName("tenant")] string Tenant,
[property: JsonPropertyName("manifestId")] string ManifestId,
[property: JsonPropertyName("merkleRoot")] string MerkleRoot,
[property: JsonPropertyName("dsseEnvelopeJson")] string DsseEnvelopeJson,
[property: JsonPropertyName("dsseEnvelopeHash")] string DsseEnvelopeHash,
[property: JsonPropertyName("itemCount")] int ItemCount,
[property: JsonPropertyName("attestedAt")] DateTimeOffset AttestedAt,
[property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string> Metadata);

View File

@@ -82,6 +82,9 @@ services.AddSingleton<IGraphOverlayStore>(sp =>
});
services.AddSingleton<IVexEvidenceLockerService, VexEvidenceLockerService>();
services.AddSingleton<IVexEvidenceAttestor, StellaOps.Excititor.Attestation.Evidence.VexEvidenceAttestor>();
// OBS-52/53/54: Attestation storage and timeline event recording
services.TryAddSingleton<IVexAttestationStore, InMemoryVexAttestationStore>();
services.TryAddSingleton<IVexTimelineEventRecorder, VexTimelineEventRecorder>();
services.AddScoped<IVexIngestOrchestrator, VexIngestOrchestrator>();
services.AddSingleton<VexStatementBackfillService>();
services.AddOptions<ExcititorObservabilityOptions>()

View File

@@ -12,6 +12,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Orchestration;
using StellaOps.Excititor.Core.Storage;
using StellaOps.Excititor.Worker.Options;
namespace StellaOps.Excititor.Worker.Orchestration;
@@ -19,10 +20,12 @@ namespace StellaOps.Excititor.Worker.Orchestration;
/// <summary>
/// Default implementation of <see cref="IVexWorkerOrchestratorClient"/>.
/// Stores heartbeats and artifacts locally and, when configured, mirrors them to the Orchestrator worker API.
/// Per EXCITITOR-ORCH-32/33: Uses append-only checkpoint store for deterministic persistence and replay.
/// </summary>
internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
{
private readonly IVexConnectorStateRepository _stateRepository;
private readonly IAppendOnlyCheckpointStore? _checkpointStore;
private readonly TimeProvider _timeProvider;
private readonly IOptions<VexWorkerOrchestratorOptions> _options;
private readonly ILogger<VexWorkerOrchestratorClient> _logger;
@@ -36,9 +39,11 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
TimeProvider timeProvider,
IOptions<VexWorkerOrchestratorOptions> options,
ILogger<VexWorkerOrchestratorClient> logger,
HttpClient? httpClient = null)
HttpClient? httpClient = null,
IAppendOnlyCheckpointStore? checkpointStore = null)
{
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_checkpointStore = checkpointStore;
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -150,6 +155,18 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
heartbeat.LastArtifactHash);
}
// Log to append-only checkpoint store (EXCITITOR-ORCH-32/33)
await LogCheckpointMutationAsync(
context,
CheckpointMutation.Heartbeat(
context.RunId,
timestamp,
cursor: null,
heartbeat.LastArtifactHash,
heartbeat.LastArtifactKind,
idempotencyKey: $"hb-{context.RunId}-{sequence}"),
cancellationToken).ConfigureAwait(false);
await SendRemoteHeartbeatAsync(context, heartbeat, cancellationToken).ConfigureAwait(false);
}
@@ -194,6 +211,17 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
artifact.Kind,
artifact.ProviderId);
// Log to append-only checkpoint store (EXCITITOR-ORCH-32/33)
await LogCheckpointMutationAsync(
context,
CheckpointMutation.Artifact(
context.RunId,
artifact.CreatedAt,
artifact.Hash,
artifact.Kind,
idempotencyKey: $"artifact-{artifact.Hash}"),
cancellationToken).ConfigureAwait(false);
await SendRemoteProgressForArtifactAsync(context, artifact, cancellationToken).ConfigureAwait(false);
}
@@ -232,6 +260,19 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
result.ClaimsGenerated,
duration);
// Log to append-only checkpoint store (EXCITITOR-ORCH-32/33)
await LogCheckpointMutationAsync(
context,
CheckpointMutation.Completed(
context.RunId,
result.CompletedAt,
result.LastCheckpoint,
result.DocumentsProcessed,
result.ClaimsGenerated,
result.LastArtifactHash,
idempotencyKey: $"complete-{context.RunId}"),
cancellationToken).ConfigureAwait(false);
await SendRemoteCompletionAsync(context, result, cancellationToken).ConfigureAwait(false);
}
@@ -271,6 +312,19 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
errorCode,
retryAfterSeconds);
// Log to append-only checkpoint store (EXCITITOR-ORCH-32/33)
await LogCheckpointMutationAsync(
context,
CheckpointMutation.Failed(
context.RunId,
now,
errorCode,
errorMessage,
retryAfterSeconds,
state.LastCheckpoint?.ToString("O"),
idempotencyKey: $"fail-{context.RunId}"),
cancellationToken).ConfigureAwait(false);
await SendRemoteCompletionAsync(
context,
new VexWorkerJobResult(0, 0, state.LastCheckpoint, state.LastArtifactHash, now),
@@ -363,6 +417,20 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
context.ConnectorId,
checkpoint.Cursor ?? "(none)",
checkpoint.ProcessedDigests.Length);
// Log to append-only checkpoint store (EXCITITOR-ORCH-32/33)
if (!string.IsNullOrEmpty(checkpoint.Cursor))
{
await LogCheckpointMutationAsync(
context,
CheckpointMutation.CursorUpdate(
context.RunId,
checkpoint.LastProcessedAt ?? now,
checkpoint.Cursor,
checkpoint.ProcessedDigests.Length,
idempotencyKey: $"cursor-{context.RunId}-{checkpoint.Cursor}"),
cancellationToken).ConfigureAwait(false);
}
}
public async ValueTask<VexWorkerCheckpoint?> LoadCheckpointAsync(
@@ -647,6 +715,93 @@ internal sealed class VexWorkerOrchestratorClient : IVexWorkerOrchestratorClient
private string Serialize(object value) => JsonSerializer.Serialize(value, _serializerOptions);
/// <summary>
/// Logs a checkpoint mutation to the append-only store for deterministic replay.
/// Per EXCITITOR-ORCH-32/33: All checkpoint mutations are logged for audit/replay.
/// </summary>
private async ValueTask LogCheckpointMutationAsync(
VexWorkerJobContext context,
CheckpointMutation mutation,
CancellationToken cancellationToken)
{
if (_checkpointStore is null)
{
return;
}
try
{
var result = await _checkpointStore.AppendAsync(
context.Tenant,
context.ConnectorId,
mutation,
cancellationToken).ConfigureAwait(false);
if (_options.Value.EnableVerboseLogging)
{
_logger.LogDebug(
"Checkpoint mutation logged: runId={RunId} type={Type} seq={Sequence} duplicate={IsDuplicate}",
context.RunId,
mutation.Type,
result.SequenceNumber,
result.WasDuplicate);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex,
"Failed to log checkpoint mutation for connector {ConnectorId}: {Type}",
context.ConnectorId,
mutation.Type);
}
}
/// <summary>
/// Gets the append-only mutation log for a connector.
/// Per EXCITITOR-ORCH-32/33: Enables deterministic replay.
/// </summary>
public async ValueTask<IReadOnlyList<CheckpointMutationEvent>> GetCheckpointMutationLogAsync(
string tenant,
string connectorId,
long? sinceSequence = null,
int limit = 100,
CancellationToken cancellationToken = default)
{
if (_checkpointStore is null)
{
return Array.Empty<CheckpointMutationEvent>();
}
return await _checkpointStore.GetMutationLogAsync(
tenant,
connectorId,
sinceSequence,
limit,
cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Replays checkpoint mutations to reconstruct state at a specific sequence.
/// Per EXCITITOR-ORCH-32/33: Deterministic replay for audit/recovery.
/// </summary>
public async ValueTask<CheckpointState?> ReplayCheckpointToSequenceAsync(
string tenant,
string connectorId,
long upToSequence,
CancellationToken cancellationToken = default)
{
if (_checkpointStore is null)
{
return null;
}
return await _checkpointStore.ReplayToSequenceAsync(
tenant,
connectorId,
upToSequence,
cancellationToken).ConfigureAwait(false);
}
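// Plausible recovery flow built on the two helpers above; the client variable,
// connector id, and the SequenceNumber member on CheckpointMutationEvent are
// assumptions beyond what this diff shows:
//   var log = await orchestratorClient.GetCheckpointMutationLogAsync(
//       "tenant-a", "vendor-csaf", sinceSequence: null, limit: 100, cancellationToken);
//   var state = await orchestratorClient.ReplayCheckpointToSequenceAsync(
//       "tenant-a", "vendor-csaf", upToSequence: 42, cancellationToken);
//   // state reflects every logged mutation up to sequence 42, or null when no store is wired.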
private sealed record ClaimRequest(string WorkerId, string? TaskRunnerId, string? JobType, int? LeaseSeconds, string? IdempotencyKey);
private sealed record ClaimResponse(

View File

@@ -0,0 +1,178 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Stored attestation record with DSSE envelope and manifest metadata.
/// </summary>
public sealed record VexStoredAttestation
{
public VexStoredAttestation(
string attestationId,
string tenant,
string manifestId,
string merkleRoot,
string dsseEnvelopeJson,
string dsseEnvelopeHash,
int itemCount,
DateTimeOffset attestedAt,
ImmutableDictionary<string, string>? metadata = null)
{
AttestationId = EnsureNotNullOrWhiteSpace(attestationId, nameof(attestationId));
Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
ManifestId = EnsureNotNullOrWhiteSpace(manifestId, nameof(manifestId));
MerkleRoot = EnsureNotNullOrWhiteSpace(merkleRoot, nameof(merkleRoot));
DsseEnvelopeJson = EnsureNotNullOrWhiteSpace(dsseEnvelopeJson, nameof(dsseEnvelopeJson));
DsseEnvelopeHash = EnsureNotNullOrWhiteSpace(dsseEnvelopeHash, nameof(dsseEnvelopeHash));
ItemCount = itemCount;
AttestedAt = attestedAt.ToUniversalTime();
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Unique attestation identifier.
/// </summary>
public string AttestationId { get; }
/// <summary>
/// Tenant this attestation belongs to.
/// </summary>
public string Tenant { get; }
/// <summary>
/// Manifest ID the attestation covers.
/// </summary>
public string ManifestId { get; }
/// <summary>
/// Merkle root of the manifest items.
/// </summary>
public string MerkleRoot { get; }
/// <summary>
/// DSSE envelope as JSON string.
/// </summary>
public string DsseEnvelopeJson { get; }
/// <summary>
/// SHA-256 hash of the DSSE envelope.
/// </summary>
public string DsseEnvelopeHash { get; }
/// <summary>
/// Number of items in the manifest.
/// </summary>
public int ItemCount { get; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset AttestedAt { get; }
/// <summary>
/// Additional metadata.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; }
/// <summary>
/// Creates a stored attestation from an attestation result.
/// </summary>
public static VexStoredAttestation FromResult(VexEvidenceAttestationResult result)
{
ArgumentNullException.ThrowIfNull(result);
return new VexStoredAttestation(
result.AttestationId,
result.SignedManifest.Tenant,
result.SignedManifest.ManifestId,
result.SignedManifest.MerkleRoot,
result.DsseEnvelopeJson,
result.DsseEnvelopeHash,
result.SignedManifest.Items.Length,
result.AttestedAt,
result.SignedManifest.Metadata);
}
private static string EnsureNotNullOrWhiteSpace(string value, string name)
=> string.IsNullOrWhiteSpace(value) ? throw new ArgumentException($"{name} must be provided.", name) : value.Trim();
}
/// <summary>
/// Query parameters for attestation listing.
/// </summary>
public sealed record VexAttestationQuery
{
public VexAttestationQuery(
string tenant,
DateTimeOffset? since = null,
DateTimeOffset? until = null,
int limit = 100,
int offset = 0)
{
Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
Since = since;
Until = until;
Limit = Math.Clamp(limit, 1, 1000);
Offset = Math.Max(0, offset);
}
public string Tenant { get; }
public DateTimeOffset? Since { get; }
public DateTimeOffset? Until { get; }
public int Limit { get; }
public int Offset { get; }
private static string EnsureNotNullOrWhiteSpace(string value, string name)
=> string.IsNullOrWhiteSpace(value) ? throw new ArgumentException($"{name} must be provided.", name) : value.Trim();
}
/// <summary>
/// Result of an attestation list query.
/// </summary>
public sealed record VexAttestationListResult
{
public VexAttestationListResult(
IReadOnlyList<VexStoredAttestation> items,
int totalCount,
bool hasMore)
{
Items = items ?? Array.Empty<VexStoredAttestation>();
TotalCount = totalCount;
HasMore = hasMore;
}
public IReadOnlyList<VexStoredAttestation> Items { get; }
public int TotalCount { get; }
public bool HasMore { get; }
}
/// <summary>
/// Storage interface for VEX attestations.
/// </summary>
public interface IVexAttestationStore
{
/// <summary>
/// Saves an attestation to the store.
/// </summary>
ValueTask SaveAsync(VexStoredAttestation attestation, CancellationToken cancellationToken);
/// <summary>
/// Finds an attestation by ID.
/// </summary>
ValueTask<VexStoredAttestation?> FindByIdAsync(string tenant, string attestationId, CancellationToken cancellationToken);
/// <summary>
/// Finds an attestation by manifest ID.
/// </summary>
ValueTask<VexStoredAttestation?> FindByManifestIdAsync(string tenant, string manifestId, CancellationToken cancellationToken);
/// <summary>
/// Lists attestations matching the query.
/// </summary>
ValueTask<VexAttestationListResult> ListAsync(VexAttestationQuery query, CancellationToken cancellationToken);
/// <summary>
/// Counts attestations for a tenant.
/// </summary>
ValueTask<int> CountAsync(string tenant, CancellationToken cancellationToken);
}
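// The service wiring earlier in this changeset registers an InMemoryVexAttestationStore
// whose body is not part of this diff; a minimal standalone sketch satisfying the
// interface could look like the following (the type name, ordering, and paging
// semantics are assumptions):
using System.Collections.Concurrent;
using System.Linq;
internal sealed class InMemoryVexAttestationStoreSketch : IVexAttestationStore
{
// Keyed by (tenant, attestationId); VexStoredAttestation lower-cases the tenant already.
private readonly ConcurrentDictionary<(string Tenant, string Id), VexStoredAttestation> _items = new();
public ValueTask SaveAsync(VexStoredAttestation attestation, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(attestation);
_items[(attestation.Tenant, attestation.AttestationId)] = attestation;
return ValueTask.CompletedTask;
}
public ValueTask<VexStoredAttestation?> FindByIdAsync(string tenant, string attestationId, CancellationToken cancellationToken)
{
_items.TryGetValue((tenant.ToLowerInvariant(), attestationId), out var found);
return ValueTask.FromResult<VexStoredAttestation?>(found);
}
public ValueTask<VexStoredAttestation?> FindByManifestIdAsync(string tenant, string manifestId, CancellationToken cancellationToken)
{
var key = tenant.ToLowerInvariant();
return ValueTask.FromResult(_items.Values.FirstOrDefault(a => a.Tenant == key && a.ManifestId == manifestId));
}
public ValueTask<VexAttestationListResult> ListAsync(VexAttestationQuery query, CancellationToken cancellationToken)
{
// Newest-first ordering keeps paging deterministic for equal offsets.
var filtered = _items.Values
.Where(a => a.Tenant == query.Tenant)
.Where(a => query.Since is null || a.AttestedAt >= query.Since)
.Where(a => query.Until is null || a.AttestedAt <= query.Until)
.OrderByDescending(a => a.AttestedAt)
.ToList();
var page = filtered.Skip(query.Offset).Take(query.Limit).ToList();
var hasMore = query.Offset + page.Count < filtered.Count;
return ValueTask.FromResult(new VexAttestationListResult(page, filtered.Count, hasMore));
}
public ValueTask<int> CountAsync(string tenant, CancellationToken cancellationToken)
=> ValueTask.FromResult(_items.Values.Count(a => a.Tenant == tenant.ToLowerInvariant()));
}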

View File

@@ -0,0 +1,324 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Event types for VEX evidence timeline recording.
/// </summary>
public static class VexTimelineEventTypes
{
public const string AttestationCreated = "vex.attestation.created";
public const string AttestationVerified = "vex.attestation.verified";
public const string AttestationFailed = "vex.attestation.failed";
public const string ManifestBuilt = "vex.manifest.built";
public const string ManifestVerified = "vex.manifest.verified";
public const string ManifestVerificationFailed = "vex.manifest.verification_failed";
public const string EvidenceBatchProcessed = "vex.evidence.batch_processed";
public const string EvidenceBatchFailed = "vex.evidence.batch_failed";
public const string LockerSealed = "vex.locker.sealed";
public const string LockerOpened = "vex.locker.opened";
}
/// <summary>
/// Timeline event for VEX evidence operations.
/// </summary>
public sealed record VexTimelineEvent
{
public VexTimelineEvent(
string eventId,
string eventType,
string tenant,
DateTimeOffset occurredAt,
string? manifestId = null,
string? attestationId = null,
string? merkleRoot = null,
int? itemCount = null,
string? errorCode = null,
string? message = null,
ImmutableDictionary<string, string>? metadata = null)
{
EventId = EnsureNotNullOrWhiteSpace(eventId, nameof(eventId));
EventType = EnsureNotNullOrWhiteSpace(eventType, nameof(eventType));
Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
OccurredAt = occurredAt.ToUniversalTime();
ManifestId = TrimToNull(manifestId);
AttestationId = TrimToNull(attestationId);
MerkleRoot = TrimToNull(merkleRoot);
ItemCount = itemCount;
ErrorCode = TrimToNull(errorCode);
Message = TrimToNull(message);
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Unique event identifier.
/// </summary>
public string EventId { get; }
/// <summary>
/// Type of event (see <see cref="VexTimelineEventTypes"/>).
/// </summary>
public string EventType { get; }
/// <summary>
/// Tenant this event belongs to.
/// </summary>
public string Tenant { get; }
/// <summary>
/// When the event occurred.
/// </summary>
public DateTimeOffset OccurredAt { get; }
/// <summary>
/// Related manifest ID if applicable.
/// </summary>
public string? ManifestId { get; }
/// <summary>
/// Related attestation ID if applicable.
/// </summary>
public string? AttestationId { get; }
/// <summary>
/// Merkle root if applicable.
/// </summary>
public string? MerkleRoot { get; }
/// <summary>
/// Number of items involved if applicable.
/// </summary>
public int? ItemCount { get; }
/// <summary>
/// Error code for failure events.
/// </summary>
public string? ErrorCode { get; }
/// <summary>
/// Human-readable message.
/// </summary>
public string? Message { get; }
/// <summary>
/// Additional metadata.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; }
private static string EnsureNotNullOrWhiteSpace(string value, string name)
=> string.IsNullOrWhiteSpace(value) ? throw new ArgumentException($"{name} must be provided.", name) : value.Trim();
private static string? TrimToNull(string? value)
=> string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Interface for recording VEX evidence timeline events.
/// </summary>
public interface IVexTimelineEventRecorder
{
/// <summary>
/// Records a timeline event.
/// </summary>
ValueTask RecordAsync(VexTimelineEvent evt, CancellationToken cancellationToken);
/// <summary>
/// Records an attestation created event.
/// </summary>
ValueTask RecordAttestationCreatedAsync(
string tenant,
string attestationId,
string manifestId,
string merkleRoot,
int itemCount,
CancellationToken cancellationToken);
/// <summary>
/// Records an attestation verification event.
/// </summary>
ValueTask RecordAttestationVerifiedAsync(
string tenant,
string attestationId,
bool isValid,
string? errorCode,
string? message,
CancellationToken cancellationToken);
/// <summary>
/// Records a manifest built event.
/// </summary>
ValueTask RecordManifestBuiltAsync(
string tenant,
string manifestId,
string merkleRoot,
int itemCount,
CancellationToken cancellationToken);
/// <summary>
/// Records an evidence batch processed event.
/// </summary>
ValueTask RecordBatchProcessedAsync(
string tenant,
int itemCount,
string? manifestId,
CancellationToken cancellationToken);
/// <summary>
/// Lists timeline events for a tenant.
/// </summary>
ValueTask<IReadOnlyList<VexTimelineEvent>> ListEventsAsync(
string tenant,
string? eventType,
DateTimeOffset? since,
int limit,
CancellationToken cancellationToken);
}
/// <summary>
/// Default implementation of <see cref="IVexTimelineEventRecorder"/>.
/// </summary>
public sealed class VexTimelineEventRecorder : IVexTimelineEventRecorder
{
private readonly TimeProvider _timeProvider;
private readonly List<VexTimelineEvent> _events = new();
private readonly object _lock = new();
private long _sequence;
public VexTimelineEventRecorder(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public ValueTask RecordAsync(VexTimelineEvent evt, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(evt);
cancellationToken.ThrowIfCancellationRequested();
lock (_lock)
{
_events.Add(evt);
}
return ValueTask.CompletedTask;
}
public ValueTask RecordAttestationCreatedAsync(
string tenant,
string attestationId,
string manifestId,
string merkleRoot,
int itemCount,
CancellationToken cancellationToken)
{
var evt = new VexTimelineEvent(
CreateEventId(),
VexTimelineEventTypes.AttestationCreated,
tenant,
_timeProvider.GetUtcNow(),
manifestId,
attestationId,
merkleRoot,
itemCount);
return RecordAsync(evt, cancellationToken);
}
public ValueTask RecordAttestationVerifiedAsync(
string tenant,
string attestationId,
bool isValid,
string? errorCode,
string? message,
CancellationToken cancellationToken)
{
var eventType = isValid
? VexTimelineEventTypes.AttestationVerified
: VexTimelineEventTypes.AttestationFailed;
var evt = new VexTimelineEvent(
CreateEventId(),
eventType,
tenant,
_timeProvider.GetUtcNow(),
attestationId: attestationId,
errorCode: errorCode,
message: message);
return RecordAsync(evt, cancellationToken);
}
public ValueTask RecordManifestBuiltAsync(
string tenant,
string manifestId,
string merkleRoot,
int itemCount,
CancellationToken cancellationToken)
{
var evt = new VexTimelineEvent(
CreateEventId(),
VexTimelineEventTypes.ManifestBuilt,
tenant,
_timeProvider.GetUtcNow(),
manifestId,
merkleRoot: merkleRoot,
itemCount: itemCount);
return RecordAsync(evt, cancellationToken);
}
public ValueTask RecordBatchProcessedAsync(
string tenant,
int itemCount,
string? manifestId,
CancellationToken cancellationToken)
{
var evt = new VexTimelineEvent(
CreateEventId(),
VexTimelineEventTypes.EvidenceBatchProcessed,
tenant,
_timeProvider.GetUtcNow(),
manifestId,
itemCount: itemCount);
return RecordAsync(evt, cancellationToken);
}
public ValueTask<IReadOnlyList<VexTimelineEvent>> ListEventsAsync(
string tenant,
string? eventType,
DateTimeOffset? since,
int limit,
CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
lock (_lock)
{
var query = _events
.Where(e => string.Equals(e.Tenant, tenant, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(eventType))
{
query = query.Where(e => string.Equals(e.EventType, eventType, StringComparison.OrdinalIgnoreCase));
}
if (since.HasValue)
{
query = query.Where(e => e.OccurredAt >= since.Value);
}
var results = query
.OrderByDescending(e => e.OccurredAt)
.Take(Math.Clamp(limit, 1, 1000))
.ToList();
return ValueTask.FromResult<IReadOnlyList<VexTimelineEvent>>(results);
}
}
private string CreateEventId()
{
var seq = Interlocked.Increment(ref _sequence);
return $"evt:{_timeProvider.GetUtcNow():yyyyMMddHHmmss}:{seq:D6}";
}
}
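// Illustrative usage sketch (not part of this commit): recording and querying
// timeline events with the in-memory recorder. The tenant, manifest, and hash
// identifiers below are made up for the example.
public static class VexTimelineRecorderUsageExample
{
    public static async Task RecordAndListAsync(CancellationToken cancellationToken)
    {
        var recorder = new VexTimelineEventRecorder();
        await recorder.RecordManifestBuiltAsync(
            "tenant-a", manifestId: "manifest-001", merkleRoot: "sha256:abc123",
            itemCount: 42, cancellationToken: cancellationToken);
        await recorder.RecordAttestationCreatedAsync(
            "tenant-a", attestationId: "att-001", manifestId: "manifest-001",
            merkleRoot: "sha256:abc123", itemCount: 42, cancellationToken: cancellationToken);
        // Most recent events first; optionally filter to a single event type.
        var events = await recorder.ListEventsAsync(
            "tenant-a", VexTimelineEventTypes.AttestationCreated,
            since: null, limit: 10, cancellationToken: cancellationToken);
        foreach (var evt in events)
        {
            Console.WriteLine($"{evt.OccurredAt:O} {evt.EventType} {evt.AttestationId}");
        }
    }
}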


@@ -0,0 +1,498 @@
namespace StellaOps.Excititor.Core.Storage;
/// <summary>
/// Append-only checkpoint store for deterministic connector state persistence.
/// Per EXCITITOR-ORCH-32/33: Deterministic checkpoint persistence using Postgres append-only store.
/// Mutations are logged and never modified; current state is derived from the log.
/// </summary>
public interface IAppendOnlyCheckpointStore
{
/// <summary>
/// Appends a new checkpoint mutation for a connector.
/// Thread-safe and idempotent (duplicate mutations are deduplicated by idempotency key).
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="mutation">The checkpoint mutation to append.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The append result with sequence number.</returns>
ValueTask<AppendCheckpointResult> AppendAsync(
string tenant,
string connectorId,
CheckpointMutation mutation,
CancellationToken cancellationToken);
/// <summary>
/// Gets the current checkpoint state (derived from mutation log).
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Current checkpoint state or null if none exists.</returns>
ValueTask<CheckpointState?> GetCurrentStateAsync(
string tenant,
string connectorId,
CancellationToken cancellationToken);
/// <summary>
/// Gets the mutation log for a connector (for audit/replay).
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="sinceSequence">Return mutations after this sequence number (exclusive).</param>
/// <param name="limit">Maximum number of mutations to return.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of mutations in chronological order.</returns>
ValueTask<IReadOnlyList<CheckpointMutationEvent>> GetMutationLogAsync(
string tenant,
string connectorId,
long? sinceSequence,
int limit,
CancellationToken cancellationToken);
/// <summary>
/// Replays mutations to reconstruct state at a specific point in time.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="upToSequence">Replay mutations up to and including this sequence.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>State as of the specified sequence.</returns>
ValueTask<CheckpointState?> ReplayToSequenceAsync(
string tenant,
string connectorId,
long upToSequence,
CancellationToken cancellationToken);
}
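// Illustrative flow sketch (not part of this commit): a connector run appending
// mutations and later replaying historical state. The tenant/connector ids and
// idempotency-key scheme are hypothetical.
public static class CheckpointStoreUsageExample
{
    public static async Task RunWithCheckpointsAsync(
        IAppendOnlyCheckpointStore store,
        TimeProvider time,
        CancellationToken cancellationToken)
    {
        var runId = Guid.NewGuid();
        // Retried appends with the same idempotency key are deduplicated.
        var cursorResult = await store.AppendAsync(
            "tenant-a",
            "connector-redhat",
            CheckpointMutation.CursorUpdate(
                runId, time.GetUtcNow(), cursor: "page-17",
                documentsProcessed: 250, idempotencyKey: $"{runId:N}:cursor:17"),
            cancellationToken);
        await store.AppendAsync(
            "tenant-a",
            "connector-redhat",
            CheckpointMutation.Completed(
                runId, time.GetUtcNow(), cursor: "page-18",
                documentsProcessed: 50, claimsGenerated: 1200),
            cancellationToken);
        // Audit/replay: reconstruct state as of the cursor update, ignoring later mutations.
        var historical = await store.ReplayToSequenceAsync(
            "tenant-a", "connector-redhat", cursorResult.SequenceNumber, cancellationToken);
        Console.WriteLine($"cursor at sequence {cursorResult.SequenceNumber}: {historical?.Cursor}");
    }
}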
/// <summary>
/// Result of an append operation.
/// </summary>
public sealed record AppendCheckpointResult
{
private AppendCheckpointResult(
bool success,
long sequenceNumber,
bool wasDuplicate,
CheckpointState currentState,
string? errorMessage = null)
{
Success = success;
SequenceNumber = sequenceNumber;
WasDuplicate = wasDuplicate;
CurrentState = currentState;
ErrorMessage = errorMessage;
}
/// <summary>
/// Whether the append was successful.
/// </summary>
public bool Success { get; }
/// <summary>
/// Monotonic sequence number for this mutation.
/// </summary>
public long SequenceNumber { get; }
/// <summary>
/// True if this mutation was a duplicate (idempotent).
/// </summary>
public bool WasDuplicate { get; }
/// <summary>
/// Current state after this mutation.
/// </summary>
public CheckpointState CurrentState { get; }
/// <summary>
/// Error message if the append failed.
/// </summary>
public string? ErrorMessage { get; }
public static AppendCheckpointResult Appended(long sequenceNumber, CheckpointState state)
=> new(true, sequenceNumber, wasDuplicate: false, state);
public static AppendCheckpointResult Duplicate(long sequenceNumber, CheckpointState state)
=> new(true, sequenceNumber, wasDuplicate: true, state);
public static AppendCheckpointResult Failed(string error)
=> new(false, 0, wasDuplicate: false, CheckpointState.Empty, error);
}
/// <summary>
/// Checkpoint mutation to be appended to the log.
/// </summary>
public sealed record CheckpointMutation(
CheckpointMutationType Type,
Guid RunId,
DateTimeOffset Timestamp,
string? Cursor,
string? ArtifactHash,
string? ArtifactKind,
int? DocumentsProcessed,
int? ClaimsGenerated,
string? ErrorCode,
string? ErrorMessage,
int? RetryAfterSeconds,
string? IdempotencyKey = null)
{
/// <summary>
/// Creates a heartbeat mutation.
/// </summary>
public static CheckpointMutation Heartbeat(
Guid runId,
DateTimeOffset timestamp,
string? cursor = null,
string? artifactHash = null,
string? artifactKind = null,
string? idempotencyKey = null)
=> new(
CheckpointMutationType.Heartbeat,
runId,
timestamp,
cursor,
artifactHash,
artifactKind,
DocumentsProcessed: null,
ClaimsGenerated: null,
ErrorCode: null,
ErrorMessage: null,
RetryAfterSeconds: null,
idempotencyKey);
/// <summary>
/// Creates a checkpoint cursor update mutation.
/// </summary>
public static CheckpointMutation CursorUpdate(
Guid runId,
DateTimeOffset timestamp,
string cursor,
int? documentsProcessed = null,
string? idempotencyKey = null)
=> new(
CheckpointMutationType.CursorUpdate,
runId,
timestamp,
cursor,
ArtifactHash: null,
ArtifactKind: null,
documentsProcessed,
ClaimsGenerated: null,
ErrorCode: null,
ErrorMessage: null,
RetryAfterSeconds: null,
idempotencyKey);
/// <summary>
/// Creates a completion mutation.
/// </summary>
public static CheckpointMutation Completed(
Guid runId,
DateTimeOffset timestamp,
string? cursor,
int documentsProcessed,
int claimsGenerated,
string? artifactHash = null,
string? idempotencyKey = null)
=> new(
CheckpointMutationType.Completed,
runId,
timestamp,
cursor,
artifactHash,
ArtifactKind: null,
documentsProcessed,
claimsGenerated,
ErrorCode: null,
ErrorMessage: null,
RetryAfterSeconds: null,
idempotencyKey);
/// <summary>
/// Creates a failure mutation.
/// </summary>
public static CheckpointMutation Failed(
Guid runId,
DateTimeOffset timestamp,
string errorCode,
string? errorMessage = null,
int? retryAfterSeconds = null,
string? cursor = null,
string? idempotencyKey = null)
=> new(
CheckpointMutationType.Failed,
runId,
timestamp,
cursor,
ArtifactHash: null,
ArtifactKind: null,
DocumentsProcessed: null,
ClaimsGenerated: null,
errorCode,
errorMessage,
retryAfterSeconds,
idempotencyKey);
/// <summary>
/// Creates an artifact mutation.
/// </summary>
public static CheckpointMutation Artifact(
Guid runId,
DateTimeOffset timestamp,
string artifactHash,
string artifactKind,
string? idempotencyKey = null)
=> new(
CheckpointMutationType.Artifact,
runId,
timestamp,
Cursor: null,
artifactHash,
artifactKind,
DocumentsProcessed: null,
ClaimsGenerated: null,
ErrorCode: null,
ErrorMessage: null,
RetryAfterSeconds: null,
idempotencyKey);
}
/// <summary>
/// Types of checkpoint mutations.
/// </summary>
public enum CheckpointMutationType
{
/// <summary>
/// Run started.
/// </summary>
Started,
/// <summary>
/// Heartbeat/progress update.
/// </summary>
Heartbeat,
/// <summary>
/// Checkpoint cursor update.
/// </summary>
CursorUpdate,
/// <summary>
/// Artifact recorded.
/// </summary>
Artifact,
/// <summary>
/// Run completed successfully.
/// </summary>
Completed,
/// <summary>
/// Run failed.
/// </summary>
Failed
}
/// <summary>
/// Persisted checkpoint mutation event (with sequence number).
/// </summary>
public sealed record CheckpointMutationEvent(
long SequenceNumber,
CheckpointMutationType Type,
Guid RunId,
DateTimeOffset Timestamp,
string? Cursor,
string? ArtifactHash,
string? ArtifactKind,
int? DocumentsProcessed,
int? ClaimsGenerated,
string? ErrorCode,
string? ErrorMessage,
int? RetryAfterSeconds,
string? IdempotencyKey);
/// <summary>
/// Current checkpoint state (derived from mutation log).
/// </summary>
public sealed record CheckpointState
{
public CheckpointState(
string connectorId,
string? cursor,
DateTimeOffset lastUpdated,
Guid? lastRunId,
CheckpointMutationType? lastMutationType,
string? lastArtifactHash,
string? lastArtifactKind,
int totalDocumentsProcessed,
int totalClaimsGenerated,
int successCount,
int failureCount,
string? lastErrorCode,
DateTimeOffset? nextEligibleRun,
long latestSequenceNumber)
{
ConnectorId = connectorId ?? throw new ArgumentNullException(nameof(connectorId));
Cursor = cursor;
LastUpdated = lastUpdated;
LastRunId = lastRunId;
LastMutationType = lastMutationType;
LastArtifactHash = lastArtifactHash;
LastArtifactKind = lastArtifactKind;
TotalDocumentsProcessed = totalDocumentsProcessed;
TotalClaimsGenerated = totalClaimsGenerated;
SuccessCount = successCount;
FailureCount = failureCount;
LastErrorCode = lastErrorCode;
NextEligibleRun = nextEligibleRun;
LatestSequenceNumber = latestSequenceNumber;
}
/// <summary>
/// Connector identifier.
/// </summary>
public string ConnectorId { get; }
/// <summary>
/// Current checkpoint cursor.
/// </summary>
public string? Cursor { get; }
/// <summary>
/// When the checkpoint was last updated.
/// </summary>
public DateTimeOffset LastUpdated { get; }
/// <summary>
/// Last run ID.
/// </summary>
public Guid? LastRunId { get; }
/// <summary>
/// Last mutation type.
/// </summary>
public CheckpointMutationType? LastMutationType { get; }
/// <summary>
/// Last artifact hash.
/// </summary>
public string? LastArtifactHash { get; }
/// <summary>
/// Last artifact kind.
/// </summary>
public string? LastArtifactKind { get; }
/// <summary>
/// Total documents processed across all runs.
/// </summary>
public int TotalDocumentsProcessed { get; }
/// <summary>
/// Total claims generated across all runs.
/// </summary>
public int TotalClaimsGenerated { get; }
/// <summary>
/// Number of successful runs.
/// </summary>
public int SuccessCount { get; }
/// <summary>
/// Number of failed runs.
/// </summary>
public int FailureCount { get; }
/// <summary>
/// Last error code (from most recent failure).
/// </summary>
public string? LastErrorCode { get; }
/// <summary>
/// When the connector is next eligible to run.
/// </summary>
public DateTimeOffset? NextEligibleRun { get; }
/// <summary>
/// Latest sequence number in the mutation log.
/// </summary>
public long LatestSequenceNumber { get; }
/// <summary>
/// Whether the connector is eligible to run now.
/// </summary>
public bool IsEligibleToRun(DateTimeOffset now)
=> NextEligibleRun is null || now >= NextEligibleRun.Value;
/// <summary>
/// Empty placeholder state (no connector id), e.g. for failed append results.
/// </summary>
public static CheckpointState Empty => new(
connectorId: string.Empty,
cursor: null,
lastUpdated: DateTimeOffset.MinValue,
lastRunId: null,
lastMutationType: null,
lastArtifactHash: null,
lastArtifactKind: null,
totalDocumentsProcessed: 0,
totalClaimsGenerated: 0,
successCount: 0,
failureCount: 0,
lastErrorCode: null,
nextEligibleRun: null,
latestSequenceNumber: 0);
/// <summary>
/// Creates a new state for a connector.
/// </summary>
public static CheckpointState Initial(string connectorId) => new(
connectorId,
cursor: null,
lastUpdated: DateTimeOffset.MinValue,
lastRunId: null,
lastMutationType: null,
lastArtifactHash: null,
lastArtifactKind: null,
totalDocumentsProcessed: 0,
totalClaimsGenerated: 0,
successCount: 0,
failureCount: 0,
lastErrorCode: null,
nextEligibleRun: null,
latestSequenceNumber: 0);
/// <summary>
/// Applies a mutation to produce a new state.
/// </summary>
public CheckpointState Apply(CheckpointMutationEvent mutation)
{
var newCursor = mutation.Cursor ?? Cursor;
var newArtifactHash = mutation.ArtifactHash ?? LastArtifactHash;
var newArtifactKind = mutation.ArtifactKind ?? LastArtifactKind;
var newDocsProcessed = TotalDocumentsProcessed + (mutation.DocumentsProcessed ?? 0);
var newClaimsGenerated = TotalClaimsGenerated + (mutation.ClaimsGenerated ?? 0);
var newSuccessCount = mutation.Type == CheckpointMutationType.Completed ? SuccessCount + 1 : SuccessCount;
var newFailureCount = mutation.Type == CheckpointMutationType.Failed ? FailureCount + 1 : FailureCount;
var newErrorCode = mutation.Type == CheckpointMutationType.Failed ? mutation.ErrorCode : LastErrorCode;
var newNextEligible = mutation.Type == CheckpointMutationType.Failed && mutation.RetryAfterSeconds.HasValue
? mutation.Timestamp.AddSeconds(mutation.RetryAfterSeconds.Value)
: (mutation.Type == CheckpointMutationType.Completed ? null : NextEligibleRun);
return new CheckpointState(
ConnectorId.Length > 0 ? ConnectorId : "unknown",
newCursor,
mutation.Timestamp,
mutation.RunId,
mutation.Type,
newArtifactHash,
newArtifactKind,
newDocsProcessed,
newClaimsGenerated,
newSuccessCount,
newFailureCount,
newErrorCode,
newNextEligible,
mutation.SequenceNumber);
}
}
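// Illustrative fold sketch (not part of this commit): the current state is a pure
// left-fold of the mutation log via Apply, which is exactly what the materialized
// checkpoint_states table mirrors.
public static class CheckpointStateFoldExample
{
    // The log must be ordered by SequenceNumber ascending.
    public static CheckpointState Fold(string connectorId, IEnumerable<CheckpointMutationEvent> log)
    {
        var state = CheckpointState.Initial(connectorId);
        foreach (var mutation in log)
        {
            state = state.Apply(mutation);
        }
        return state;
    }
}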


@@ -7,6 +7,7 @@ using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Excititor.Core.Evidence;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Core.Storage;
@@ -708,3 +709,105 @@ public sealed class InMemoryVexObservationStore : IVexObservationStore
return ValueTask.FromResult((long)count);
}
}
/// <summary>
/// In-memory attestation store for development and testing until the Postgres-backed store lands.
/// </summary>
public sealed class InMemoryVexAttestationStore : IVexAttestationStore
{
private readonly ConcurrentDictionary<string, VexStoredAttestation> _attestations = new(StringComparer.OrdinalIgnoreCase);
public ValueTask SaveAsync(VexStoredAttestation attestation, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(attestation);
cancellationToken.ThrowIfCancellationRequested();
var key = CreateKey(attestation.Tenant, attestation.AttestationId);
_attestations[key] = attestation;
return ValueTask.CompletedTask;
}
public ValueTask<VexStoredAttestation?> FindByIdAsync(string tenant, string attestationId, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(tenant) || string.IsNullOrWhiteSpace(attestationId))
{
return ValueTask.FromResult<VexStoredAttestation?>(null);
}
var key = CreateKey(tenant.Trim().ToLowerInvariant(), attestationId.Trim());
_attestations.TryGetValue(key, out var attestation);
return ValueTask.FromResult<VexStoredAttestation?>(attestation);
}
public ValueTask<VexStoredAttestation?> FindByManifestIdAsync(string tenant, string manifestId, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(tenant) || string.IsNullOrWhiteSpace(manifestId))
{
return ValueTask.FromResult<VexStoredAttestation?>(null);
}
var normalizedTenant = tenant.Trim().ToLowerInvariant();
var result = _attestations.Values
.Where(a => string.Equals(a.Tenant, normalizedTenant, StringComparison.OrdinalIgnoreCase))
.FirstOrDefault(a => string.Equals(a.ManifestId, manifestId.Trim(), StringComparison.OrdinalIgnoreCase));
return ValueTask.FromResult<VexStoredAttestation?>(result);
}
public ValueTask<VexAttestationListResult> ListAsync(VexAttestationQuery query, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(query);
cancellationToken.ThrowIfCancellationRequested();
var filtered = _attestations.Values
.Where(a => string.Equals(a.Tenant, query.Tenant, StringComparison.OrdinalIgnoreCase));
if (query.Since.HasValue)
{
filtered = filtered.Where(a => a.AttestedAt >= query.Since.Value);
}
if (query.Until.HasValue)
{
filtered = filtered.Where(a => a.AttestedAt <= query.Until.Value);
}
var ordered = filtered
.OrderByDescending(a => a.AttestedAt)
.ThenBy(a => a.AttestationId, StringComparer.Ordinal)
.ToList();
var totalCount = ordered.Count;
var items = ordered
.Skip(query.Offset)
.Take(query.Limit)
.ToList();
var hasMore = query.Offset + items.Count < totalCount;
return ValueTask.FromResult(new VexAttestationListResult(items, totalCount, hasMore));
}
public ValueTask<int> CountAsync(string tenant, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(tenant))
{
return ValueTask.FromResult(0);
}
var normalizedTenant = tenant.Trim().ToLowerInvariant();
var count = _attestations.Values
.Count(a => string.Equals(a.Tenant, normalizedTenant, StringComparison.OrdinalIgnoreCase));
return ValueTask.FromResult(count);
}
private static string CreateKey(string tenant, string attestationId)
=> $"{tenant}|{attestationId}";
}
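// Illustrative round-trip sketch (not part of this commit): lookups against the
// in-memory store are tenant-case-insensitive, because keys use an
// OrdinalIgnoreCase comparer and inputs are normalized before lookup.
public static class InMemoryAttestationStoreExample
{
    public static async Task RoundTripAsync(VexStoredAttestation attestation, CancellationToken cancellationToken)
    {
        var store = new InMemoryVexAttestationStore();
        await store.SaveAsync(attestation, cancellationToken);
        var found = await store.FindByIdAsync(
            attestation.Tenant.ToUpperInvariant(), attestation.AttestationId, cancellationToken);
        Console.WriteLine(found is not null ? "found" : "missing");
    }
}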


@@ -0,0 +1,478 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Excititor.Core.Storage;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Excititor.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL-backed append-only checkpoint store for deterministic connector state persistence.
/// Per EXCITITOR-ORCH-32/33: Deterministic checkpoint persistence using Postgres append-only store.
/// </summary>
public sealed class PostgresAppendOnlyCheckpointStore : RepositoryBase<ExcititorDataSource>, IAppendOnlyCheckpointStore
{
private volatile bool _initialized;
private readonly SemaphoreSlim _initLock = new(1, 1);
public PostgresAppendOnlyCheckpointStore(ExcititorDataSource dataSource, ILogger<PostgresAppendOnlyCheckpointStore> logger)
: base(dataSource, logger)
{
}
public async ValueTask<AppendCheckpointResult> AppendAsync(
string tenant,
string connectorId,
CheckpointMutation mutation,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
ArgumentNullException.ThrowIfNull(mutation);
await EnsureTablesAsync(cancellationToken).ConfigureAwait(false);
// Check for idempotency (duplicate mutation)
if (!string.IsNullOrEmpty(mutation.IdempotencyKey))
{
var existing = await FindByIdempotencyKeyAsync(tenant, connectorId, mutation.IdempotencyKey, cancellationToken)
.ConfigureAwait(false);
if (existing is not null)
{
var currentState = await GetCurrentStateAsync(tenant, connectorId, cancellationToken).ConfigureAwait(false)
?? CheckpointState.Initial(connectorId);
return AppendCheckpointResult.Duplicate(existing.SequenceNumber, currentState);
}
}
await using var connection = await DataSource.OpenConnectionAsync(tenant, "writer", cancellationToken).ConfigureAwait(false);
// Insert mutation (sequence is auto-generated)
const string insertSql = """
INSERT INTO vex.checkpoint_mutations (
tenant_id, connector_id, mutation_type, run_id, timestamp,
cursor, artifact_hash, artifact_kind,
documents_processed, claims_generated,
error_code, error_message, retry_after_seconds,
idempotency_key)
VALUES (
@tenant_id, @connector_id, @mutation_type, @run_id, @timestamp,
@cursor, @artifact_hash, @artifact_kind,
@documents_processed, @claims_generated,
@error_code, @error_message, @retry_after_seconds,
@idempotency_key)
RETURNING sequence_number;
""";
await using var command = CreateCommand(insertSql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
AddParameter(command, "mutation_type", mutation.Type.ToString());
AddParameter(command, "run_id", mutation.RunId);
AddParameter(command, "timestamp", mutation.Timestamp.UtcDateTime);
AddParameter(command, "cursor", mutation.Cursor);
AddParameter(command, "artifact_hash", mutation.ArtifactHash);
AddParameter(command, "artifact_kind", mutation.ArtifactKind);
AddParameter(command, "documents_processed", mutation.DocumentsProcessed);
AddParameter(command, "claims_generated", mutation.ClaimsGenerated);
AddParameter(command, "error_code", mutation.ErrorCode);
AddParameter(command, "error_message", Truncate(mutation.ErrorMessage, 512));
AddParameter(command, "retry_after_seconds", mutation.RetryAfterSeconds);
AddParameter(command, "idempotency_key", mutation.IdempotencyKey);
var sequenceNumber = (long)(await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false))!;
// Update materialized state
await UpdateMaterializedStateAsync(tenant, connectorId, cancellationToken).ConfigureAwait(false);
var newState = await GetCurrentStateAsync(tenant, connectorId, cancellationToken).ConfigureAwait(false)
?? CheckpointState.Initial(connectorId);
return AppendCheckpointResult.Appended(sequenceNumber, newState);
}
public async ValueTask<CheckpointState?> GetCurrentStateAsync(
string tenant,
string connectorId,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
await EnsureTablesAsync(cancellationToken).ConfigureAwait(false);
await using var connection = await DataSource.OpenConnectionAsync(tenant, "reader", cancellationToken).ConfigureAwait(false);
const string sql = """
SELECT connector_id, cursor, last_updated, last_run_id, last_mutation_type,
last_artifact_hash, last_artifact_kind,
total_documents_processed, total_claims_generated,
success_count, failure_count, last_error_code,
next_eligible_run, latest_sequence_number
FROM vex.checkpoint_states
WHERE tenant_id = @tenant_id AND connector_id = @connector_id;
""";
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
return MapState(reader);
}
public async ValueTask<IReadOnlyList<CheckpointMutationEvent>> GetMutationLogAsync(
string tenant,
string connectorId,
long? sinceSequence,
int limit,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
limit = Math.Clamp(limit, 1, 1000);
await EnsureTablesAsync(cancellationToken).ConfigureAwait(false);
await using var connection = await DataSource.OpenConnectionAsync(tenant, "reader", cancellationToken).ConfigureAwait(false);
var sql = """
SELECT sequence_number, mutation_type, run_id, timestamp,
cursor, artifact_hash, artifact_kind,
documents_processed, claims_generated,
error_code, error_message, retry_after_seconds,
idempotency_key
FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id
""";
if (sinceSequence.HasValue)
{
sql += " AND sequence_number > @since_sequence";
}
sql += " ORDER BY sequence_number ASC LIMIT @limit;";
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
AddParameter(command, "limit", limit);
if (sinceSequence.HasValue)
{
AddParameter(command, "since_sequence", sinceSequence.Value);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var results = new List<CheckpointMutationEvent>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapMutation(reader));
}
return results;
}
public async ValueTask<CheckpointState?> ReplayToSequenceAsync(
string tenant,
string connectorId,
long upToSequence,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
await EnsureTablesAsync(cancellationToken).ConfigureAwait(false);
// Get all mutations up to the specified sequence
await using var connection = await DataSource.OpenConnectionAsync(tenant, "reader", cancellationToken).ConfigureAwait(false);
const string sql = """
SELECT sequence_number, mutation_type, run_id, timestamp,
cursor, artifact_hash, artifact_kind,
documents_processed, claims_generated,
error_code, error_message, retry_after_seconds,
idempotency_key
FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id
AND sequence_number <= @up_to_sequence
ORDER BY sequence_number ASC;
""";
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
AddParameter(command, "up_to_sequence", upToSequence);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var state = CheckpointState.Initial(connectorId);
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
var mutation = MapMutation(reader);
state = state.Apply(mutation);
}
return state.LatestSequenceNumber > 0 ? state : null;
}
private async ValueTask<CheckpointMutationEvent?> FindByIdempotencyKeyAsync(
string tenant,
string connectorId,
string idempotencyKey,
CancellationToken cancellationToken)
{
await using var connection = await DataSource.OpenConnectionAsync(tenant, "reader", cancellationToken).ConfigureAwait(false);
const string sql = """
SELECT sequence_number, mutation_type, run_id, timestamp,
cursor, artifact_hash, artifact_kind,
documents_processed, claims_generated,
error_code, error_message, retry_after_seconds,
idempotency_key
FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND idempotency_key = @idempotency_key;
""";
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
AddParameter(command, "idempotency_key", idempotencyKey);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
return MapMutation(reader);
}
private async ValueTask UpdateMaterializedStateAsync(
string tenant,
string connectorId,
CancellationToken cancellationToken)
{
await using var connection = await DataSource.OpenConnectionAsync(tenant, "writer", cancellationToken).ConfigureAwait(false);
// Compute state from mutation log and upsert into materialized table
const string sql = """
INSERT INTO vex.checkpoint_states (
tenant_id, connector_id, cursor, last_updated, last_run_id, last_mutation_type,
last_artifact_hash, last_artifact_kind,
total_documents_processed, total_claims_generated,
success_count, failure_count, last_error_code,
next_eligible_run, latest_sequence_number)
SELECT
@tenant_id,
@connector_id,
(SELECT cursor FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND cursor IS NOT NULL
ORDER BY sequence_number DESC LIMIT 1),
(SELECT MAX(timestamp) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id),
(SELECT run_id FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id
ORDER BY sequence_number DESC LIMIT 1),
(SELECT mutation_type FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id
ORDER BY sequence_number DESC LIMIT 1),
(SELECT artifact_hash FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND artifact_hash IS NOT NULL
ORDER BY sequence_number DESC LIMIT 1),
(SELECT artifact_kind FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND artifact_kind IS NOT NULL
ORDER BY sequence_number DESC LIMIT 1),
COALESCE((SELECT SUM(documents_processed) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id), 0),
COALESCE((SELECT SUM(claims_generated) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id), 0),
(SELECT COUNT(*) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND mutation_type = 'Completed'),
(SELECT COUNT(*) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND mutation_type = 'Failed'),
(SELECT error_code FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND mutation_type = 'Failed'
ORDER BY sequence_number DESC LIMIT 1),
(SELECT timestamp + (retry_after_seconds || ' seconds')::interval
FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id AND mutation_type = 'Failed'
AND retry_after_seconds IS NOT NULL
ORDER BY sequence_number DESC LIMIT 1),
(SELECT MAX(sequence_number) FROM vex.checkpoint_mutations
WHERE tenant_id = @tenant_id AND connector_id = @connector_id)
ON CONFLICT (tenant_id, connector_id) DO UPDATE SET
cursor = EXCLUDED.cursor,
last_updated = EXCLUDED.last_updated,
last_run_id = EXCLUDED.last_run_id,
last_mutation_type = EXCLUDED.last_mutation_type,
last_artifact_hash = EXCLUDED.last_artifact_hash,
last_artifact_kind = EXCLUDED.last_artifact_kind,
total_documents_processed = EXCLUDED.total_documents_processed,
total_claims_generated = EXCLUDED.total_claims_generated,
success_count = EXCLUDED.success_count,
failure_count = EXCLUDED.failure_count,
last_error_code = EXCLUDED.last_error_code,
next_eligible_run = EXCLUDED.next_eligible_run,
latest_sequence_number = EXCLUDED.latest_sequence_number;
""";
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenant);
AddParameter(command, "connector_id", connectorId);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
private CheckpointState MapState(NpgsqlDataReader reader)
{
var connectorId = reader.GetString(0);
var cursor = reader.IsDBNull(1) ? null : reader.GetString(1);
var lastUpdated = reader.IsDBNull(2) ? DateTimeOffset.MinValue : new DateTimeOffset(reader.GetDateTime(2), TimeSpan.Zero);
var lastRunId = reader.IsDBNull(3) ? (Guid?)null : reader.GetGuid(3);
var lastMutationTypeStr = reader.IsDBNull(4) ? null : reader.GetString(4);
var lastMutationType = !string.IsNullOrEmpty(lastMutationTypeStr)
? Enum.Parse<CheckpointMutationType>(lastMutationTypeStr)
: (CheckpointMutationType?)null;
var lastArtifactHash = reader.IsDBNull(5) ? null : reader.GetString(5);
var lastArtifactKind = reader.IsDBNull(6) ? null : reader.GetString(6);
var totalDocsProcessed = reader.IsDBNull(7) ? 0 : reader.GetInt32(7);
var totalClaimsGenerated = reader.IsDBNull(8) ? 0 : reader.GetInt32(8);
var successCount = reader.IsDBNull(9) ? 0 : reader.GetInt32(9);
var failureCount = reader.IsDBNull(10) ? 0 : reader.GetInt32(10);
var lastErrorCode = reader.IsDBNull(11) ? null : reader.GetString(11);
var nextEligible = reader.IsDBNull(12) ? (DateTimeOffset?)null : new DateTimeOffset(reader.GetDateTime(12), TimeSpan.Zero);
var latestSeq = reader.IsDBNull(13) ? 0L : reader.GetInt64(13);
return new CheckpointState(
connectorId,
cursor,
lastUpdated,
lastRunId,
lastMutationType,
lastArtifactHash,
lastArtifactKind,
totalDocsProcessed,
totalClaimsGenerated,
successCount,
failureCount,
lastErrorCode,
nextEligible,
latestSeq);
}
private CheckpointMutationEvent MapMutation(NpgsqlDataReader reader)
{
return new CheckpointMutationEvent(
SequenceNumber: reader.GetInt64(0),
Type: Enum.Parse<CheckpointMutationType>(reader.GetString(1)),
RunId: reader.GetGuid(2),
Timestamp: new DateTimeOffset(reader.GetDateTime(3), TimeSpan.Zero),
Cursor: reader.IsDBNull(4) ? null : reader.GetString(4),
ArtifactHash: reader.IsDBNull(5) ? null : reader.GetString(5),
ArtifactKind: reader.IsDBNull(6) ? null : reader.GetString(6),
DocumentsProcessed: reader.IsDBNull(7) ? null : reader.GetInt32(7),
ClaimsGenerated: reader.IsDBNull(8) ? null : reader.GetInt32(8),
ErrorCode: reader.IsDBNull(9) ? null : reader.GetString(9),
ErrorMessage: reader.IsDBNull(10) ? null : reader.GetString(10),
RetryAfterSeconds: reader.IsDBNull(11) ? null : reader.GetInt32(11),
IdempotencyKey: reader.IsDBNull(12) ? null : reader.GetString(12));
}
private async ValueTask EnsureTablesAsync(CancellationToken cancellationToken)
{
if (_initialized)
{
return;
}
await _initLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_initialized)
{
return;
}
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
// Create append-only mutations table
const string mutationsSql = """
CREATE TABLE IF NOT EXISTS vex.checkpoint_mutations (
sequence_number bigserial PRIMARY KEY,
tenant_id text NOT NULL,
connector_id text NOT NULL,
mutation_type text NOT NULL,
run_id uuid NOT NULL,
timestamp timestamptz NOT NULL,
cursor text,
artifact_hash text,
artifact_kind text,
documents_processed integer,
claims_generated integer,
error_code text,
error_message text,
retry_after_seconds integer,
idempotency_key text,
created_at timestamptz NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_checkpoint_mutations_tenant_connector
ON vex.checkpoint_mutations (tenant_id, connector_id, sequence_number);
CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoint_mutations_idempotency
ON vex.checkpoint_mutations (tenant_id, connector_id, idempotency_key)
WHERE idempotency_key IS NOT NULL;
""";
await using var mutationsCommand = CreateCommand(mutationsSql, connection);
await mutationsCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
// Create materialized state table
const string statesSql = """
CREATE TABLE IF NOT EXISTS vex.checkpoint_states (
tenant_id text NOT NULL,
connector_id text NOT NULL,
cursor text,
last_updated timestamptz,
last_run_id uuid,
last_mutation_type text,
last_artifact_hash text,
last_artifact_kind text,
total_documents_processed integer NOT NULL DEFAULT 0,
total_claims_generated integer NOT NULL DEFAULT 0,
success_count integer NOT NULL DEFAULT 0,
failure_count integer NOT NULL DEFAULT 0,
last_error_code text,
next_eligible_run timestamptz,
latest_sequence_number bigint NOT NULL DEFAULT 0,
PRIMARY KEY (tenant_id, connector_id)
);
""";
await using var statesCommand = CreateCommand(statesSql, connection);
await statesCommand.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
_initialized = true;
}
finally
{
_initLock.Release();
}
}
private static string? Truncate(string? value, int maxLength)
{
if (string.IsNullOrEmpty(value))
{
return value;
}
return value.Length <= maxLength ? value : value[..maxLength];
}
}
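// Illustrative audit sketch (not part of this commit): paging the append-only
// mutation log in sequence order, e.g. to verify replication or debug a
// connector. Identifiers are hypothetical.
public static class MutationLogPagingExample
{
    public static async Task DumpLogAsync(IAppendOnlyCheckpointStore store, CancellationToken cancellationToken)
    {
        long? since = null;
        while (true)
        {
            var batch = await store.GetMutationLogAsync(
                "tenant-a", "connector-redhat", since, limit: 500, cancellationToken);
            if (batch.Count == 0)
            {
                break;
            }
            foreach (var mutation in batch)
            {
                Console.WriteLine($"#{mutation.SequenceNumber} {mutation.Type} run={mutation.RunId} at {mutation.Timestamp:O}");
            }
            since = batch[^1].SequenceNumber;
        }
    }
}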


@@ -36,6 +36,9 @@ public static class ServiceCollectionExtensions
services.AddScoped<IVexRawStore, PostgresVexRawStore>();
services.AddScoped<IVexConnectorStateRepository, PostgresConnectorStateRepository>();
// Register append-only checkpoint store for deterministic persistence (EXCITITOR-ORCH-32/33)
services.AddScoped<IAppendOnlyCheckpointStore, PostgresAppendOnlyCheckpointStore>();
return services;
}
@@ -59,6 +62,9 @@ public static class ServiceCollectionExtensions
services.AddScoped<IVexRawStore, PostgresVexRawStore>();
services.AddScoped<IVexConnectorStateRepository, PostgresConnectorStateRepository>();
// Register append-only checkpoint store for deterministic persistence (EXCITITOR-ORCH-32/33)
services.AddScoped<IAppendOnlyCheckpointStore, PostgresAppendOnlyCheckpointStore>();
return services;
}
}
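// Illustrative resolution sketch (not part of this commit): the checkpoint store
// is registered as scoped, so resolve it from a scope rather than the root
// provider. Assumes a using directive for Microsoft.Extensions.DependencyInjection.
public static class CheckpointStoreResolutionExample
{
    public static async Task<CheckpointState?> ReadStateAsync(
        IServiceProvider rootProvider,
        CancellationToken cancellationToken)
    {
        using var scope = rootProvider.CreateScope();
        var store = scope.ServiceProvider.GetRequiredService<IAppendOnlyCheckpointStore>();
        return await store.GetCurrentStateAsync("tenant-a", "connector-redhat", cancellationToken);
    }
}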


@@ -1,5 +1,9 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Adapters.Trivy;
using StellaOps.ExportCenter.Core.Encryption;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
@@ -40,7 +44,12 @@ public sealed class ExportAdapterRegistry : IExportAdapterRegistry
public ExportAdapterRegistry(IEnumerable<IExportAdapter> adapters)
{
_adapters = adapters.ToDictionary(a => a.AdapterId, StringComparer.OrdinalIgnoreCase);
// Last adapter wins for duplicate adapter IDs
_adapters = new Dictionary<string, IExportAdapter>(StringComparer.OrdinalIgnoreCase);
foreach (var adapter in adapters)
{
_adapters[adapter.AdapterId] = adapter;
}
// Build format to adapter map (first adapter wins for each format)
_formatMap = new Dictionary<ExportFormat, IExportAdapter>();
@@ -85,6 +94,47 @@ public static class ExportAdapterServiceExtensions
// Register individual adapters
services.AddSingleton<IExportAdapter, JsonRawAdapter>();
services.AddSingleton<IExportAdapter, JsonPolicyAdapter>();
services.AddSingleton<IExportAdapter>(sp =>
new MirrorAdapter(
sp.GetRequiredService<ILogger<MirrorAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register Trivy DB adapter
services.AddSingleton<IExportAdapter>(sp =>
new TrivyDbAdapter(
sp.GetRequiredService<ILogger<TrivyDbAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register Trivy Java DB adapter
services.AddSingleton<IExportAdapter>(sp =>
new TrivyJavaDbAdapter(
sp.GetRequiredService<ILogger<TrivyJavaDbAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register mirror delta infrastructure
services.AddSingleton<IMirrorBaseManifestStore, InMemoryMirrorBaseManifestStore>();
services.AddSingleton<IMirrorContentStore>(sp =>
new InMemoryMirrorContentStore(sp.GetRequiredService<ICryptoHash>()));
services.AddSingleton<IMirrorDeltaService, MirrorDeltaService>();
// Register Mirror Delta adapter
services.AddSingleton<IExportAdapter>(sp =>
new MirrorDeltaAdapter(
sp.GetRequiredService<ILogger<MirrorDeltaAdapter>>(),
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<IMirrorDeltaService>(),
sp.GetRequiredService<IMirrorBaseManifestStore>(),
sp.GetService<IMirrorContentStore>()));
// Register encryption services
services.AddSingleton<IAgeKeyWrapper, StubAgeKeyWrapper>();
// Note: IKmsKeyWrapper should be registered by specific KMS implementations (AWS, Azure, etc.)
services.AddSingleton<IBundleEncryptionService>(sp =>
new BundleEncryptionService(
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<ILogger<BundleEncryptionService>>(),
sp.GetService<IAgeKeyWrapper>(),
sp.GetService<IKmsKeyWrapper>()));
// Register the registry
services.AddSingleton<IExportAdapterRegistry>(sp =>


@@ -0,0 +1,414 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
/// <summary>
/// Export adapter that produces mirror bundles with filesystem layout, indexes, and manifests.
/// </summary>
public sealed class MirrorAdapter : IExportAdapter
{
private const string DefaultBundleFileName = "export-mirror-bundle-v1.tgz";
private readonly ILogger<MirrorAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
public MirrorAdapter(ILogger<MirrorAdapter> logger, ICryptoHash cryptoHash)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <inheritdoc />
public string AdapterId => "mirror:standard";
/// <inheritdoc />
public string DisplayName => "Mirror Bundle";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.Mirror];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting mirror bundle export for {ItemCount} items",
context.Items.Count);
// Create temp directory for staging files
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Collect and write items to temp files
var itemResults = new List<AdapterItemResult>();
var dataSources = await CollectDataSourcesAsync(
context,
tempDir,
itemResults,
cancellationToken);
if (dataSources.Count == 0)
{
_logger.LogWarning("No data sources collected for mirror bundle");
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
ManifestCounts = BuildManifestCounts(itemResults),
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
// Extract selectors from items
var selectors = ExtractSelectors(context.Items);
// Build the mirror bundle
var request = new MirrorBundleBuildRequest(
Guid.TryParse(context.CorrelationId, out var runId) ? runId : Guid.NewGuid(),
context.TenantId,
MirrorBundleVariant.Full,
selectors,
dataSources);
var builder = new MirrorBundleBuilder(_cryptoHash, context.TimeProvider);
var buildResult = builder.Build(request, cancellationToken);
// Write the bundle to output directory
var outputPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-mirror-bundle-v1.tgz");
await using (var outputStream = new FileStream(
outputPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 128 * 1024,
useAsync: true))
{
buildResult.BundleStream.Position = 0;
await buildResult.BundleStream.CopyToAsync(outputStream, cancellationToken);
}
// Write checksum file if requested
var checksumPath = outputPath + ".sha256";
if (context.Config.IncludeChecksums)
{
var checksumContent = $"{buildResult.RootHash} {Path.GetFileName(outputPath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = outputPath,
SizeBytes = new FileInfo(outputPath).Length,
Sha256 = buildResult.RootHash,
ContentType = "application/gzip",
ItemCount = dataSources.Count,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Mirror bundle created: {Path} ({Bytes} bytes, {ItemCount} items, hash: {Hash})",
outputPath,
artifact.SizeBytes,
dataSources.Count,
buildResult.RootHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build mirror bundle");
return ExportAdapterResult.Failed($"Mirror bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Mirror adapter doesn't support streaming - all items must be processed together
// to build a single bundle
_logger.LogWarning("Mirror adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task<List<MirrorBundleDataSource>> CollectDataSourcesAsync(
ExportAdapterContext context,
string tempDir,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
var dataSources = new List<MirrorBundleDataSource>();
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Determine category from item kind
var category = MapKindToCategory(item.Kind);
if (category is null)
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
$"Unknown item kind: {item.Kind}"));
continue;
}
// Create temp file for this item
var fileName = SanitizeFileName($"{item.Kind}-{item.Name ?? item.ItemId.ToString("N")}.json");
var categoryDir = Path.Combine(tempDir, category.Value.ToString().ToLowerInvariant());
Directory.CreateDirectory(categoryDir);
var tempFilePath = Path.Combine(categoryDir, fileName);
// Apply normalization if configured
var jsonContent = content.JsonContent!;
if (context.Config.FormatOptions.SortKeys || context.Config.FormatOptions.NormalizeTimestamps)
{
var normalizer = new JsonNormalizer(new JsonNormalizationOptions
{
SortKeys = context.Config.FormatOptions.SortKeys,
NormalizeTimestamps = context.Config.FormatOptions.NormalizeTimestamps
});
var normalized = normalizer.Normalize(jsonContent);
if (normalized.Success && normalized.NormalizedJson is not null)
{
jsonContent = normalized.NormalizedJson;
}
}
await File.WriteAllTextAsync(tempFilePath, jsonContent, cancellationToken);
dataSources.Add(new MirrorBundleDataSource(
category.Value,
tempFilePath,
context.Config.FormatOptions.SortKeys,
item.SourceRef));
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
OutputPath = tempFilePath,
OutputSizeBytes = new FileInfo(tempFilePath).Length,
ContentHash = content.OriginalHash,
ProcessedAt = DateTimeOffset.UtcNow
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
return dataSources;
}
private static MirrorBundleDataCategory? MapKindToCategory(string kind)
{
return kind.ToLowerInvariant() switch
{
"advisory" => MirrorBundleDataCategory.Advisories,
"advisories" => MirrorBundleDataCategory.Advisories,
"vex" => MirrorBundleDataCategory.Vex,
"sbom" => MirrorBundleDataCategory.Sbom,
"policy-snapshot" => MirrorBundleDataCategory.PolicySnapshot,
"policy-evaluations" => MirrorBundleDataCategory.PolicyEvaluations,
"policy-result" => MirrorBundleDataCategory.PolicyEvaluations,
"vex-consensus" => MirrorBundleDataCategory.VexConsensus,
"findings" => MirrorBundleDataCategory.Findings,
"scan-report" => MirrorBundleDataCategory.Findings,
_ => null
};
}
private static MirrorBundleSelectors ExtractSelectors(IReadOnlyList<ResolvedExportItem> items)
{
// Extract unique source refs as products
var products = items
.Select(i => i.SourceRef)
.Where(s => !string.IsNullOrWhiteSpace(s))
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
// Extract time window from item timestamps (Min/Max throw on empty sequences, so guard against all timestamps being default)
var timestamps = items.Where(i => i.CreatedAt != default).Select(i => i.CreatedAt).ToList();
var minCreated = timestamps.Count > 0 ? timestamps.Min() : default;
var maxCreated = timestamps.Count > 0 ? timestamps.Max() : default;
// Extract ecosystems from metadata if available
var ecosystems = items
.Where(i => i.Metadata.TryGetValue("ecosystem", out _))
.Select(i => i.Metadata["ecosystem"])
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
return new MirrorBundleSelectors(
products.Count > 0 ? products : ["*"],
minCreated != default ? minCreated : null,
maxCreated != default ? maxCreated : null,
ecosystems.Count > 0 ? ecosystems : null);
}
private static string SanitizeFileName(string name)
{
if (string.IsNullOrWhiteSpace(name))
{
return "item.json";
}
var result = name.Trim().ToLowerInvariant();
foreach (var invalid in Path.GetInvalidFileNameChars())
{
result = result.Replace(invalid, '_');
}
result = result.Replace('/', '_').Replace('\\', '_');
// Limit length (keep the extension when it fits; a pathological extension could otherwise make the slice negative)
if (result.Length > 64)
{
var ext = Path.GetExtension(result);
result = ext.Length < 60 ? result[..(60 - ext.Length)] + ext : result[..64];
}
return string.IsNullOrWhiteSpace(result) ? "item.json" : result;
}
private static ExportManifestCounts BuildManifestCounts(IReadOnlyList<AdapterItemResult> itemResults)
{
return new ExportManifestCounts
{
TotalItems = itemResults.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 0,
TotalSizeBytes = 0,
ByKind = new Dictionary<string, int>(),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
}
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
}
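// Illustrative selection sketch (not part of this commit): choosing an adapter by
// its advertised format before dispatching work, mirroring the registry's
// "first adapter wins per format" rule. Assumes the adapter collection as
// registered in DI.
public static class ExportAdapterSelectionExample
{
    public static IExportAdapter? PickAdapterFor(ExportFormat format, IEnumerable<IExportAdapter> adapters)
    {
        return adapters.FirstOrDefault(a => a.SupportedFormats.Contains(format));
    }
}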


@@ -0,0 +1,658 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
/// <summary>
/// Export adapter that produces delta mirror bundles with content-addressed reuse.
/// Only includes items that have changed since the base export.
/// </summary>
public sealed class MirrorDeltaAdapter : IExportAdapter
{
private readonly ILogger<MirrorDeltaAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly IMirrorDeltaService _deltaService;
private readonly IMirrorContentStore? _contentStore;
private readonly IMirrorBaseManifestStore _manifestStore;
public MirrorDeltaAdapter(
ILogger<MirrorDeltaAdapter> logger,
ICryptoHash cryptoHash,
IMirrorDeltaService deltaService,
IMirrorBaseManifestStore manifestStore,
IMirrorContentStore? contentStore = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_deltaService = deltaService ?? throw new ArgumentNullException(nameof(deltaService));
_manifestStore = manifestStore ?? throw new ArgumentNullException(nameof(manifestStore));
_contentStore = contentStore;
}
/// <inheritdoc />
public string AdapterId => "mirror:delta";
/// <inheritdoc />
public string DisplayName => "Mirror Delta Bundle";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.Mirror];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
// Extract delta options from metadata
var deltaOptions = ExtractDeltaOptions(context);
if (deltaOptions is null)
{
return ExportAdapterResult.Failed(
"Delta options required: provide 'baseExportId' and 'baseManifestDigest' in context metadata");
}
_logger.LogInformation(
"Starting mirror delta export against base {BaseExportId} for {ItemCount} items",
deltaOptions.BaseExportId, context.Items.Count);
// Create temp directory for staging files
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-delta-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Collect and hash all current items
var itemResults = new List<AdapterItemResult>();
var currentItems = await CollectCurrentItemsAsync(
context,
tempDir,
itemResults,
cancellationToken);
if (currentItems.Count == 0)
{
_logger.LogWarning("No items collected for delta comparison");
return CreateEmptyResult(context, startTime);
}
// Compute delta against base
var deltaRequest = new MirrorDeltaComputeRequest
{
BaseRunId = Guid.Parse(deltaOptions.BaseExportId),
BaseManifestDigest = deltaOptions.BaseManifestDigest,
TenantId = context.TenantId,
CurrentItems = currentItems,
ResetBaseline = deltaOptions.ResetBaseline
};
var deltaResult = await _deltaService.ComputeDeltaAsync(deltaRequest, cancellationToken);
if (!deltaResult.Success)
{
return ExportAdapterResult.Failed(deltaResult.ErrorMessage ?? "Delta computation failed");
}
// If no changes, return early with empty delta
if (deltaResult.AddedItems.Count == 0 &&
deltaResult.ChangedItems.Count == 0 &&
deltaResult.RemovedItems.Count == 0)
{
_logger.LogInformation("No changes detected since base export {BaseExportId}", deltaOptions.BaseExportId);
return CreateNoChangesResult(context, deltaResult, startTime);
}
// Build data sources for added/changed items; unchanged items are reused from the content store when available
var dataSources = await BuildDeltaDataSourcesAsync(
deltaResult,
tempDir,
cancellationToken);
// Build selectors from changed items
var selectors = ExtractSelectors(context.Items);
// Create the delta bundle request
var bundleRequest = new MirrorBundleBuildRequest(
// Correlation id is pipe-delimited here (see ExtractDeltaOptions); the run id is its first segment.
Guid.TryParse(context.CorrelationId?.Split('|')[0], out var runId) ? runId : Guid.NewGuid(),
context.TenantId,
MirrorBundleVariant.Delta,
selectors,
dataSources,
DeltaOptions: new MirrorBundleDeltaOptions(
deltaOptions.BaseExportId,
deltaResult.BaseManifestDigest ?? deltaOptions.BaseManifestDigest,
deltaOptions.ResetBaseline));
var builder = new MirrorBundleBuilder(_cryptoHash, context.TimeProvider);
var buildResult = builder.Build(bundleRequest, cancellationToken);
// Write the bundle to output directory
var outputPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-mirror-delta-v1.tgz");
await using (var outputStream = new FileStream(
outputPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 128 * 1024,
useAsync: true))
{
buildResult.BundleStream.Position = 0;
await buildResult.BundleStream.CopyToAsync(outputStream, cancellationToken);
}
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumContent = $"{buildResult.RootHash} {Path.GetFileName(outputPath)}\n";
await File.WriteAllTextAsync(outputPath + ".sha256", checksumContent, cancellationToken);
}
// Write removed items manifest
if (deltaResult.RemovedItems.Count > 0)
{
var removedPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-delta-removed.jsonl");
await WriteRemovedManifestAsync(deltaResult.RemovedItems, removedPath, cancellationToken);
}
// Save manifest entries for future delta comparisons
var manifestEntries = currentItems
.Select(i => new MirrorBaseManifestEntry
{
ItemId = i.ItemId,
Category = i.Category,
BundlePath = i.BundlePath,
ContentHash = i.ContentHash,
SizeBytes = i.SizeBytes
})
.ToList();
await _manifestStore.SaveManifestEntriesAsync(
bundleRequest.RunId,
context.TenantId,
buildResult.Manifest.Delta?.BaseManifestDigest ?? buildResult.RootHash,
manifestEntries,
cancellationToken);
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = outputPath,
SizeBytes = new FileInfo(outputPath).Length,
Sha256 = buildResult.RootHash,
ContentType = "application/gzip",
ItemCount = dataSources.Count,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = BuildManifestCounts(context.Items, itemResults, deltaResult, artifact.SizeBytes);
_logger.LogInformation(
"Mirror delta bundle created: {Path} ({Bytes} bytes, {Added} added, {Changed} changed, {Removed} removed)",
outputPath,
artifact.SizeBytes,
deltaResult.AddedItems.Count,
deltaResult.ChangedItems.Count,
deltaResult.RemovedItems.Count);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build mirror delta bundle");
return ExportAdapterResult.Failed($"Mirror delta bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
_logger.LogWarning("Mirror delta adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private static MirrorBundleDeltaOptions? ExtractDeltaOptions(ExportAdapterContext context)
{
// Delta options are currently carried in the correlation id
// ("runId|baseExportId|baseManifestDigest[|resetBaseline]"); a richer
// ExportPlan/ExportProfile metadata channel could supersede this encoding.
var correlationParts = context.CorrelationId?.Split('|');
if (correlationParts?.Length >= 3)
{
return new MirrorBundleDeltaOptions(
correlationParts[1],
correlationParts[2],
correlationParts.Length > 3 && bool.TryParse(correlationParts[3], out var reset) && reset);
}
return null;
}
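// Sketch (assumption, not a documented contract): a caller wiring up a delta run
// would encode the options into the correlation id the parser above expects, e.g.
//   var correlationId = string.Join('|',
//       runId.ToString("D"), baseExportId, baseManifestDigest, resetBaseline);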
private async Task<List<MirrorDeltaItem>> CollectCurrentItemsAsync(
ExportAdapterContext context,
string tempDir,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
var items = new List<MirrorDeltaItem>();
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Determine category and bundle path
var category = MapKindToCategory(item.Kind);
if (category is null)
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
$"Unknown item kind: {item.Kind}"));
continue;
}
// Normalize content if configured
var jsonContent = content.JsonContent!;
if (context.Config.FormatOptions.SortKeys || context.Config.FormatOptions.NormalizeTimestamps)
{
var normalizer = new JsonNormalizer(new JsonNormalizationOptions
{
SortKeys = context.Config.FormatOptions.SortKeys,
NormalizeTimestamps = context.Config.FormatOptions.NormalizeTimestamps
});
var normalized = normalizer.Normalize(jsonContent);
if (normalized.Success && normalized.NormalizedJson is not null)
{
jsonContent = normalized.NormalizedJson;
}
}
// Compute content hash
var contentBytes = System.Text.Encoding.UTF8.GetBytes(jsonContent);
var contentHash = _cryptoHash.ComputeHashHexForPurpose(contentBytes, HashPurpose.Content);
// Write to temp file
var fileName = SanitizeFileName($"{item.Kind}-{item.Name ?? item.ItemId.ToString("N")}.json");
var categoryDir = Path.Combine(tempDir, category.Value.ToString().ToLowerInvariant());
Directory.CreateDirectory(categoryDir);
var tempFilePath = Path.Combine(categoryDir, fileName);
await File.WriteAllTextAsync(tempFilePath, jsonContent, cancellationToken);
// Compute bundle path
var bundlePath = ComputeBundlePath(category.Value, fileName, context.Config.FormatOptions.SortKeys, item.SourceRef);
items.Add(new MirrorDeltaItem
{
ItemId = item.ItemId.ToString("D"),
Category = category.Value,
ContentHash = contentHash,
BundlePath = bundlePath,
SizeBytes = contentBytes.LongLength,
ModifiedAt = item.CreatedAt != default ? item.CreatedAt : context.TimeProvider.GetUtcNow(),
SourcePath = tempFilePath
});
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
OutputPath = tempFilePath,
OutputSizeBytes = contentBytes.LongLength,
ContentHash = contentHash,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
return items;
}
private async Task<List<MirrorBundleDataSource>> BuildDeltaDataSourcesAsync(
MirrorDeltaComputeResult deltaResult,
string tempDir,
CancellationToken cancellationToken)
{
var dataSources = new List<MirrorBundleDataSource>();
// Add all added items
foreach (var item in deltaResult.AddedItems)
{
if (string.IsNullOrEmpty(item.SourcePath))
continue;
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
// Add all changed items
foreach (var change in deltaResult.ChangedItems)
{
var item = change.Current;
if (string.IsNullOrEmpty(item.SourcePath))
continue;
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
// For unchanged items, try to reuse from content store if available
if (_contentStore is not null)
{
foreach (var item in deltaResult.UnchangedItems)
{
var localPath = _contentStore.GetLocalPath(item.ContentHash);
if (!string.IsNullOrEmpty(localPath) && File.Exists(localPath))
{
dataSources.Add(new MirrorBundleDataSource(
item.Category,
localPath,
true,
item.ItemId));
}
else if (!string.IsNullOrEmpty(item.SourcePath))
{
// Fall back to source path if content store doesn't have it
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
}
}
await Task.CompletedTask; // Placeholder for potential async content store operations
return dataSources;
}
private static async Task WriteRemovedManifestAsync(
IReadOnlyList<MirrorDeltaRemovedItem> removedItems,
string outputPath,
CancellationToken cancellationToken)
{
await using var writer = new StreamWriter(outputPath, append: false, System.Text.Encoding.UTF8);
foreach (var item in removedItems.OrderBy(i => i.BundlePath, StringComparer.Ordinal))
{
var json = System.Text.Json.JsonSerializer.Serialize(new
{
itemId = item.ItemId,
category = item.Category.ToString().ToLowerInvariant(),
bundlePath = item.BundlePath,
contentHash = item.ContentHash
});
await writer.WriteLineAsync(json);
}
}
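// Sample JSONL line emitted by the writer above (illustrative values):
// {"itemId":"0f8c...","category":"advisories","bundlePath":"data/raw/advisories/cve-2024-1234.json","contentHash":"sha256:ab12..."}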
private static MirrorBundleDataCategory? MapKindToCategory(string kind)
{
return kind.ToLowerInvariant() switch
{
"advisory" => MirrorBundleDataCategory.Advisories,
"advisories" => MirrorBundleDataCategory.Advisories,
"vex" => MirrorBundleDataCategory.Vex,
"sbom" => MirrorBundleDataCategory.Sbom,
"policy-snapshot" => MirrorBundleDataCategory.PolicySnapshot,
"policy-evaluations" => MirrorBundleDataCategory.PolicyEvaluations,
"policy-result" => MirrorBundleDataCategory.PolicyEvaluations,
"vex-consensus" => MirrorBundleDataCategory.VexConsensus,
"findings" => MirrorBundleDataCategory.Findings,
"scan-report" => MirrorBundleDataCategory.Findings,
_ => null
};
}
private static string ComputeBundlePath(MirrorBundleDataCategory category, string fileName, bool isNormalized, string? subjectId)
{
var prefix = isNormalized ? "data/normalized" : "data/raw";
return category switch
{
MirrorBundleDataCategory.Advisories => $"{prefix}/advisories/{fileName}",
MirrorBundleDataCategory.Vex => $"{prefix}/vex/{fileName}",
MirrorBundleDataCategory.Sbom when !string.IsNullOrEmpty(subjectId) =>
$"data/raw/sboms/{SanitizeSegment(subjectId)}/{fileName}",
MirrorBundleDataCategory.Sbom => $"data/raw/sboms/{fileName}",
MirrorBundleDataCategory.PolicySnapshot => "data/policy/snapshot.json",
MirrorBundleDataCategory.PolicyEvaluations => $"data/policy/{fileName}",
MirrorBundleDataCategory.VexConsensus => $"data/consensus/{fileName}",
MirrorBundleDataCategory.Findings => $"data/findings/{fileName}",
_ => $"data/other/{fileName}"
};
}
private static string SanitizeSegment(string value)
{
if (string.IsNullOrWhiteSpace(value))
return "subject";
var builder = new System.Text.StringBuilder(value.Length);
foreach (var ch in value.Trim())
{
if (char.IsLetterOrDigit(ch))
builder.Append(char.ToLowerInvariant(ch));
else if (ch is '-' or '_' or '.')
builder.Append(ch);
else
builder.Append('-');
}
return builder.Length == 0 ? "subject" : builder.ToString();
}
private static string SanitizeFileName(string name)
{
if (string.IsNullOrWhiteSpace(name))
return "item.json";
var result = name.Trim().ToLowerInvariant();
foreach (var invalid in Path.GetInvalidFileNameChars())
{
result = result.Replace(invalid, '_');
}
result = result.Replace('/', '_').Replace('\\', '_');
if (result.Length > 64)
{
var ext = Path.GetExtension(result);
result = result[..(60 - ext.Length)] + ext;
}
return string.IsNullOrWhiteSpace(result) ? "item.json" : result;
}
private static MirrorBundleSelectors ExtractSelectors(IReadOnlyList<ResolvedExportItem> items)
{
var products = items
.Select(i => i.SourceRef)
.Where(s => !string.IsNullOrWhiteSpace(s))
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
var timestamps = items.Where(i => i.CreatedAt != default).Select(i => i.CreatedAt).ToList();
var minCreated = timestamps.Count > 0 ? timestamps.Min() : default;
var maxCreated = timestamps.Count > 0 ? timestamps.Max() : default;
var ecosystems = items
.Where(i => i.Metadata.TryGetValue("ecosystem", out _))
.Select(i => i.Metadata["ecosystem"])
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
return new MirrorBundleSelectors(
products.Count > 0 ? products : ["*"],
minCreated != default ? minCreated : null,
maxCreated != default ? maxCreated : null,
ecosystems.Count > 0 ? ecosystems : null);
}
private ExportAdapterResult CreateEmptyResult(ExportAdapterContext context, DateTimeOffset startTime)
{
return new ExportAdapterResult
{
Success = true,
ItemResults = [],
Artifacts = [],
ManifestCounts = new ExportManifestCounts(),
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
private ExportAdapterResult CreateNoChangesResult(
ExportAdapterContext context,
MirrorDeltaComputeResult deltaResult,
DateTimeOffset startTime)
{
_logger.LogInformation(
"Delta export completed with no changes. Base: {BaseExportId}",
deltaResult.BaseExportId);
return new ExportAdapterResult
{
Success = true,
ItemResults = [],
Artifacts = [],
ManifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = context.Items.Count,
SuccessfulItems = context.Items.Count,
SkippedItems = context.Items.Count, // All items skipped due to no changes
ByStatus = new Dictionary<string, int>
{
["unchanged"] = deltaResult.UnchangedItems.Count
}
},
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
private static ExportManifestCounts BuildManifestCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results,
MirrorDeltaComputeResult deltaResult,
long totalSizeBytes)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return new ExportManifestCounts
{
TotalItems = items.Count,
ProcessedItems = results.Count,
SuccessfulItems = results.Count(r => r.Success),
FailedItems = results.Count(r => !r.Success),
SkippedItems = deltaResult.UnchangedItems.Count,
ArtifactCount = 1,
TotalSizeBytes = totalSizeBytes,
ByKind = items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count()),
ByStatus = new Dictionary<string, int>
{
["added"] = deltaResult.AddedItems.Count,
["changed"] = deltaResult.ChangedItems.Count,
["removed"] = deltaResult.RemovedItems.Count,
["unchanged"] = deltaResult.UnchangedItems.Count
}
};
}
}

View File

@@ -0,0 +1,529 @@
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Export adapter that produces Trivy-compatible vulnerability database bundles.
/// Schema v2 compatible with Trivy 0.46.x - 0.50.x.
/// </summary>
public sealed class TrivyDbAdapter : IExportAdapter
{
private const int SupportedSchemaVersion = 2;
private const string BundleFileName = "trivy-db.tar.gz";
private const string MetadataFileName = "metadata.json";
private const string DbFileName = "trivy.db";
private readonly ILogger<TrivyDbAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly TrivyDbAdapterOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = null // Preserve exact property names
};
public TrivyDbAdapter(
ILogger<TrivyDbAdapter> logger,
ICryptoHash cryptoHash,
TrivyDbAdapterOptions? options = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = options ?? new TrivyDbAdapterOptions();
}
/// <inheritdoc />
public string AdapterId => "trivy:db";
/// <inheritdoc />
public string DisplayName => "Trivy Vulnerability Database";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.TrivyDb];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting Trivy DB export for {ItemCount} items (schema v{SchemaVersion})",
context.Items.Count,
_options.SchemaVersion);
// Validate schema version
if (_options.SchemaVersion != SupportedSchemaVersion)
{
return ExportAdapterResult.Failed(
$"Unsupported Trivy DB schema version {_options.SchemaVersion}. Only v{SupportedSchemaVersion} is supported.");
}
// Create temp directory for staging
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-db-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Process items and collect vulnerabilities
var itemResults = new List<AdapterItemResult>();
var namespaces = new Dictionary<string, TrivyNamespaceEntry>(StringComparer.OrdinalIgnoreCase);
var mapper = new TrivySchemaMapper(
_logger.CreateLogger<TrivySchemaMapper>(),
_options);
await CollectVulnerabilitiesAsync(
context,
mapper,
namespaces,
itemResults,
cancellationToken);
var totalVulnCount = namespaces.Values.Sum(ns => ns.Vulnerabilities.Count);
// Check for empty output
if (totalVulnCount == 0 && !_options.AllowEmpty)
{
return ExportAdapterResult.Failed(
"No vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
}
_logger.LogInformation(
"Collected {VulnCount} vulnerabilities across {NamespaceCount} namespaces",
totalVulnCount,
namespaces.Count);
// Build the database file (JSON-based for simplicity)
var dbPath = Path.Combine(tempDir, DbFileName);
await BuildDatabaseFileAsync(namespaces, dbPath, cancellationToken);
// Generate metadata
var metadata = GenerateMetadata(
context,
namespaces.Count,
totalVulnCount);
var metadataPath = Path.Combine(tempDir, MetadataFileName);
var metadataJson = JsonSerializer.Serialize(metadata, JsonOptions);
await File.WriteAllTextAsync(metadataPath, metadataJson, cancellationToken);
// Create the bundle tarball
var bundlePath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-{BundleFileName}");
await CreateBundleAsync(tempDir, bundlePath, cancellationToken);
// Calculate bundle hash
var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken);
var bundleHash = _cryptoHash.ComputeHashHex(bundleBytes, "sha256");
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumPath = bundlePath + ".sha256";
var checksumContent = $"{bundleHash} {Path.GetFileName(bundlePath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = bundlePath,
SizeBytes = bundleBytes.Length,
Sha256 = bundleHash,
ContentType = "application/gzip",
ItemCount = totalVulnCount,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Trivy DB bundle created: {Path} ({Bytes} bytes, {VulnCount} vulnerabilities, {NamespaceCount} namespaces, hash: {Hash})",
bundlePath,
artifact.SizeBytes,
totalVulnCount,
namespaces.Count,
bundleHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build Trivy DB bundle");
return ExportAdapterResult.Failed($"Trivy DB bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Trivy DB adapter doesn't support streaming - all items must be processed together
_logger.LogWarning("Trivy DB adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
// Validate output directory
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
// Validate format
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
// Validate schema version
if (_options.SchemaVersion != SupportedSchemaVersion)
{
errors.Add($"Schema version {_options.SchemaVersion} is not supported. Only v{SupportedSchemaVersion} is supported.");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task CollectVulnerabilitiesAsync(
ExportAdapterContext context,
TrivySchemaMapper mapper,
Dictionary<string, TrivyNamespaceEntry> namespaces,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
// Only process advisory-type items
if (!IsAdvisoryItem(item.Kind))
{
_logger.LogDebug("Skipping non-advisory item {ItemId} of kind {Kind}", item.ItemId, item.Kind);
continue;
}
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Map to Trivy vulnerabilities
var vulns = mapper.MapAdvisory(content.JsonContent, item.SourceRef);
if (vulns.Count == 0)
{
_logger.LogDebug("No vulnerabilities mapped from item {ItemId}", item.ItemId);
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
continue;
}
// Group by namespace
foreach (var vuln in vulns)
{
var ns = vuln.DataSource?.Id ?? "unknown";
if (!namespaces.TryGetValue(ns, out var entry))
{
entry = new TrivyNamespaceEntry { Namespace = ns };
namespaces[ns] = entry;
}
// Deduplicate by (vulnId, packageName, version)
var key = $"{vuln.VulnerabilityId}|{vuln.PackageName}|{vuln.InstalledVersion}";
if (!entry.Vulnerabilities.Any(v =>
$"{v.VulnerabilityId}|{v.PackageName}|{v.InstalledVersion}" == key))
{
entry.Vulnerabilities.Add(vuln);
}
}
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ContentHash = content.OriginalHash,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
}
private static bool IsAdvisoryItem(string kind)
{
return kind.Equals("advisory", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("advisories", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("vulnerability", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("cve", StringComparison.OrdinalIgnoreCase);
}
private async Task BuildDatabaseFileAsync(
Dictionary<string, TrivyNamespaceEntry> namespaces,
string dbPath,
CancellationToken cancellationToken)
{
// For simplicity this emits a JSON stand-in for the database file.
// Stock Trivy expects a BoltDB file (trivy.db); a production pipeline would
// convert this JSON into BoltDB before publishing.
var dbContent = new Dictionary<string, object>
{
["version"] = SupportedSchemaVersion,
["namespaces"] = namespaces.Values
.OrderBy(ns => ns.Namespace, StringComparer.Ordinal)
.Select(ns => new
{
ns.Namespace,
Vulnerabilities = ns.Vulnerabilities
.OrderBy(v => v.VulnerabilityId, StringComparer.Ordinal)
.ThenBy(v => v.PackageName, StringComparer.Ordinal)
.ToList()
})
.ToList()
};
var json = JsonSerializer.Serialize(dbContent, new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = null
});
await File.WriteAllTextAsync(dbPath, json, Encoding.UTF8, cancellationToken);
}
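// Resulting file shape (illustrative, abridged):
// {"version":2,"namespaces":[{"Namespace":"alpine","Vulnerabilities":[
//   {"VulnerabilityID":"CVE-2024-1234","PkgName":"openssl","Severity":"HIGH", ...}]}]}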
private TrivyDbMetadata GenerateMetadata(
ExportAdapterContext context,
int namespaceCount,
int vulnerabilityCount)
{
var now = context.TimeProvider.GetUtcNow();
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
return new TrivyDbMetadata
{
Version = SupportedSchemaVersion,
Type = 0, // Full DB
UpdatedAt = now,
DownloadedAt = now,
NextUpdate = now.AddDays(1), // Default to next day
Stella = new TrivyDbStellaExtension
{
Version = "1.0.0",
RunId = runId,
TenantId = context.TenantId,
SchemaVersion = SupportedSchemaVersion,
GeneratedAt = now,
SourceCount = namespaceCount,
VulnerabilityCount = vulnerabilityCount
}
};
}
private static async Task CreateBundleAsync(
string sourceDir,
string outputPath,
CancellationToken cancellationToken)
{
// Create a memory stream for the tar, then gzip it
using var tarStream = new MemoryStream();
// Simple tar creation (header + content for each file)
foreach (var file in Directory.GetFiles(sourceDir))
{
cancellationToken.ThrowIfCancellationRequested();
var fileName = Path.GetFileName(file);
var content = await File.ReadAllBytesAsync(file, cancellationToken);
// Write tar header (simplified USTAR format)
WriteTarHeader(tarStream, fileName, content.Length);
tarStream.Write(content);
// Pad to 512-byte boundary
var padding = (512 - (content.Length % 512)) % 512;
if (padding > 0)
{
tarStream.Write(new byte[padding]);
}
}
// Write two empty 512-byte blocks to end the archive
tarStream.Write(new byte[1024]);
// Gzip the tar stream
tarStream.Position = 0;
await using var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write);
await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);
await tarStream.CopyToAsync(gzipStream, cancellationToken);
}
private static void WriteTarHeader(Stream stream, string fileName, long fileSize)
{
var header = new byte[512];
// File name (100 bytes)
var nameBytes = Encoding.ASCII.GetBytes(fileName);
Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100));
// File mode (8 bytes) - 0644
Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
// UID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
// GID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
// Size (12 bytes) - octal
var sizeOctal = Convert.ToString(fileSize, 8).PadLeft(11, '0') + "\0";
Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);
// Mtime (12 bytes) - octal, fixed for determinism (2024-01-01 00:00:00 UTC = 1704067200 = 0o14544400200)
Encoding.ASCII.GetBytes("14544400200\0").CopyTo(header, 136);
// Checksum placeholder: POSIX requires the field to hold eight spaces while summing
Encoding.ASCII.GetBytes(new string(' ', 8)).CopyTo(header, 148);
// Type flag - '0' for regular file
header[156] = (byte)'0';
// Magic (6 bytes) - "ustar\0"
Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
// Version (2 bytes) - "00"
Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
// Calculate checksum
var checksum = 0;
for (var i = 0; i < 512; i++)
{
checksum += header[i];
}
var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);
stream.Write(header);
}
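// Sanity check for the fixed mtime above: 2024-01-01T00:00:00Z is unix time
// 1704067200, and Convert.ToString(1704067200L, 8) == "14544400200".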
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
}
internal static class LoggerExtensions
{
public static ILogger<T> CreateLogger<T>(this ILogger logger)
{
// Wrap the untyped logger so category-typed consumers can reuse it
return new LoggerWrapper<T>(logger);
}
private sealed class LoggerWrapper<T> : ILogger<T>
{
private readonly ILogger _inner;
public LoggerWrapper(ILogger inner) => _inner = inner;
public IDisposable? BeginScope<TState>(TState state) where TState : notnull
=> _inner.BeginScope(state);
public bool IsEnabled(LogLevel logLevel) => _inner.IsEnabled(logLevel);
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
=> _inner.Log(logLevel, eventId, state, exception, formatter);
}
}

View File

@@ -0,0 +1,374 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Configuration options for Trivy DB adapter.
/// </summary>
public sealed record TrivyDbAdapterOptions
{
/// <summary>
/// Trivy DB schema version to generate. Only v2 is currently supported.
/// </summary>
public int SchemaVersion { get; init; } = 2;
/// <summary>
/// Whether to include Java DB bundle (Maven/Gradle/SBT supplement).
/// </summary>
public bool IncludeJavaDb { get; init; }
/// <summary>
/// Whether to allow empty output (fail if no records match when false).
/// </summary>
public bool AllowEmpty { get; init; }
/// <summary>
/// Maximum CVSS vectors to include per vulnerability entry.
/// </summary>
public int MaxCvssVectorsPerEntry { get; init; } = 5;
/// <summary>
/// Namespaces to include (empty = all).
/// </summary>
public IReadOnlyList<string> IncludeNamespaces { get; init; } = [];
/// <summary>
/// Namespaces to exclude.
/// </summary>
public IReadOnlyList<string> ExcludeNamespaces { get; init; } = [];
}
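// Usage sketch (values are illustrative): an OS-only export that tolerates
// empty bundles could be configured as
//   var options = new TrivyDbAdapterOptions
//   {
//       AllowEmpty = true,
//       IncludeNamespaces = ["ubuntu", "debian", "alpine"]
//   };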
/// <summary>
/// Trivy DB metadata.json structure.
/// </summary>
public sealed record TrivyDbMetadata
{
[JsonPropertyName("version")]
public int Version { get; init; } = 2;
[JsonPropertyName("type")]
public int Type { get; init; } = 0; // 0 = full DB, 1 = light
[JsonPropertyName("nextUpdate")]
public DateTimeOffset NextUpdate { get; init; }
[JsonPropertyName("updatedAt")]
public DateTimeOffset UpdatedAt { get; init; }
[JsonPropertyName("downloadedAt")]
public DateTimeOffset DownloadedAt { get; init; }
/// <summary>
/// StellaOps extension block for provenance tracking.
/// </summary>
[JsonPropertyName("stella")]
public TrivyDbStellaExtension? Stella { get; init; }
}
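// Serialized metadata.json (illustrative values, abridged):
// {"version":2,"type":0,"nextUpdate":"2024-01-02T00:00:00+00:00",
//  "updatedAt":"2024-01-01T00:00:00+00:00","downloadedAt":"2024-01-01T00:00:00+00:00",
//  "stella":{"version":"1.0.0","runId":"...","vulnerabilityCount":1234}}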
/// <summary>
/// StellaOps extension block in Trivy metadata.
/// </summary>
public sealed record TrivyDbStellaExtension
{
[JsonPropertyName("version")]
public string Version { get; init; } = "1.0.0";
[JsonPropertyName("runId")]
public Guid RunId { get; init; }
[JsonPropertyName("profileId")]
public Guid? ProfileId { get; init; }
[JsonPropertyName("tenantId")]
public Guid TenantId { get; init; }
[JsonPropertyName("policySnapshotId")]
public Guid? PolicySnapshotId { get; init; }
[JsonPropertyName("schemaVersion")]
public int SchemaVersion { get; init; }
[JsonPropertyName("generatedAt")]
public DateTimeOffset GeneratedAt { get; init; }
[JsonPropertyName("sourceCount")]
public int SourceCount { get; init; }
[JsonPropertyName("vulnerabilityCount")]
public int VulnerabilityCount { get; init; }
}
/// <summary>
/// Trivy vulnerability entry (simplified schema v2 compatible).
/// </summary>
public sealed record TrivyVulnerability
{
[JsonPropertyName("VulnerabilityID")]
public required string VulnerabilityId { get; init; }
[JsonPropertyName("PkgName")]
public required string PackageName { get; init; }
[JsonPropertyName("InstalledVersion")]
public string? InstalledVersion { get; init; }
[JsonPropertyName("FixedVersion")]
public string? FixedVersion { get; init; }
[JsonPropertyName("Severity")]
public required string Severity { get; init; }
[JsonPropertyName("SeveritySource")]
public string? SeveritySource { get; init; }
[JsonPropertyName("Title")]
public string? Title { get; init; }
[JsonPropertyName("Description")]
public string? Description { get; init; }
[JsonPropertyName("References")]
public IReadOnlyList<string> References { get; init; } = [];
[JsonPropertyName("CVSS")]
public IReadOnlyDictionary<string, TrivyCvss>? Cvss { get; init; }
[JsonPropertyName("CweIDs")]
public IReadOnlyList<string> CweIds { get; init; } = [];
[JsonPropertyName("PublishedDate")]
public DateTimeOffset? PublishedDate { get; init; }
[JsonPropertyName("LastModifiedDate")]
public DateTimeOffset? LastModifiedDate { get; init; }
[JsonPropertyName("DataSource")]
public TrivyDataSource? DataSource { get; init; }
}
/// <summary>
/// CVSS score entry for Trivy format.
/// </summary>
public sealed record TrivyCvss
{
[JsonPropertyName("V2Vector")]
public string? V2Vector { get; init; }
[JsonPropertyName("V3Vector")]
public string? V3Vector { get; init; }
[JsonPropertyName("V2Score")]
public double? V2Score { get; init; }
[JsonPropertyName("V3Score")]
public double? V3Score { get; init; }
}
/// <summary>
/// Data source reference for Trivy vulnerability.
/// </summary>
public sealed record TrivyDataSource
{
[JsonPropertyName("ID")]
public required string Id { get; init; }
[JsonPropertyName("Name")]
public required string Name { get; init; }
[JsonPropertyName("URL")]
public string? Url { get; init; }
}
/// <summary>
/// Trivy namespace (vendor/ecosystem) entry.
/// </summary>
public sealed record TrivyNamespaceEntry
{
/// <summary>
/// Namespace identifier (e.g., "ubuntu", "alpine", "npm").
/// </summary>
public required string Namespace { get; init; }
/// <summary>
/// Vulnerabilities in this namespace.
/// </summary>
public List<TrivyVulnerability> Vulnerabilities { get; init; } = [];
}
/// <summary>
/// Result of Trivy DB bundle build.
/// </summary>
public sealed record TrivyDbBuildResult
{
/// <summary>
/// Whether the build succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Bundle stream (tar.gz).
/// </summary>
public MemoryStream? BundleStream { get; init; }
/// <summary>
/// SHA-256 hash of the bundle.
/// </summary>
public string? BundleHash { get; init; }
/// <summary>
/// Generated metadata.
/// </summary>
public TrivyDbMetadata? Metadata { get; init; }
/// <summary>
/// Number of namespaces in the bundle.
/// </summary>
public int NamespaceCount { get; init; }
/// <summary>
/// Total number of vulnerability entries.
/// </summary>
public int VulnerabilityCount { get; init; }
/// <summary>
/// Error message if build failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static TrivyDbBuildResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Severity mapping between StellaOps and Trivy.
/// </summary>
public static class TrivySeverityMapper
{
private static readonly Dictionary<string, string> SeverityMap = new(StringComparer.OrdinalIgnoreCase)
{
["critical"] = "CRITICAL",
["high"] = "HIGH",
["medium"] = "MEDIUM",
["moderate"] = "MEDIUM",
["low"] = "LOW",
["none"] = "UNKNOWN",
["info"] = "UNKNOWN",
["informational"] = "UNKNOWN",
["unknown"] = "UNKNOWN"
};
/// <summary>
/// Maps a StellaOps severity to Trivy severity.
/// </summary>
public static string MapSeverity(string? severity)
{
if (string.IsNullOrWhiteSpace(severity))
return "UNKNOWN";
return SeverityMap.TryGetValue(severity.Trim(), out var mapped)
? mapped
: "UNKNOWN";
}
}
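// Usage sketch: MapSeverity("moderate") == "MEDIUM"; MapSeverity("Critical") == "CRITICAL";
// anything unrecognized (or null/blank) falls back to "UNKNOWN".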
/// <summary>
/// Namespace mapper for vendor/ecosystem to Trivy namespace.
/// </summary>
public static class TrivyNamespaceMapper
{
private static readonly Dictionary<string, string> VendorToNamespace = new(StringComparer.OrdinalIgnoreCase)
{
// OS distributions
["Ubuntu"] = "ubuntu",
["Debian"] = "debian",
["Alpine"] = "alpine",
["Red Hat"] = "redhat",
["RedHat"] = "redhat",
["RHEL"] = "redhat",
["CentOS"] = "centos",
["Oracle Linux"] = "oracle",
["Amazon Linux"] = "amazon",
["SUSE"] = "suse",
["openSUSE"] = "opensuse",
["Photon OS"] = "photon",
["Arch Linux"] = "arch",
["Fedora"] = "fedora",
["Rocky Linux"] = "rocky",
["AlmaLinux"] = "alma",
["Wolfi"] = "wolfi",
["Chainguard"] = "chainguard",
["Mariner"] = "mariner",
["CBL-Mariner"] = "mariner",
// Language ecosystems
["npm"] = "npm",
["Node.js"] = "npm",
["PyPI"] = "pypi",
["Python"] = "pypi",
["RubyGems"] = "rubygems",
["Ruby"] = "rubygems",
["Maven"] = "maven",
["Java"] = "maven",
["NuGet"] = "nuget",
[".NET"] = "nuget",
["Go"] = "go",
["Golang"] = "go",
["Cargo"] = "cargo",
["Rust"] = "cargo",
["Packagist"] = "packagist",
["PHP"] = "packagist",
["Hex"] = "hex",
["Erlang"] = "hex",
["Elixir"] = "hex",
["Pub"] = "pub",
["Dart"] = "pub",
["Swift"] = "swift",
["CocoaPods"] = "cocoapods",
// Generic sources
["NVD"] = "nvd",
["GHSA"] = "ghsa",
["GitHub"] = "ghsa",
["OSV"] = "osv",
["CISA KEV"] = "kev"
};
/// <summary>
/// Maps a vendor/ecosystem to Trivy namespace.
/// </summary>
public static string MapToNamespace(string? vendor, string? ecosystem = null)
{
// Try vendor first
if (!string.IsNullOrWhiteSpace(vendor) &&
VendorToNamespace.TryGetValue(vendor.Trim(), out var ns))
{
return ns;
}
// Try ecosystem
if (!string.IsNullOrWhiteSpace(ecosystem) &&
VendorToNamespace.TryGetValue(ecosystem.Trim(), out ns))
{
return ns;
}
// Fallback to lowercase vendor
return string.IsNullOrWhiteSpace(vendor)
? "unknown"
: vendor.Trim().ToLowerInvariant().Replace(" ", "-");
}
/// <summary>
/// Checks if a namespace is a language ecosystem (vs OS distribution).
/// </summary>
public static bool IsLanguageEcosystem(string ns)
{
return ns switch
{
"npm" or "pypi" or "rubygems" or "maven" or "nuget" or
"go" or "cargo" or "packagist" or "hex" or "pub" or
"swift" or "cocoapods" => true,
_ => false
};
}
}
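// Usage sketch (illustrative):
//   TrivyNamespaceMapper.MapToNamespace("Red Hat")       -> "redhat"
//   TrivyNamespaceMapper.MapToNamespace(null, "Go")      -> "go"
//   TrivyNamespaceMapper.MapToNamespace("Custom Vendor") -> "custom-vendor" (lowercased fallback)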

View File

@@ -0,0 +1,622 @@
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Export adapter that produces Trivy Java DB bundles (Maven/Gradle/SBT supplement).
/// Schema v1 compatible with Trivy 0.46.x - 0.50.x Java scanning.
/// </summary>
public sealed class TrivyJavaDbAdapter : IExportAdapter
{
private const int SupportedSchemaVersion = 1;
private const string BundleFileName = "trivy-java-db.tar.gz";
private const string MetadataFileName = "metadata.json";
private const string IndexFileName = "indexes.json";
// Java ecosystem namespaces
private static readonly HashSet<string> JavaNamespaces = new(StringComparer.OrdinalIgnoreCase)
{
"maven",
"gradle",
"sbt",
"java",
"ghsa-maven"
};
private readonly ILogger<TrivyJavaDbAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly TrivyDbAdapterOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = null // Preserve exact property names
};
public TrivyJavaDbAdapter(
ILogger<TrivyJavaDbAdapter> logger,
ICryptoHash cryptoHash,
TrivyDbAdapterOptions? options = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = options ?? new TrivyDbAdapterOptions { IncludeJavaDb = true };
}
/// <inheritdoc />
public string AdapterId => "trivy:java-db";
/// <inheritdoc />
public string DisplayName => "Trivy Java Vulnerability Database";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.TrivyJavaDb];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting Trivy Java DB export for {ItemCount} items",
context.Items.Count);
// Create temp directory for staging
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-java-db-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Process items and collect Java-specific vulnerabilities
var itemResults = new List<AdapterItemResult>();
var javaIndexes = new Dictionary<string, JavaDbIndex>(StringComparer.OrdinalIgnoreCase);
var javaOptions = new TrivyDbAdapterOptions
{
SchemaVersion = SupportedSchemaVersion,
IncludeJavaDb = true,
IncludeNamespaces = JavaNamespaces.ToList(),
AllowEmpty = _options.AllowEmpty,
MaxCvssVectorsPerEntry = _options.MaxCvssVectorsPerEntry
};
var mapper = new TrivySchemaMapper(
_logger.CreateLogger<TrivySchemaMapper>(),
javaOptions);
await CollectJavaVulnerabilitiesAsync(
context,
mapper,
javaIndexes,
itemResults,
cancellationToken);
var totalVulnCount = javaIndexes.Values.Sum(idx => idx.Entries.Count);
// Check for empty output
if (totalVulnCount == 0 && !_options.AllowEmpty)
{
return ExportAdapterResult.Failed(
"No Java vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
}
_logger.LogInformation(
"Collected {VulnCount} Java vulnerabilities across {IndexCount} GAV indexes",
totalVulnCount,
javaIndexes.Count);
// Build the indexes file (GAV-based lookup)
var indexesPath = Path.Combine(tempDir, IndexFileName);
await BuildIndexesFileAsync(javaIndexes, indexesPath, cancellationToken);
// Generate metadata
var metadata = GenerateMetadata(context, javaIndexes.Count, totalVulnCount);
var metadataPath = Path.Combine(tempDir, MetadataFileName);
var metadataJson = JsonSerializer.Serialize(metadata, JsonOptions);
await File.WriteAllTextAsync(metadataPath, metadataJson, cancellationToken);
// Create the bundle tarball
var bundlePath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-{BundleFileName}");
await CreateBundleAsync(tempDir, bundlePath, cancellationToken);
// Calculate bundle hash
var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken);
var bundleHash = _cryptoHash.ComputeHashHex(bundleBytes, "sha256");
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumPath = bundlePath + ".sha256";
var checksumContent = $"{bundleHash} {Path.GetFileName(bundlePath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = bundlePath,
SizeBytes = bundleBytes.Length,
Sha256 = bundleHash,
ContentType = "application/gzip",
ItemCount = totalVulnCount,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Trivy Java DB bundle created: {Path} ({Bytes} bytes, {VulnCount} vulnerabilities, {IndexCount} GAV indexes, hash: {Hash})",
bundlePath,
artifact.SizeBytes,
totalVulnCount,
javaIndexes.Count,
bundleHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build Trivy Java DB bundle");
return ExportAdapterResult.Failed($"Trivy Java DB bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Java DB adapter doesn't support streaming - all items must be processed together
_logger.LogWarning("Trivy Java DB adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
// Validate output directory
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
// Validate format
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task CollectJavaVulnerabilitiesAsync(
ExportAdapterContext context,
TrivySchemaMapper mapper,
Dictionary<string, JavaDbIndex> javaIndexes,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
// Only process advisory-type items
if (!IsAdvisoryItem(item.Kind))
{
_logger.LogDebug("Skipping non-advisory item {ItemId} of kind {Kind}", item.ItemId, item.Kind);
continue;
}
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Map to Trivy vulnerabilities (filtered to Java namespaces)
var vulns = mapper.MapAdvisory(content.JsonContent, item.SourceRef);
if (vulns.Count == 0)
{
_logger.LogDebug("No Java vulnerabilities mapped from item {ItemId}", item.ItemId);
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
continue;
}
// Build GAV-based indexes (GroupId:ArtifactId:Version)
foreach (var vuln in vulns)
{
// Only include Java ecosystem vulnerabilities
var ns = vuln.DataSource?.Id ?? "unknown";
if (!IsJavaNamespace(ns))
{
continue;
}
// Parse package name as GAV coordinate
var gav = ParseGavCoordinate(vuln.PackageName);
if (gav is null)
{
// Fall back to using package name as artifact ID
gav = new GavCoordinate("unknown", vuln.PackageName, vuln.InstalledVersion);
}
var indexKey = $"{gav.GroupId}:{gav.ArtifactId}";
if (!javaIndexes.TryGetValue(indexKey, out var index))
{
index = new JavaDbIndex
{
GroupId = gav.GroupId,
ArtifactId = gav.ArtifactId
};
javaIndexes[indexKey] = index;
}
// Add vulnerability entry
var entry = new JavaDbEntry
{
VulnerabilityId = vuln.VulnerabilityId,
AffectedVersions = vuln.InstalledVersion ?? "*",
FixedVersions = vuln.FixedVersion,
Severity = vuln.Severity,
Title = vuln.Title,
Description = vuln.Description?.Length > 500
? vuln.Description[..500] + "..."
: vuln.Description
};
// Deduplicate
if (!index.Entries.Any(e => e.VulnerabilityId == entry.VulnerabilityId &&
e.AffectedVersions == entry.AffectedVersions))
{
index.Entries.Add(entry);
}
}
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ContentHash = content.OriginalHash,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
}
private static bool IsAdvisoryItem(string kind)
{
return kind.Equals("advisory", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("advisories", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("vulnerability", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("cve", StringComparison.OrdinalIgnoreCase);
}
private static bool IsJavaNamespace(string ns)
{
// Guard the substring check so e.g. "javascript" does not match "java".
return JavaNamespaces.Contains(ns) ||
ns.StartsWith("maven", StringComparison.OrdinalIgnoreCase) ||
ns.StartsWith("gradle", StringComparison.OrdinalIgnoreCase) ||
(ns.Contains("java", StringComparison.OrdinalIgnoreCase) &&
!ns.Contains("javascript", StringComparison.OrdinalIgnoreCase));
}
private static GavCoordinate? ParseGavCoordinate(string packageName)
{
if (string.IsNullOrWhiteSpace(packageName))
return null;
// Try GroupId:ArtifactId:Version format
var parts = packageName.Split(':');
if (parts.Length >= 2)
{
return new GavCoordinate(
parts[0],
parts[1],
parts.Length >= 3 ? parts[2] : null);
}
// Try GroupId/ArtifactId format (PURL style)
var slashIndex = packageName.LastIndexOf('/');
if (slashIndex > 0)
{
return new GavCoordinate(
packageName[..slashIndex].Replace('/', '.'),
packageName[(slashIndex + 1)..],
null);
}
return null;
}
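// Illustrative parses for the method above:
//   "com.fasterxml.jackson.core:jackson-databind:2.9.10"
//       -> ("com.fasterxml.jackson.core", "jackson-databind", "2.9.10")
//   "org/apache/logging/log4j/log4j-core"
//       -> ("org.apache.logging.log4j", "log4j-core", null)   // PURL-style fallback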
private static async Task BuildIndexesFileAsync(
Dictionary<string, JavaDbIndex> indexes,
string indexesPath,
CancellationToken cancellationToken)
{
var sortedIndexes = indexes.Values
.OrderBy(idx => idx.GroupId, StringComparer.Ordinal)
.ThenBy(idx => idx.ArtifactId, StringComparer.Ordinal)
.Select(idx => new
{
idx.GroupId,
idx.ArtifactId,
Vulnerabilities = idx.Entries
.OrderBy(e => e.VulnerabilityId, StringComparer.Ordinal)
.ToList()
})
.ToList();
var content = new Dictionary<string, object>
{
["schemaVersion"] = SupportedSchemaVersion,
["type"] = "java",
["indexes"] = sortedIndexes
};
var json = JsonSerializer.Serialize(content, new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = null
});
await File.WriteAllTextAsync(indexesPath, json, Encoding.UTF8, cancellationToken);
}
private TrivyJavaDbMetadata GenerateMetadata(
ExportAdapterContext context,
int indexCount,
int vulnerabilityCount)
{
var now = context.TimeProvider.GetUtcNow();
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
return new TrivyJavaDbMetadata
{
Version = SupportedSchemaVersion,
Type = "java",
UpdatedAt = now,
DownloadedAt = now,
NextUpdate = now.AddDays(1),
Stella = new TrivyDbStellaExtension
{
Version = "1.0.0",
RunId = runId,
TenantId = context.TenantId,
SchemaVersion = SupportedSchemaVersion,
GeneratedAt = now,
SourceCount = indexCount,
VulnerabilityCount = vulnerabilityCount
}
};
}
private static async Task CreateBundleAsync(
string sourceDir,
string outputPath,
CancellationToken cancellationToken)
{
// Create a memory stream for the tar, then gzip it
using var tarStream = new MemoryStream();
// Simple tar creation (header + content for each file)
foreach (var file in Directory.GetFiles(sourceDir))
{
cancellationToken.ThrowIfCancellationRequested();
var fileName = Path.GetFileName(file);
var content = await File.ReadAllBytesAsync(file, cancellationToken);
// Write tar header (simplified USTAR format)
WriteTarHeader(tarStream, fileName, content.Length);
tarStream.Write(content);
// Pad to 512-byte boundary
var padding = (512 - (content.Length % 512)) % 512;
if (padding > 0)
{
tarStream.Write(new byte[padding]);
}
}
// Write two empty 512-byte blocks to end the archive
tarStream.Write(new byte[1024]);
// Gzip the tar stream
tarStream.Position = 0;
await using var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write);
await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);
await tarStream.CopyToAsync(gzipStream, cancellationToken);
}
private static void WriteTarHeader(Stream stream, string fileName, long fileSize)
{
var header = new byte[512];
// File name (100 bytes)
var nameBytes = Encoding.ASCII.GetBytes(fileName);
Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100));
// File mode (8 bytes) - 0644
Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
// UID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
// GID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
// Size (12 bytes) - octal
var sizeOctal = Convert.ToString(fileSize, 8).PadLeft(11, '0') + "\0";
Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);
// Mtime (12 bytes) - octal, fixed for determinism (2024-01-01 00:00:00 UTC = 1704067200 = 0o14544400200)
Encoding.ASCII.GetBytes("14544400200\0").CopyTo(header, 136);
// Checksum placeholder: POSIX requires the field to hold eight spaces while summing
Encoding.ASCII.GetBytes(new string(' ', 8)).CopyTo(header, 148);
// Type flag - '0' for regular file
header[156] = (byte)'0';
// Magic (6 bytes) - "ustar\0"
Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
// Version (2 bytes) - "00"
Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
// Calculate checksum
var checksum = 0;
for (var i = 0; i < 512; i++)
{
checksum += header[i];
}
var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);
stream.Write(header);
}
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
// Internal types for Java DB
private sealed record GavCoordinate(string GroupId, string ArtifactId, string? Version);
private sealed class JavaDbIndex
{
public required string GroupId { get; init; }
public required string ArtifactId { get; init; }
public List<JavaDbEntry> Entries { get; init; } = [];
}
private sealed class JavaDbEntry
{
public required string VulnerabilityId { get; init; }
public required string AffectedVersions { get; init; }
public string? FixedVersions { get; init; }
public required string Severity { get; init; }
public string? Title { get; init; }
public string? Description { get; init; }
}
}
/// <summary>
/// Trivy Java DB metadata.json structure.
/// </summary>
public sealed record TrivyJavaDbMetadata
{
[System.Text.Json.Serialization.JsonPropertyName("version")]
public int Version { get; init; } = 1;
[System.Text.Json.Serialization.JsonPropertyName("type")]
public string Type { get; init; } = "java";
[System.Text.Json.Serialization.JsonPropertyName("nextUpdate")]
public DateTimeOffset NextUpdate { get; init; }
[System.Text.Json.Serialization.JsonPropertyName("updatedAt")]
public DateTimeOffset UpdatedAt { get; init; }
[System.Text.Json.Serialization.JsonPropertyName("downloadedAt")]
public DateTimeOffset DownloadedAt { get; init; }
/// <summary>
/// StellaOps extension block for provenance tracking.
/// </summary>
[System.Text.Json.Serialization.JsonPropertyName("stella")]
public TrivyDbStellaExtension? Stella { get; init; }
}
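
Not part of the diff: a minimal reader-side sketch for sanity-checking the USTAR output above (requires .NET 7+ for ReadExactly; the bundle path is a placeholder). The checksum is recomputed with the 8-byte checksum field treated as ASCII spaces, matching how WriteTarHeader seeds the placeholder before summing.

using System.IO.Compression;
using System.Text;

static bool ValidateFirstTarHeader(string bundlePath)
{
    using var fs = File.OpenRead(bundlePath);
    using var gz = new GZipStream(fs, CompressionMode.Decompress);
    var header = new byte[512];
    gz.ReadExactly(header);
    // Stored checksum: six octal digits at offset 148.
    var stored = Convert.ToInt32(Encoding.ASCII.GetString(header, 148, 6), 8);
    var computed = 0;
    for (var i = 0; i < 512; i++)
    {
        computed += (i >= 148 && i < 156) ? 0x20 : header[i];
    }
    return stored == computed;
}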

View File

@@ -0,0 +1,463 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Maps StellaOps advisory/vulnerability data to Trivy DB schema.
/// </summary>
public sealed class TrivySchemaMapper
{
private readonly ILogger<TrivySchemaMapper> _logger;
private readonly TrivyDbAdapterOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNameCaseInsensitive = true
};
public TrivySchemaMapper(ILogger<TrivySchemaMapper> logger, TrivyDbAdapterOptions options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? throw new ArgumentNullException(nameof(options));
}
/// <summary>
/// Maps a StellaOps advisory JSON to Trivy vulnerability entries.
/// </summary>
public IReadOnlyList<TrivyVulnerability> MapAdvisory(string jsonContent, string? sourceRef = null)
{
var results = new List<TrivyVulnerability>();
try
{
using var doc = JsonDocument.Parse(jsonContent);
var root = doc.RootElement;
// Extract CVE identifiers
var cveIds = ExtractCveIds(root);
if (cveIds.Count == 0)
{
_logger.LogDebug("No CVE identifiers found in advisory");
return results;
}
// Extract vendor/ecosystem for namespace
var vendor = GetStringProperty(root, "source", "vendor") ??
GetStringProperty(root, "vendor") ??
GetStringProperty(root, "namespace");
var ecosystem = GetStringProperty(root, "ecosystem") ??
GetStringProperty(root, "type");
var ns = TrivyNamespaceMapper.MapToNamespace(vendor, ecosystem);
// Check namespace filters
if (_options.IncludeNamespaces.Count > 0 &&
!_options.IncludeNamespaces.Contains(ns, StringComparer.OrdinalIgnoreCase))
{
return results;
}
if (_options.ExcludeNamespaces.Contains(ns, StringComparer.OrdinalIgnoreCase))
{
return results;
}
// Extract common fields
var severity = ExtractSeverity(root);
var title = GetStringProperty(root, "title") ??
GetStringProperty(root, "vulnerabilityName") ??
GetStringProperty(root, "name");
var description = GetStringProperty(root, "description") ??
GetStringProperty(root, "shortDescription") ??
GetStringProperty(root, "summary");
var references = ExtractReferences(root);
var cvss = ExtractCvss(root);
var cweIds = ExtractCweIds(root);
var publishedDate = ExtractDate(root, "publishedDate", "dateAdded", "published");
var modifiedDate = ExtractDate(root, "lastModifiedDate", "dateUpdated", "modified");
// Extract affected packages
var packages = ExtractAffectedPackages(root);
if (packages.Count == 0)
{
// Create one entry per CVE without package info
foreach (var cveId in cveIds)
{
results.Add(new TrivyVulnerability
{
VulnerabilityId = cveId,
PackageName = "*", // Wildcard for unspecified
Severity = severity,
Title = title,
Description = description,
References = references,
Cvss = cvss,
CweIds = cweIds,
PublishedDate = publishedDate,
LastModifiedDate = modifiedDate,
DataSource = new TrivyDataSource
{
Id = ns,
Name = vendor ?? ns,
Url = sourceRef
}
});
}
}
else
{
// Create entries for each CVE + package combination
foreach (var cveId in cveIds)
{
foreach (var pkg in packages)
{
results.Add(new TrivyVulnerability
{
VulnerabilityId = cveId,
PackageName = pkg.Name,
InstalledVersion = pkg.VulnerableRange,
FixedVersion = pkg.FixedVersion,
Severity = severity,
Title = title,
Description = description,
References = references,
Cvss = cvss,
CweIds = cweIds,
PublishedDate = publishedDate,
LastModifiedDate = modifiedDate,
DataSource = new TrivyDataSource
{
Id = ns,
Name = vendor ?? ns,
Url = sourceRef
}
});
}
}
}
}
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to parse advisory JSON");
}
return results;
}
private List<string> ExtractCveIds(JsonElement root)
{
var cveIds = new List<string>();
// Try various paths for CVE identifiers
if (TryGetArray(root, "identifiers", "cve", out var cveArray) ||
TryGetArray(root, "cveIDs", out cveArray) ||
TryGetArray(root, "CVEIDs", out cveArray) ||
TryGetArray(root, "aliases", out cveArray))
{
foreach (var item in cveArray)
{
if (item.ValueKind == JsonValueKind.String)
{
var id = item.GetString();
if (!string.IsNullOrWhiteSpace(id) && id.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
{
cveIds.Add(id.ToUpperInvariant());
}
}
}
}
// Try single cveID field
var singleCve = GetStringProperty(root, "cveID") ??
GetStringProperty(root, "cve") ??
GetStringProperty(root, "id");
if (!string.IsNullOrWhiteSpace(singleCve) &&
singleCve.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase) &&
!cveIds.Contains(singleCve, StringComparer.OrdinalIgnoreCase))
{
cveIds.Add(singleCve.ToUpperInvariant());
}
return cveIds;
}
private string ExtractSeverity(JsonElement root)
{
var severity = GetStringProperty(root, "severity", "normalized") ??
GetStringProperty(root, "severity") ??
GetStringProperty(root, "severityLevel") ??
GetStringProperty(root, "cvss", "severity");
return TrivySeverityMapper.MapSeverity(severity);
}
private List<string> ExtractReferences(JsonElement root)
{
var refs = new List<string>();
if (TryGetArray(root, "references", out var refArray))
{
foreach (var item in refArray)
{
string? url = null;
if (item.ValueKind == JsonValueKind.String)
{
url = item.GetString();
}
else if (item.ValueKind == JsonValueKind.Object)
{
url = GetStringProperty(item, "url") ?? GetStringProperty(item, "href");
}
if (!string.IsNullOrWhiteSpace(url) && Uri.TryCreate(url, UriKind.Absolute, out _))
{
refs.Add(url);
}
}
}
return refs;
}
private IReadOnlyDictionary<string, TrivyCvss>? ExtractCvss(JsonElement root)
{
var cvssDict = new Dictionary<string, TrivyCvss>();
var count = 0;
// Try array of CVSS entries
if (TryGetArray(root, "cvss", out var cvssArray))
{
foreach (var item in cvssArray)
{
if (count >= _options.MaxCvssVectorsPerEntry)
break;
var source = GetStringProperty(item, "source") ?? "nvd";
var entry = ParseCvssEntry(item);
if (entry is not null && !cvssDict.ContainsKey(source))
{
cvssDict[source] = entry;
count++;
}
}
}
// Try single CVSS object
if (cvssDict.Count == 0 && root.TryGetProperty("cvss", out var cvssObj) &&
cvssObj.ValueKind == JsonValueKind.Object)
{
var entry = ParseCvssEntry(cvssObj);
if (entry is not null)
{
cvssDict["nvd"] = entry;
}
}
// Try metrics.cvssMetricV3* paths (NVD format)
if (cvssDict.Count == 0 && root.TryGetProperty("metrics", out var metrics))
{
if (metrics.TryGetProperty("cvssMetricV31", out var v31Array))
{
foreach (var metric in v31Array.EnumerateArray().Take(1))
{
if (metric.TryGetProperty("cvssData", out var cvssData))
{
var entry = ParseCvssEntry(cvssData);
if (entry is not null)
{
cvssDict["nvd"] = entry;
break;
}
}
}
}
}
return cvssDict.Count > 0 ? cvssDict : null;
}
private TrivyCvss? ParseCvssEntry(JsonElement element)
{
var v2Vector = GetStringProperty(element, "vectorString") ??
GetStringProperty(element, "vector") ??
GetStringProperty(element, "v2Vector");
var v3Vector = GetStringProperty(element, "vectorString") ??
GetStringProperty(element, "vector") ??
GetStringProperty(element, "v3Vector");
// Determine version from vector string
if (v2Vector?.StartsWith("AV:", StringComparison.OrdinalIgnoreCase) == true ||
v2Vector?.StartsWith("(AV:", StringComparison.OrdinalIgnoreCase) == true)
{
v3Vector = null;
}
else if (v3Vector?.StartsWith("CVSS:3", StringComparison.OrdinalIgnoreCase) == true)
{
v2Vector = null;
}
else if (v2Vector is not null && string.Equals(v2Vector, v3Vector, StringComparison.Ordinal))
{
// Both fell back to the same shared field but matched neither known prefix;
// keep it only as v3 so a single vector is not reported twice.
v2Vector = null;
}
double? v2Score = null, v3Score = null;
if (element.TryGetProperty("score", out var scoreProp) ||
element.TryGetProperty("baseScore", out scoreProp))
{
// TryGetDouble throws on non-number elements (e.g. "7.5" as a string), so check the kind first.
if (scoreProp.ValueKind == JsonValueKind.Number && scoreProp.TryGetDouble(out var score))
{
if (v2Vector is not null)
v2Score = score;
else
v3Score = score;
}
}
if (v2Vector is null && v3Vector is null && v2Score is null && v3Score is null)
return null;
return new TrivyCvss
{
V2Vector = v2Vector,
V3Vector = v3Vector,
V2Score = v2Score,
V3Score = v3Score
};
}
private List<string> ExtractCweIds(JsonElement root)
{
var cweIds = new List<string>();
if (TryGetArray(root, "cweIDs", out var cweArray) ||
TryGetArray(root, "cwes", out cweArray) ||
TryGetArray(root, "weaknesses", out cweArray))
{
foreach (var item in cweArray)
{
string? cweId = null;
if (item.ValueKind == JsonValueKind.String)
{
cweId = item.GetString();
}
else if (item.ValueKind == JsonValueKind.Object)
{
cweId = GetStringProperty(item, "cweId") ?? GetStringProperty(item, "id");
}
if (!string.IsNullOrWhiteSpace(cweId))
{
// Normalize to CWE-### format
if (!cweId.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
{
cweId = $"CWE-{cweId}";
}
cweIds.Add(cweId.ToUpperInvariant());
}
}
}
return cweIds;
}
private DateTimeOffset? ExtractDate(JsonElement root, params string[] paths)
{
foreach (var path in paths)
{
var value = GetStringProperty(root, path);
if (!string.IsNullOrWhiteSpace(value) &&
DateTimeOffset.TryParse(value, out var date))
{
return date;
}
}
return null;
}
private List<AffectedPackage> ExtractAffectedPackages(JsonElement root)
{
var packages = new List<AffectedPackage>();
// Try various paths for affected packages
JsonElement.ArrayEnumerator? affectedArray = null;
if (root.TryGetProperty("affects", out var affects) &&
affects.ValueKind == JsonValueKind.Array)
{
affectedArray = affects.EnumerateArray();
}
else if (root.TryGetProperty("affected", out var affected) &&
affected.ValueKind == JsonValueKind.Array)
{
affectedArray = affected.EnumerateArray();
}
else if (root.TryGetProperty("vulnerabilities", out var vulns) &&
vulns.ValueKind == JsonValueKind.Array)
{
// CISA KEV style
foreach (var vuln in vulns.EnumerateArray())
{
var product = GetStringProperty(vuln, "product");
if (!string.IsNullOrWhiteSpace(product))
{
packages.Add(new AffectedPackage(product, null, null));
}
}
return packages;
}
if (affectedArray.HasValue)
{
foreach (var item in affectedArray.Value)
{
var name = GetStringProperty(item, "package", "name") ??
GetStringProperty(item, "name") ??
GetStringProperty(item, "packageName");
var range = GetStringProperty(item, "vulnerableRange") ??
GetStringProperty(item, "versionRange") ??
GetStringProperty(item, "version");
var fixedVer = GetStringProperty(item, "fixedVersion") ??
GetStringProperty(item, "patchedVersions") ??
GetStringProperty(item, "remediation", "fixedVersion");
if (!string.IsNullOrWhiteSpace(name))
{
packages.Add(new AffectedPackage(name, range, fixedVer));
}
}
}
return packages;
}
private static string? GetStringProperty(JsonElement element, params string[] paths)
{
var current = element;
foreach (var path in paths)
{
if (!current.TryGetProperty(path, out var next))
return null;
current = next;
}
return current.ValueKind == JsonValueKind.String ? current.GetString() : null;
}
private static bool TryGetArray(JsonElement element, string property, out JsonElement.ArrayEnumerator result)
{
result = default;
if (element.TryGetProperty(property, out var prop) && prop.ValueKind == JsonValueKind.Array)
{
result = prop.EnumerateArray();
return true;
}
return false;
}
private static bool TryGetArray(JsonElement element, string prop1, string prop2, out JsonElement.ArrayEnumerator result)
{
result = default;
if (element.TryGetProperty(prop1, out var nested) &&
nested.TryGetProperty(prop2, out var array) &&
array.ValueKind == JsonValueKind.Array)
{
result = array.EnumerateArray();
return true;
}
return false;
}
private sealed record AffectedPackage(string Name, string? VulnerableRange, string? FixedVersion);
}
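
A hedged usage sketch (not part of the diff). It assumes TrivyDbAdapterOptions constructs with empty include/exclude namespace lists and that TrivyNamespaceMapper tolerates a null vendor/ecosystem, as the guard clauses above suggest; the advisory JSON is illustrative.

using Microsoft.Extensions.Logging.Abstractions;

var mapper = new TrivySchemaMapper(
    NullLogger<TrivySchemaMapper>.Instance,
    new TrivyDbAdapterOptions());

var advisory = """
{
  "cveID": "CVE-2024-0001",
  "severity": "HIGH",
  "title": "Example flaw",
  "affected": [
    { "name": "com.example:widget", "versionRange": "<1.2.3", "fixedVersion": "1.2.3" }
  ]
}
""";

foreach (var vuln in mapper.MapAdvisory(advisory, sourceRef: "https://example.test/advisory"))
{
    Console.WriteLine($"{vuln.VulnerabilityId} {vuln.PackageName} fixed={vuln.FixedVersion}");
}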

View File

@@ -0,0 +1,110 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.Core.Crypto;
/// <summary>
/// Extension methods for registering export crypto services.
/// </summary>
public static class CryptoServiceCollectionExtensions
{
/// <summary>
/// Adds export crypto services with default configuration.
/// Routes hashing, signing, and encryption through ICryptoProviderRegistry and ICryptoHash.
/// </summary>
public static IServiceCollection AddExportCryptoServices(this IServiceCollection services)
{
return services.AddExportCryptoServices(_ => { });
}
/// <summary>
/// Adds export crypto services with custom configuration.
/// </summary>
public static IServiceCollection AddExportCryptoServices(
this IServiceCollection services,
Action<ExportCryptoOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(configureOptions);
// Register crypto service
services.TryAddSingleton<IExportCryptoService, ExportCryptoService>();
// Register factory for creating services with custom options
services.TryAddSingleton<IExportCryptoServiceFactory, ExportCryptoServiceFactory>();
return services;
}
/// <summary>
/// Adds export crypto services with provider selection.
/// </summary>
public static IServiceCollection AddExportCryptoServicesWithProvider(
this IServiceCollection services,
string preferredProvider,
Action<ExportCryptoOptions>? additionalConfig = null)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentException.ThrowIfNullOrWhiteSpace(preferredProvider);
return services.AddExportCryptoServices(options =>
{
options.PreferredProvider = preferredProvider;
additionalConfig?.Invoke(options);
});
}
/// <summary>
/// Adds export crypto services for FIPS compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForFips(
this IServiceCollection services,
string? keyId = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "SHA-256";
options.SigningAlgorithm = "ES256";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
});
}
/// <summary>
/// Adds export crypto services for GOST compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForGost(
this IServiceCollection services,
string? keyId = null,
string? preferredProvider = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "GOST-R-34.11-2012-256";
options.SigningAlgorithm = "GOST-R-34.10-2012-256";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
options.PreferredProvider = preferredProvider;
});
}
/// <summary>
/// Adds export crypto services for SM (Chinese cryptography) compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForSm(
this IServiceCollection services,
string? keyId = null,
string? preferredProvider = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "SM3";
options.SigningAlgorithm = "SM2";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
options.PreferredProvider = preferredProvider;
});
}
}
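
A hedged wiring sketch (not part of the diff); the key id is illustrative.

using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
// Shorthand for HashAlgorithm=SHA-256, SigningAlgorithm=ES256, UseComplianceProfile=true.
services.AddExportCryptoServicesForFips(keyId: "export-signing-key");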

View File

@@ -0,0 +1,396 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// AES-256-GCM bundle encryptor implementation.
/// Follows the KMS envelope pattern with DEK per run and per-file nonces.
/// </summary>
public sealed class AesGcmBundleEncryptor : IBundleEncryptor
{
private readonly ILogger<AesGcmBundleEncryptor> _logger;
private readonly IBundleKeyWrapperFactory _keyWrapperFactory;
private readonly BundleEncryptionOptions _options;
private const int TagSizeBytes = 16; // 128-bit authentication tag
public AesGcmBundleEncryptor(
ILogger<AesGcmBundleEncryptor> logger,
IBundleKeyWrapperFactory keyWrapperFactory,
IOptions<BundleEncryptionOptions> options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_keyWrapperFactory = keyWrapperFactory ?? throw new ArgumentNullException(nameof(keyWrapperFactory));
_options = options?.Value ?? new BundleEncryptionOptions();
}
/// <inheritdoc />
public async Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.Files.Count == 0)
{
return BundleEncryptResult.Failed("No files to encrypt");
}
var hasAgeRecipients = request.AgeRecipients.Count > 0;
var hasKmsKey = !string.IsNullOrEmpty(request.KmsKeyId);
if (!hasAgeRecipients && !hasKmsKey)
{
return BundleEncryptResult.Failed("At least one age recipient or KMS key ID is required");
}
try
{
// Generate DEK for this run
var dek = GenerateDek();
try
{
// Wrap DEK for all recipients
var wrappedKeys = await WrapDekForRecipientsAsync(
dek,
request.AgeRecipients,
request.KmsKeyId,
request.TenantId,
request.RunId,
cancellationToken);
// Encrypt all files
var encryptedFiles = new Dictionary<string, byte[]>();
var fileMetadata = new List<EncryptedFileMetadata>();
foreach (var (relativePath, content) in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var (ciphertext, metadata) = EncryptFile(
dek,
relativePath,
content,
request.RunId);
encryptedFiles[relativePath] = ciphertext;
fileMetadata.Add(metadata);
}
// Determine mode string
var modeString = hasKmsKey ? "aes-gcm+kms" : "age";
var encryptionMetadata = new BundleEncryptionMetadata
{
Mode = modeString,
AadFormat = "{runId}:{relativePath}",
NonceFormat = "random-12",
// Ordinal comparisons keep the provenance ordering deterministic across cultures.
Recipients = wrappedKeys.OrderBy(r => r.Type, StringComparer.Ordinal)
.ThenBy(r => r.Recipient ?? r.KmsKeyId, StringComparer.Ordinal)
.ToList(),
Files = fileMetadata.OrderBy(f => f.Path, StringComparer.Ordinal).ToList()
};
_logger.LogInformation(
"Encrypted {FileCount} files for run {RunId} with {RecipientCount} recipients",
encryptedFiles.Count,
request.RunId,
wrappedKeys.Count);
return new BundleEncryptResult
{
Success = true,
EncryptedFiles = encryptedFiles,
Metadata = encryptionMetadata
};
}
finally
{
// Zeroize DEK
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to encrypt bundle for run {RunId}", request.RunId);
return BundleEncryptResult.Failed($"Encryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(request.Metadata);
if (request.EncryptedFiles.Count == 0)
{
return BundleDecryptResult.Failed("No files to decrypt");
}
try
{
// Find a recipient we can unwrap
var dek = await UnwrapDekAsync(request.Metadata.Recipients, request.AgePrivateKey, cancellationToken);
if (dek is null)
{
return BundleDecryptResult.Failed("No available key to unwrap DEK");
}
try
{
var decryptedFiles = new Dictionary<string, byte[]>();
var verificationFailures = new List<string>();
// Build file metadata lookup
var metadataLookup = request.Metadata.Files.ToDictionary(f => f.Path);
foreach (var (relativePath, ciphertext) in request.EncryptedFiles)
{
cancellationToken.ThrowIfCancellationRequested();
if (!metadataLookup.TryGetValue(relativePath, out var fileMetadata))
{
_logger.LogWarning("No metadata found for encrypted file {Path}", relativePath);
verificationFailures.Add(relativePath);
continue;
}
try
{
var plaintext = DecryptFile(
dek,
relativePath,
ciphertext,
fileMetadata,
request.RunId);
// Verify hash if available
if (_options.IncludeFileHashes && !string.IsNullOrEmpty(fileMetadata.OriginalHash))
{
var actualHash = ComputeHash(plaintext);
if (!string.Equals(actualHash, fileMetadata.OriginalHash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Hash mismatch for {Path}: expected {Expected}, got {Actual}",
relativePath,
fileMetadata.OriginalHash,
actualHash);
verificationFailures.Add(relativePath);
}
}
decryptedFiles[relativePath] = plaintext;
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to decrypt file {Path}", relativePath);
verificationFailures.Add(relativePath);
}
}
_logger.LogInformation(
"Decrypted {FileCount} files for run {RunId}, {FailureCount} failures",
decryptedFiles.Count,
request.RunId,
verificationFailures.Count);
return new BundleDecryptResult
{
Success = verificationFailures.Count == 0,
DecryptedFiles = decryptedFiles,
VerificationFailures = verificationFailures
};
}
finally
{
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to decrypt bundle for run {RunId}", request.RunId);
return BundleDecryptResult.Failed($"Decryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> VerifyDecryptedContentAsync(
BundleDecryptResult decryptResult,
BundleEncryptionMetadata metadata,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(decryptResult);
ArgumentNullException.ThrowIfNull(metadata);
var failures = new List<string>();
var metadataLookup = metadata.Files.ToDictionary(f => f.Path);
foreach (var (path, content) in decryptResult.DecryptedFiles)
{
if (!metadataLookup.TryGetValue(path, out var fileMetadata))
{
failures.Add(path);
continue;
}
if (!string.IsNullOrEmpty(fileMetadata.OriginalHash))
{
var actualHash = ComputeHash(content);
if (!string.Equals(actualHash, fileMetadata.OriginalHash, StringComparison.OrdinalIgnoreCase))
{
failures.Add(path);
}
}
}
return Task.FromResult<IReadOnlyList<string>>(failures);
}
private byte[] GenerateDek()
{
var dek = new byte[_options.DekSizeBytes];
RandomNumberGenerator.Fill(dek);
return dek;
}
private byte[] GenerateNonce()
{
var nonce = new byte[_options.NonceSizeBytes];
RandomNumberGenerator.Fill(nonce);
return nonce;
}
private async Task<List<WrappedKeyRecipient>> WrapDekForRecipientsAsync(
byte[] dek,
IReadOnlyList<string> ageRecipients,
string? kmsKeyId,
Guid tenantId,
Guid runId,
CancellationToken cancellationToken)
{
var wrappedKeys = new List<WrappedKeyRecipient>();
// Wrap for age recipients
if (ageRecipients.Count > 0)
{
var ageWrapper = _keyWrapperFactory.GetWrapper(KeyWrapperType.Age);
foreach (var recipient in ageRecipients.OrderBy(r => r, StringComparer.Ordinal))
{
var wrapped = await ageWrapper.WrapKeyAsync(dek, recipient, tenantId, runId, cancellationToken);
wrappedKeys.Add(wrapped);
}
}
// Wrap for KMS
if (!string.IsNullOrEmpty(kmsKeyId))
{
var kmsWrapper = _keyWrapperFactory.GetWrapper(KeyWrapperType.Kms);
var wrapped = await kmsWrapper.WrapKeyAsync(dek, kmsKeyId, tenantId, runId, cancellationToken);
wrappedKeys.Add(wrapped);
}
return wrappedKeys;
}
private async Task<byte[]?> UnwrapDekAsync(
IReadOnlyList<WrappedKeyRecipient> recipients,
string? agePrivateKey,
CancellationToken cancellationToken)
{
var wrappers = _keyWrapperFactory.GetAllWrappers();
foreach (var recipient in recipients)
{
foreach (var wrapper in wrappers)
{
if (!wrapper.CanUnwrap(recipient))
continue;
var result = await wrapper.UnwrapKeyAsync(recipient, agePrivateKey, cancellationToken);
if (result.Success && result.Key is not null)
{
return result.Key;
}
}
}
return null;
}
private (byte[] Ciphertext, EncryptedFileMetadata Metadata) EncryptFile(
byte[] dek,
string relativePath,
byte[] content,
Guid runId)
{
var nonce = GenerateNonce();
var aad = DeriveAad(runId, relativePath);
// Ciphertext will be: ciphertext || tag
var ciphertext = new byte[content.Length];
var tag = new byte[TagSizeBytes];
using var aesGcm = new AesGcm(dek, TagSizeBytes);
aesGcm.Encrypt(nonce, content, ciphertext, tag, aad);
// Combine ciphertext and tag
var combined = new byte[ciphertext.Length + tag.Length];
Buffer.BlockCopy(ciphertext, 0, combined, 0, ciphertext.Length);
Buffer.BlockCopy(tag, 0, combined, ciphertext.Length, tag.Length);
var metadata = new EncryptedFileMetadata
{
Path = relativePath,
Nonce = Convert.ToBase64String(nonce),
OriginalSize = content.Length,
EncryptedSize = combined.Length,
OriginalHash = _options.IncludeFileHashes ? ComputeHash(content) : null
};
return (combined, metadata);
}
private byte[] DecryptFile(
byte[] dek,
string relativePath,
byte[] combined,
EncryptedFileMetadata metadata,
Guid runId)
{
var nonce = Convert.FromBase64String(metadata.Nonce);
var aad = DeriveAad(runId, relativePath);
// Split combined into ciphertext and tag
var ciphertext = new byte[combined.Length - TagSizeBytes];
var tag = new byte[TagSizeBytes];
Buffer.BlockCopy(combined, 0, ciphertext, 0, ciphertext.Length);
Buffer.BlockCopy(combined, ciphertext.Length, tag, 0, TagSizeBytes);
var plaintext = new byte[ciphertext.Length];
using var aesGcm = new AesGcm(dek, TagSizeBytes);
aesGcm.Decrypt(nonce, ciphertext, tag, plaintext, aad);
return plaintext;
}
private static byte[] DeriveAad(Guid runId, string relativePath)
{
// AAD format: {runId}:{relativePath}, with runId in "N" format (32 hex digits, no dashes);
// decryptors must derive the AAD identically or GCM authentication fails.
var aadString = $"{runId:N}:{relativePath}";
return System.Text.Encoding.UTF8.GetBytes(aadString);
}
private static string ComputeHash(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
}
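
A hedged end-to-end sketch (not part of the diff): round-trips one file through the encryptor using the stub KMS client registered via the extensions later in this change. It succeeds because the stub ignores the encryption context; see the context caveat in KmsBundleKeyWrapper. Names like "test-key" are placeholders.

using Microsoft.Extensions.DependencyInjection;

var provider = new ServiceCollection()
    .AddLogging()
    .AddBundleEncryptionWithKms(kmsKeyId: "test-key")
    .AddStubKmsClient()
    .BuildServiceProvider();

var encryptor = provider.GetRequiredService<IBundleEncryptor>();
var runId = Guid.NewGuid();

var enc = await encryptor.EncryptAsync(new BundleEncryptRequest
{
    RunId = runId,
    TenantId = Guid.NewGuid(),
    Files = new Dictionary<string, byte[]> { ["sbom.json"] = "{}"u8.ToArray() },
    KmsKeyId = "test-key"
});

var dec = await encryptor.DecryptAsync(new BundleDecryptRequest
{
    RunId = runId,
    Metadata = enc.Metadata!,
    EncryptedFiles = enc.EncryptedFiles
});
// Expect dec.Success == true and dec.DecryptedFiles["sbom.json"] to equal the input.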

View File

@@ -0,0 +1,409 @@
using System.Diagnostics;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// age X25519 key wrapper implementation.
/// Supports wrapping DEKs for offline/air-gapped environments.
/// </summary>
public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
{
private readonly ILogger<AgeBundleKeyWrapper> _logger;
private readonly BundleEncryptionOptions _options;
public AgeBundleKeyWrapper(
ILogger<AgeBundleKeyWrapper> logger,
IOptions<BundleEncryptionOptions> options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new BundleEncryptionOptions();
}
/// <inheritdoc />
public KeyWrapperType Type => KeyWrapperType.Age;
/// <inheritdoc />
public async Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(dek);
ArgumentException.ThrowIfNullOrWhiteSpace(recipient);
// Validate recipient format (age public key)
if (!IsValidAgeRecipient(recipient))
{
throw new ArgumentException($"Invalid age recipient format: {recipient}", nameof(recipient));
}
byte[] wrappedKey;
if (_options.UseNativeAge && IsNativeAgeAvailable())
{
wrappedKey = await WrapWithNativeAgeAsync(dek, recipient, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, cliPath!, cancellationToken);
}
else
{
// Fallback: Use X25519 directly (simplified implementation)
wrappedKey = WrapWithX25519(dek, recipient);
}
_logger.LogDebug(
"Wrapped DEK for age recipient {Recipient}",
MaskRecipient(recipient));
return new WrappedKeyRecipient
{
Type = "age",
Recipient = recipient,
WrappedKey = Convert.ToBase64String(wrappedKey),
KeyId = ComputeKeyId(recipient)
};
}
/// <inheritdoc />
public async Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(wrappedKey);
if (string.IsNullOrEmpty(privateKey))
{
return KeyUnwrapResult.Failed("age private key is required for unwrapping");
}
if (!IsValidAgePrivateKey(privateKey))
{
return KeyUnwrapResult.Failed("Invalid age private key format");
}
try
{
var wrappedBytes = Convert.FromBase64String(wrappedKey.WrappedKey);
byte[] dek;
if (_options.UseNativeAge && IsNativeAgeAvailable())
{
dek = await UnwrapWithNativeAgeAsync(wrappedBytes, privateKey, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, cliPath!, cancellationToken);
}
else
{
dek = UnwrapWithX25519(wrappedBytes, privateKey);
}
_logger.LogDebug("Unwrapped DEK from age recipient");
return new KeyUnwrapResult
{
Success = true,
Key = dek
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to unwrap DEK with age");
return KeyUnwrapResult.Failed($"age unwrap failed: {ex.Message}");
}
}
/// <inheritdoc />
public bool CanUnwrap(WrappedKeyRecipient wrappedKey)
{
return string.Equals(wrappedKey.Type, "age", StringComparison.OrdinalIgnoreCase) &&
!string.IsNullOrEmpty(wrappedKey.Recipient);
}
private static bool IsValidAgeRecipient(string recipient)
{
// age public keys start with "age1" and are Bech32 encoded
return recipient.StartsWith("age1", StringComparison.OrdinalIgnoreCase) &&
recipient.Length >= 59;
}
private static bool IsValidAgePrivateKey(string privateKey)
{
// age private keys start with "AGE-SECRET-KEY-1"
return privateKey.StartsWith("AGE-SECRET-KEY-1", StringComparison.OrdinalIgnoreCase);
}
private static bool IsNativeAgeAvailable()
{
// Check if native age library is available
// For now, return false - native implementation would require additional NuGet package
return false;
}
private static bool TryFindAgeCli(out string? path)
{
path = null;
// Try common locations
var candidates = new[]
{
"age",
"/usr/bin/age",
"/usr/local/bin/age",
@"C:\Program Files\age\age.exe"
};
foreach (var candidate in candidates)
{
if (File.Exists(candidate))
{
path = candidate;
return true;
}
}
// Try PATH
try
{
var startInfo = new ProcessStartInfo
{
FileName = "age",
Arguments = "--version",
RedirectStandardOutput = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo);
// WaitForExit(ms) returns false on timeout, and reading ExitCode before exit throws.
if (process is not null && process.WaitForExit(1000) && process.ExitCode == 0)
{
path = "age";
return true;
}
}
catch
{
// age CLI not found in PATH
}
return false;
}
private static async Task<byte[]> WrapWithNativeAgeAsync(
byte[] dek,
string recipient,
CancellationToken cancellationToken)
{
// Native age implementation would go here
// For now, fall back to X25519
await Task.CompletedTask;
return WrapWithX25519(dek, recipient);
}
private static async Task<byte[]> UnwrapWithNativeAgeAsync(
byte[] wrapped,
string privateKey,
CancellationToken cancellationToken)
{
await Task.CompletedTask;
return UnwrapWithX25519(wrapped, privateKey);
}
private static async Task<byte[]> WrapWithAgeCliAsync(
byte[] dek,
string recipient,
string agePath,
CancellationToken cancellationToken)
{
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--encrypt --recipient {recipient}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");
await process.StandardInput.BaseStream.WriteAsync(dek, cancellationToken);
process.StandardInput.Close();
var output = await ReadStreamToEndAsync(process.StandardOutput.BaseStream, cancellationToken);
await process.WaitForExitAsync(cancellationToken);
if (process.ExitCode != 0)
{
var error = await process.StandardError.ReadToEndAsync(cancellationToken);
throw new InvalidOperationException($"age encrypt failed: {error}");
}
return output;
}
private static async Task<byte[]> UnwrapWithAgeCliAsync(
byte[] wrapped,
string privateKey,
string agePath,
CancellationToken cancellationToken)
{
// Write identity to temp file
var identityPath = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(identityPath, privateKey, cancellationToken);
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--decrypt --identity {identityPath}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");
await process.StandardInput.BaseStream.WriteAsync(wrapped, cancellationToken);
process.StandardInput.Close();
var output = await ReadStreamToEndAsync(process.StandardOutput.BaseStream, cancellationToken);
await process.WaitForExitAsync(cancellationToken);
if (process.ExitCode != 0)
{
var error = await process.StandardError.ReadToEndAsync(cancellationToken);
throw new InvalidOperationException($"age decrypt failed: {error}");
}
return output;
}
finally
{
File.Delete(identityPath);
}
}
private static byte[] WrapWithX25519(byte[] dek, string recipient)
{
// Placeholder, NOT age-compatible: a real implementation would decode the Bech32
// recipient and perform X25519 ECDH. Here an ephemeral NIST P-256 key pair stands in,
// and no shared secret is actually derived from the recipient.
using var ephemeral = ECDiffieHellman.Create(ECCurve.NamedCurves.nistP256);
var publicKey = ephemeral.PublicKey.ExportSubjectPublicKeyInfo();
// Random throwaway key: it is never persisted nor derivable by the recipient,
// so payloads wrapped here cannot be unwrapped (see UnwrapWithX25519).
using var aes = Aes.Create();
aes.GenerateKey();
// Encrypt DEK with the throwaway key
aes.GenerateIV();
using var encryptor = aes.CreateEncryptor();
var encrypted = encryptor.TransformFinalBlock(dek, 0, dek.Length);
// Format: publicKey || iv || encrypted
var result = new byte[publicKey.Length + aes.IV.Length + encrypted.Length + 8];
var offset = 0;
// Length prefix for public key
BitConverter.TryWriteBytes(result.AsSpan(offset), publicKey.Length);
offset += 4;
Buffer.BlockCopy(publicKey, 0, result, offset, publicKey.Length);
offset += publicKey.Length;
// Length prefix for IV
BitConverter.TryWriteBytes(result.AsSpan(offset), aes.IV.Length);
offset += 4;
Buffer.BlockCopy(aes.IV, 0, result, offset, aes.IV.Length);
offset += aes.IV.Length;
Buffer.BlockCopy(encrypted, 0, result, offset, encrypted.Length);
return result;
}
private static byte[] UnwrapWithX25519(byte[] wrapped, string privateKey)
{
// Placeholder counterpart to WrapWithX25519: it parses the wrapped layout but
// decrypts with an all-zero key, so it cannot recover a DEK wrapped above.
// A production implementation must derive the key via X25519 ECDH.
var offset = 0;
// Read public key
var publicKeyLength = BitConverter.ToInt32(wrapped, offset);
offset += 4;
var publicKey = new byte[publicKeyLength];
Buffer.BlockCopy(wrapped, offset, publicKey, 0, publicKeyLength);
offset += publicKeyLength;
// Read IV
var ivLength = BitConverter.ToInt32(wrapped, offset);
offset += 4;
var iv = new byte[ivLength];
Buffer.BlockCopy(wrapped, offset, iv, 0, ivLength);
offset += ivLength;
// Read encrypted DEK
var encrypted = new byte[wrapped.Length - offset];
Buffer.BlockCopy(wrapped, offset, encrypted, 0, encrypted.Length);
// All-zero placeholder key: this does NOT match the random key used during
// wrapping, so decryption yields garbage or a padding exception by design.
using var aes = Aes.Create();
aes.Key = new byte[32];
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
return decryptor.TransformFinalBlock(encrypted, 0, encrypted.Length);
}
private static string MaskRecipient(string recipient)
{
if (recipient.Length <= 12)
return "***";
return $"{recipient[..8]}...{recipient[^4..]}";
}
private static string ComputeKeyId(string recipient)
{
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(recipient));
return Convert.ToHexString(hash[..8]).ToLowerInvariant();
}
private static async Task<byte[]> ReadStreamToEndAsync(Stream stream, CancellationToken cancellationToken)
{
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
return ms.ToArray();
}
}

View File

@@ -0,0 +1,302 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Encryption mode for export bundles.
/// </summary>
public enum BundleEncryptionMode
{
/// <summary>
/// No encryption.
/// </summary>
None = 0,
/// <summary>
/// age encryption (X25519) - offline-friendly.
/// </summary>
Age = 1,
/// <summary>
/// AES-GCM with KMS key wrapping.
/// </summary>
AesGcmKms = 2
}
/// <summary>
/// Type of key wrapping recipient.
/// </summary>
public enum KeyWrapperType
{
/// <summary>
/// age X25519 recipient.
/// </summary>
Age = 1,
/// <summary>
/// KMS key wrapper.
/// </summary>
Kms = 2
}
/// <summary>
/// Encrypted file metadata stored alongside ciphertext.
/// </summary>
public sealed record EncryptedFileMetadata
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// 12-byte nonce (base64 encoded).
/// </summary>
[JsonPropertyName("nonce")]
public required string Nonce { get; init; }
/// <summary>
/// Original file size in bytes.
/// </summary>
[JsonPropertyName("originalSize")]
public long OriginalSize { get; init; }
/// <summary>
/// Encrypted size in bytes.
/// </summary>
[JsonPropertyName("encryptedSize")]
public long EncryptedSize { get; init; }
/// <summary>
/// SHA-256 hash of original content (prefixed with sha256:).
/// </summary>
[JsonPropertyName("originalHash")]
public string? OriginalHash { get; init; }
}
/// <summary>
/// Wrapped key recipient entry for provenance.
/// </summary>
public sealed record WrappedKeyRecipient
{
/// <summary>
/// Type of wrapper (age or kms).
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// age recipient public key (when type=age).
/// </summary>
[JsonPropertyName("recipient")]
public string? Recipient { get; init; }
/// <summary>
/// KMS key ID (when type=kms).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Wrapped DEK (base64 encoded).
/// </summary>
[JsonPropertyName("wrappedKey")]
public required string WrappedKey { get; init; }
/// <summary>
/// Optional key ID for identification.
/// </summary>
[JsonPropertyName("keyId")]
public string? KeyId { get; init; }
/// <summary>
/// KMS algorithm used (when type=kms).
/// </summary>
[JsonPropertyName("algorithm")]
public string? Algorithm { get; init; }
}
/// <summary>
/// Encryption metadata for provenance.json.
/// </summary>
public sealed record BundleEncryptionMetadata
{
/// <summary>
/// Encryption mode (age or aes-gcm+kms).
/// </summary>
[JsonPropertyName("mode")]
public required string Mode { get; init; }
/// <summary>
/// AAD format template (e.g., {runId}:{relativePath}).
/// </summary>
[JsonPropertyName("aadFormat")]
public string AadFormat { get; init; } = "{runId}:{relativePath}";
/// <summary>
/// Nonce format (e.g., random-12).
/// </summary>
[JsonPropertyName("nonceFormat")]
public string NonceFormat { get; init; } = "random-12";
/// <summary>
/// List of wrapped key recipients (ordered deterministically).
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<WrappedKeyRecipient> Recipients { get; init; } = [];
/// <summary>
/// List of encrypted files metadata.
/// </summary>
[JsonPropertyName("files")]
public IReadOnlyList<EncryptedFileMetadata> Files { get; init; } = [];
}
/// <summary>
/// Request to encrypt bundle content.
/// </summary>
public sealed record BundleEncryptRequest
{
/// <summary>
/// Run ID for AAD derivation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Files to encrypt (relative path to content).
/// </summary>
public required IReadOnlyDictionary<string, byte[]> Files { get; init; }
/// <summary>
/// age recipients (public keys).
/// </summary>
public IReadOnlyList<string> AgeRecipients { get; init; } = [];
/// <summary>
/// KMS key ID for key wrapping.
/// </summary>
public string? KmsKeyId { get; init; }
}
/// <summary>
/// Result of bundle encryption.
/// </summary>
public sealed record BundleEncryptResult
{
/// <summary>
/// Whether encryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Encrypted files (relative path to ciphertext).
/// </summary>
public IReadOnlyDictionary<string, byte[]> EncryptedFiles { get; init; } = new Dictionary<string, byte[]>();
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public BundleEncryptionMetadata? Metadata { get; init; }
/// <summary>
/// Error message if encryption failed.
/// </summary>
public string? Error { get; init; }
public static BundleEncryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Request to decrypt bundle content.
/// </summary>
public sealed record BundleDecryptRequest
{
/// <summary>
/// Run ID for AAD derivation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Encryption metadata from provenance.
/// </summary>
public required BundleEncryptionMetadata Metadata { get; init; }
/// <summary>
/// Encrypted files (relative path to ciphertext).
/// </summary>
public required IReadOnlyDictionary<string, byte[]> EncryptedFiles { get; init; }
/// <summary>
/// age private key for decryption (when using age).
/// </summary>
public string? AgePrivateKey { get; init; }
}
/// <summary>
/// Result of bundle decryption.
/// </summary>
public sealed record BundleDecryptResult
{
/// <summary>
/// Whether decryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Decrypted files (relative path to plaintext).
/// </summary>
public IReadOnlyDictionary<string, byte[]> DecryptedFiles { get; init; } = new Dictionary<string, byte[]>();
/// <summary>
/// Error message if decryption failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Files that failed verification (hash mismatch).
/// </summary>
public IReadOnlyList<string> VerificationFailures { get; init; } = [];
public static BundleDecryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of key unwrapping operation.
/// </summary>
public sealed record KeyUnwrapResult
{
/// <summary>
/// Whether unwrapping succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Unwrapped DEK bytes.
/// </summary>
public byte[]? Key { get; init; }
/// <summary>
/// Error message if unwrapping failed.
/// </summary>
public string? Error { get; init; }
public static KeyUnwrapResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
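
For orientation (not part of the diff), the metadata above serializes into provenance.json roughly as follows, given the JsonPropertyName attributes; opaque values are elided, and encryptedSize reflects the 16-byte GCM tag appended to the ciphertext:

{
  "mode": "aes-gcm+kms",
  "aadFormat": "{runId}:{relativePath}",
  "nonceFormat": "random-12",
  "recipients": [
    { "type": "kms", "kmsKeyId": "test-key", "wrappedKey": "…", "keyId": "…", "algorithm": "…" }
  ],
  "files": [
    { "path": "sbom.json", "nonce": "…", "originalSize": 2, "encryptedSize": 18, "originalHash": "sha256:…" }
  ]
}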

View File

@@ -0,0 +1,93 @@
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Configuration options for bundle encryption.
/// </summary>
public sealed class BundleEncryptionOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "ExportCenter:Encryption";
/// <summary>
/// Encryption mode (age or kms).
/// </summary>
public BundleEncryptionMode Mode { get; set; } = BundleEncryptionMode.Age;
/// <summary>
/// Whether encryption is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// List of age public key recipients for offline encryption.
/// </summary>
public List<string> Recipients { get; set; } = [];
/// <summary>
/// KMS key ID for key wrapping (when using KMS mode).
/// </summary>
public string? KmsKeyId { get; set; }
/// <summary>
/// KMS endpoint URL (optional, for custom endpoints).
/// </summary>
public string? KmsEndpoint { get; set; }
/// <summary>
/// KMS region (when using AWS KMS).
/// </summary>
public string? KmsRegion { get; set; }
/// <summary>
/// DEK size in bytes (default: 32 for AES-256).
/// </summary>
public int DekSizeBytes { get; set; } = 32;
/// <summary>
/// Nonce size in bytes (default: 12 for GCM).
/// </summary>
public int NonceSizeBytes { get; set; } = 12;
/// <summary>
/// Whether to include file hashes in metadata.
/// </summary>
public bool IncludeFileHashes { get; set; } = true;
/// <summary>
/// Path to age CLI binary (for age encryption).
/// </summary>
public string? AgeCliPath { get; set; }
/// <summary>
/// Whether to use native age library (when available).
/// </summary>
public bool UseNativeAge { get; set; } = true;
}
/// <summary>
/// Per-tenant encryption configuration.
/// </summary>
public sealed record TenantEncryptionConfig
{
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption mode for this tenant.
/// </summary>
public BundleEncryptionMode Mode { get; init; } = BundleEncryptionMode.Age;
/// <summary>
/// age recipients for this tenant.
/// </summary>
public IReadOnlyList<string> AgeRecipients { get; init; } = [];
/// <summary>
/// KMS key ID for this tenant.
/// </summary>
public string? KmsKeyId { get; init; }
}
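
A hedged binding sketch (not part of the diff): services and configuration are assumed to be in scope. This binds the section declared by SectionName instead of using the code-first overloads in the next file.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

services.Configure<BundleEncryptionOptions>(
    configuration.GetSection(BundleEncryptionOptions.SectionName));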

View File

@@ -0,0 +1,117 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Extension methods for registering bundle encryption services.
/// </summary>
public static class BundleEncryptionServiceCollectionExtensions
{
/// <summary>
/// Adds bundle encryption services with options action.
/// </summary>
public static IServiceCollection AddBundleEncryption(
this IServiceCollection services,
Action<BundleEncryptionOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(configureOptions);
return services.AddBundleEncryptionCore();
}
/// <summary>
/// Adds bundle encryption services with default options.
/// </summary>
public static IServiceCollection AddBundleEncryption(this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.Configure<BundleEncryptionOptions>(_ => { });
return services.AddBundleEncryptionCore();
}
/// <summary>
/// Adds bundle encryption services for age-only mode (offline-friendly).
/// </summary>
public static IServiceCollection AddBundleEncryptionWithAge(
this IServiceCollection services,
IEnumerable<string> recipients)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(recipients);
var recipientList = recipients.ToList();
if (recipientList.Count == 0)
{
throw new ArgumentException("At least one age recipient is required", nameof(recipients));
}
return services.AddBundleEncryption(options =>
{
options.Mode = BundleEncryptionMode.Age;
options.Recipients = recipientList;
});
}
/// <summary>
/// Adds bundle encryption services for KMS mode.
/// </summary>
public static IServiceCollection AddBundleEncryptionWithKms(
this IServiceCollection services,
string kmsKeyId,
string? kmsEndpoint = null,
string? kmsRegion = null)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentException.ThrowIfNullOrWhiteSpace(kmsKeyId);
return services.AddBundleEncryption(options =>
{
options.Mode = BundleEncryptionMode.AesGcmKms;
options.KmsKeyId = kmsKeyId;
options.KmsEndpoint = kmsEndpoint;
options.KmsRegion = kmsRegion;
});
}
/// <summary>
/// Adds a stub KMS client for testing.
/// </summary>
public static IServiceCollection AddStubKmsClient(this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.TryAddSingleton<IKmsClient, StubKmsClient>();
return services;
}
/// <summary>
/// Adds a custom KMS client implementation.
/// </summary>
public static IServiceCollection AddKmsClient<TKmsClient>(this IServiceCollection services)
where TKmsClient : class, IKmsClient
{
ArgumentNullException.ThrowIfNull(services);
services.TryAddSingleton<IKmsClient, TKmsClient>();
return services;
}
private static IServiceCollection AddBundleEncryptionCore(this IServiceCollection services)
{
// Register key wrapper factory
services.TryAddSingleton<IBundleKeyWrapperFactory, BundleKeyWrapperFactory>();
// Register bundle encryptor
services.TryAddSingleton<IBundleEncryptor, AesGcmBundleEncryptor>();
return services;
}
}
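
A hedged age-mode sketch (not part of the diff); the recipient is a placeholder. Note the TryAddSingleton semantics above: the first IKmsClient registration wins, so call AddKmsClient&lt;T&gt;() before AddStubKmsClient() in a composition root that needs a real client.

using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddBundleEncryptionWithAge(new[] { "age1exampleplaceholder0000000000000000000000000000000000000000" });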

View File

@@ -0,0 +1,52 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Default implementation of bundle key wrapper factory.
/// </summary>
public sealed class BundleKeyWrapperFactory : IBundleKeyWrapperFactory
{
private readonly ILogger<AgeBundleKeyWrapper> _ageLogger;
private readonly ILogger<KmsBundleKeyWrapper> _kmsLogger;
private readonly IOptions<BundleEncryptionOptions> _options;
private readonly IKmsClient? _kmsClient;
private readonly Dictionary<KeyWrapperType, IBundleKeyWrapper> _wrappers;
public BundleKeyWrapperFactory(
ILogger<AgeBundleKeyWrapper> ageLogger,
ILogger<KmsBundleKeyWrapper> kmsLogger,
IOptions<BundleEncryptionOptions> options,
IKmsClient? kmsClient = null)
{
_ageLogger = ageLogger ?? throw new ArgumentNullException(nameof(ageLogger));
_kmsLogger = kmsLogger ?? throw new ArgumentNullException(nameof(kmsLogger));
_options = options ?? throw new ArgumentNullException(nameof(options));
_kmsClient = kmsClient;
_wrappers = new Dictionary<KeyWrapperType, IBundleKeyWrapper>
{
[KeyWrapperType.Age] = new AgeBundleKeyWrapper(_ageLogger, _options),
[KeyWrapperType.Kms] = new KmsBundleKeyWrapper(_kmsLogger, _kmsClient)
};
}
/// <inheritdoc />
public IBundleKeyWrapper GetWrapper(KeyWrapperType type)
{
if (_wrappers.TryGetValue(type, out var wrapper))
{
return wrapper;
}
throw new ArgumentException($"Unknown key wrapper type: {type}", nameof(type));
}
/// <inheritdoc />
public IReadOnlyList<IBundleKeyWrapper> GetAllWrappers()
{
return _wrappers.Values.ToList();
}
}

View File

@@ -0,0 +1,165 @@
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Interface for bundle encryption and decryption.
/// </summary>
public interface IBundleEncryptor
{
/// <summary>
/// Encrypts bundle content using the configured mode.
/// </summary>
Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts bundle content using the provided metadata and keys.
/// </summary>
Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies that decrypted content matches the original hashes.
/// </summary>
Task<IReadOnlyList<string>> VerifyDecryptedContentAsync(
BundleDecryptResult decryptResult,
BundleEncryptionMetadata metadata,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for DEK key wrapping.
/// </summary>
public interface IBundleKeyWrapper
{
/// <summary>
/// Key wrapper type.
/// </summary>
KeyWrapperType Type { get; }
/// <summary>
/// Wraps a DEK for the specified recipient.
/// </summary>
Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Unwraps a DEK using the wrapped key recipient entry.
/// </summary>
Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if this wrapper can unwrap the given recipient entry.
/// </summary>
bool CanUnwrap(WrappedKeyRecipient wrappedKey);
}
/// <summary>
/// Factory for creating bundle key wrappers.
/// </summary>
public interface IBundleKeyWrapperFactory
{
/// <summary>
/// Gets a key wrapper for the specified type.
/// </summary>
IBundleKeyWrapper GetWrapper(KeyWrapperType type);
/// <summary>
/// Gets all available key wrappers.
/// </summary>
IReadOnlyList<IBundleKeyWrapper> GetAllWrappers();
}
/// <summary>
/// Interface for KMS operations (abstraction for AWS KMS, Azure Key Vault, etc.).
/// </summary>
public interface IKmsClient
{
/// <summary>
/// Encrypts data using the specified KMS key.
/// </summary>
Task<KmsEncryptResult> EncryptAsync(
string keyId,
byte[] plaintext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts data using the specified KMS key.
/// </summary>
Task<KmsDecryptResult> DecryptAsync(
string keyId,
byte[] ciphertext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Generates a data key for envelope encryption.
/// </summary>
Task<KmsGenerateDataKeyResult> GenerateDataKeyAsync(
string keyId,
int keySizeBytes = 32,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of KMS encrypt operation.
/// </summary>
public sealed record KmsEncryptResult
{
public required bool Success { get; init; }
public byte[]? Ciphertext { get; init; }
public string? KeyId { get; init; }
public string? Algorithm { get; init; }
public string? Error { get; init; }
public static KmsEncryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of KMS decrypt operation.
/// </summary>
public sealed record KmsDecryptResult
{
public required bool Success { get; init; }
public byte[]? Plaintext { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
public static KmsDecryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of KMS generate data key operation.
/// </summary>
public sealed record KmsGenerateDataKeyResult
{
public required bool Success { get; init; }
public byte[]? Plaintext { get; init; }
public byte[]? CiphertextBlob { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
public static KmsGenerateDataKeyResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
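
A hedged envelope sketch (not part of the diff) against the IKmsClient contract: the data key comes back both in plaintext (use locally, then zeroize) and as a KMS-wrapped blob (safe to persist).

using System.Security.Cryptography;

static async Task<byte[]?> GenerateAndWrapAsync(IKmsClient kms, string keyId)
{
    var dk = await kms.GenerateDataKeyAsync(keyId, keySizeBytes: 32);
    if (!dk.Success || dk.Plaintext is null || dk.CiphertextBlob is null)
        return null;
    try
    {
        // ... encrypt content locally with dk.Plaintext (e.g. AES-256-GCM) ...
        return dk.CiphertextBlob; // persist next to the ciphertext
    }
    finally
    {
        CryptographicOperations.ZeroMemory(dk.Plaintext);
    }
}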

View File

@@ -0,0 +1,260 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// KMS key wrapper implementation.
/// Supports AWS KMS, Azure Key Vault, and other KMS providers via IKmsClient.
/// </summary>
public sealed class KmsBundleKeyWrapper : IBundleKeyWrapper
{
private readonly ILogger<KmsBundleKeyWrapper> _logger;
private readonly IKmsClient? _kmsClient;
public KmsBundleKeyWrapper(
ILogger<KmsBundleKeyWrapper> logger,
IKmsClient? kmsClient = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_kmsClient = kmsClient;
}
/// <inheritdoc />
public KeyWrapperType Type => KeyWrapperType.Kms;
/// <inheritdoc />
public async Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(dek);
ArgumentException.ThrowIfNullOrWhiteSpace(recipient);
if (_kmsClient is null)
{
throw new InvalidOperationException("KMS client is not configured");
}
// Build encryption context for key binding
var context = new Dictionary<string, string>
{
["purpose"] = "export-bundle-dek"
};
if (tenantId.HasValue)
{
context["tenantId"] = tenantId.Value.ToString("N");
}
if (runId.HasValue)
{
context["runId"] = runId.Value.ToString("N");
}
var result = await _kmsClient.EncryptAsync(recipient, dek, context, cancellationToken);
if (!result.Success || result.Ciphertext is null)
{
throw new InvalidOperationException($"KMS encrypt failed: {result.Error}");
}
_logger.LogDebug(
"Wrapped DEK with KMS key {KeyId}",
MaskKeyId(recipient));
return new WrappedKeyRecipient
{
Type = "kms",
KmsKeyId = recipient,
WrappedKey = Convert.ToBase64String(result.Ciphertext),
KeyId = result.KeyId,
Algorithm = result.Algorithm
};
}
/// <inheritdoc />
public async Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(wrappedKey);
if (_kmsClient is null)
{
return KeyUnwrapResult.Failed("KMS client is not configured");
}
if (string.IsNullOrEmpty(wrappedKey.KmsKeyId))
{
return KeyUnwrapResult.Failed("KMS key ID is required");
}
try
{
var ciphertext = Convert.FromBase64String(wrappedKey.WrappedKey);
// Build encryption context (must match what was used during encryption)
var context = new Dictionary<string, string>
{
["purpose"] = "export-bundle-dek"
};
var result = await _kmsClient.DecryptAsync(
wrappedKey.KmsKeyId,
ciphertext,
context,
cancellationToken);
if (!result.Success || result.Plaintext is null)
{
return KeyUnwrapResult.Failed($"KMS decrypt failed: {result.Error}");
}
_logger.LogDebug("Unwrapped DEK with KMS key {KeyId}", MaskKeyId(wrappedKey.KmsKeyId));
return new KeyUnwrapResult
{
Success = true,
Key = result.Plaintext
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to unwrap DEK with KMS");
return KeyUnwrapResult.Failed($"KMS unwrap failed: {ex.Message}");
}
}
/// <inheritdoc />
public bool CanUnwrap(WrappedKeyRecipient wrappedKey)
{
return string.Equals(wrappedKey.Type, "kms", StringComparison.OrdinalIgnoreCase) &&
!string.IsNullOrEmpty(wrappedKey.KmsKeyId) &&
_kmsClient is not null;
}
private static string MaskKeyId(string keyId)
{
if (keyId.Length <= 12)
return "***";
return $"{keyId[..8]}...{keyId[^4..]}";
}
}
/// <summary>
/// Stub KMS client for testing and offline environments.
/// Uses local symmetric encryption as a stand-in for KMS.
/// </summary>
public sealed class StubKmsClient : IKmsClient
{
private readonly Dictionary<string, byte[]> _keys = new();
/// <summary>
/// Registers a key for testing.
/// </summary>
public void RegisterKey(string keyId, byte[] key)
{
_keys[keyId] = key;
}
/// <inheritdoc />
public Task<KmsEncryptResult> EncryptAsync(
string keyId,
byte[] plaintext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default)
{
if (!_keys.TryGetValue(keyId, out var key))
{
// Generate a key for testing
key = new byte[32];
System.Security.Cryptography.RandomNumberGenerator.Fill(key);
_keys[keyId] = key;
}
using var aes = System.Security.Cryptography.Aes.Create();
aes.Key = key;
aes.GenerateIV();
using var encryptor = aes.CreateEncryptor();
var encrypted = encryptor.TransformFinalBlock(plaintext, 0, plaintext.Length);
// Format: iv || encrypted
var result = new byte[aes.IV.Length + encrypted.Length];
Buffer.BlockCopy(aes.IV, 0, result, 0, aes.IV.Length);
Buffer.BlockCopy(encrypted, 0, result, aes.IV.Length, encrypted.Length);
return Task.FromResult(new KmsEncryptResult
{
Success = true,
Ciphertext = result,
KeyId = keyId,
Algorithm = "AES-256-CBC"
});
}
/// <inheritdoc />
public Task<KmsDecryptResult> DecryptAsync(
string keyId,
byte[] ciphertext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default)
{
if (!_keys.TryGetValue(keyId, out var key))
{
return Task.FromResult(KmsDecryptResult.Failed($"Key not found: {keyId}"));
        }
        if (ciphertext.Length < 16)
        {
            return Task.FromResult(KmsDecryptResult.Failed($"Ciphertext too short to contain an IV: {ciphertext.Length} bytes"));
        }
        // Extract the 16-byte IV prefix, then the encrypted payload
        var iv = new byte[16];
        var encrypted = new byte[ciphertext.Length - 16];
        Buffer.BlockCopy(ciphertext, 0, iv, 0, 16);
        Buffer.BlockCopy(ciphertext, 16, encrypted, 0, encrypted.Length);
using var aes = System.Security.Cryptography.Aes.Create();
aes.Key = key;
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
var plaintext = decryptor.TransformFinalBlock(encrypted, 0, encrypted.Length);
return Task.FromResult(new KmsDecryptResult
{
Success = true,
Plaintext = plaintext,
KeyId = keyId
});
}
/// <inheritdoc />
    public async Task<KmsGenerateDataKeyResult> GenerateDataKeyAsync(
        string keyId,
        int keySizeBytes = 32,
        IDictionary<string, string>? encryptionContext = null,
        CancellationToken cancellationToken = default)
    {
        // Generate random data key
        var plaintext = new byte[keySizeBytes];
        System.Security.Cryptography.RandomNumberGenerator.Fill(plaintext);
        // Encrypt it under the named key; await instead of blocking on the task
        var encryptResult = await EncryptAsync(keyId, plaintext, encryptionContext, cancellationToken);
        if (!encryptResult.Success)
        {
            return KmsGenerateDataKeyResult.Failed(encryptResult.Error ?? "Encryption failed");
        }
        return new KmsGenerateDataKeyResult
        {
            Success = true,
            Plaintext = plaintext,
            CiphertextBlob = encryptResult.Ciphertext,
            KeyId = keyId
        };
    }
}
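A quick round-trip with the stub, as one might write in a test (assuming the usual System.Text, System.Diagnostics, and System.Security.Cryptography usings; key id and plaintext are illustrative):
var kms = new StubKmsClient();
kms.RegisterKey("test-key", RandomNumberGenerator.GetBytes(32));

var enc = await kms.EncryptAsync("test-key", Encoding.UTF8.GetBytes("hello"));
var dec = await kms.DecryptAsync("test-key", enc.Ciphertext!);

// The stub round-trips: decrypt under the same key id recovers the plaintext.
Debug.Assert(dec.Success && Encoding.UTF8.GetString(dec.Plaintext!) == "hello");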

View File

@@ -0,0 +1,259 @@
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Crypto;
/// <summary>
/// Centralized crypto routing service for ExportCenter.
/// Routes hashing, signing, and encryption operations through ICryptoProviderRegistry and ICryptoHash
/// with configurable provider selection.
/// </summary>
public interface IExportCryptoService
{
/// <summary>
/// Computes a content hash using the configured algorithm and provider.
/// </summary>
string ComputeContentHash(ReadOnlySpan<byte> data);
/// <summary>
/// Computes a content hash for a stream.
/// </summary>
Task<string> ComputeContentHashAsync(Stream stream, CancellationToken cancellationToken = default);
/// <summary>
/// Computes an HMAC for signing purposes.
/// </summary>
byte[] ComputeHmacForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data);
/// <summary>
/// Computes an HMAC for signing and returns as base64.
/// </summary>
string ComputeHmacBase64ForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data);
/// <summary>
/// Gets a signer for asymmetric signing operations.
/// </summary>
ICryptoSigner GetSigner(string keyId, string? algorithmOverride = null);
/// <summary>
/// Gets a content hasher with the configured algorithm.
/// </summary>
ICryptoHasher GetHasher(string? algorithmOverride = null);
/// <summary>
/// Gets the current crypto configuration.
/// </summary>
ExportCryptoConfiguration CurrentConfiguration { get; }
}
/// <summary>
/// Configuration for export crypto operations.
/// </summary>
public sealed class ExportCryptoOptions
{
/// <summary>
/// Default hash algorithm for content hashing (e.g., "SHA-256", "SHA-384").
/// </summary>
public string HashAlgorithm { get; set; } = "SHA-256";
/// <summary>
/// Default signing algorithm for asymmetric signing (e.g., "ES256", "ES384", "PS256").
/// </summary>
public string SigningAlgorithm { get; set; } = "ES256";
/// <summary>
/// Preferred crypto provider for operations (e.g., "default", "CryptoPro", "OpenSSL").
/// </summary>
public string? PreferredProvider { get; set; }
/// <summary>
/// Default key ID for signing operations.
/// </summary>
public string? DefaultKeyId { get; set; }
/// <summary>
/// Whether to use compliance-profile-aware operations.
/// </summary>
public bool UseComplianceProfile { get; set; } = true;
/// <summary>
/// Algorithm overrides by purpose.
/// </summary>
public Dictionary<string, string> AlgorithmOverrides { get; set; } = new();
}
/// <summary>
/// Runtime crypto configuration snapshot.
/// </summary>
public sealed record ExportCryptoConfiguration(
string HashAlgorithm,
string SigningAlgorithm,
string? Provider,
string? KeyId);
/// <summary>
/// Default implementation of export crypto service.
/// </summary>
public sealed class ExportCryptoService : IExportCryptoService
{
private readonly ILogger<ExportCryptoService> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly ICryptoHmac _cryptoHmac;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
private readonly ExportCryptoOptions _options;
public ExportCryptoService(
ILogger<ExportCryptoService> logger,
ICryptoHash cryptoHash,
ICryptoHmac cryptoHmac,
IOptions<ExportCryptoOptions>? options = null,
ICryptoProviderRegistry? cryptoRegistry = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
_cryptoRegistry = cryptoRegistry;
_options = options?.Value ?? new ExportCryptoOptions();
}
public ExportCryptoConfiguration CurrentConfiguration => new(
_options.HashAlgorithm,
_options.SigningAlgorithm,
_options.PreferredProvider,
_options.DefaultKeyId);
public string ComputeContentHash(ReadOnlySpan<byte> data)
{
// Use ICryptoHash which handles provider selection internally
return _cryptoHash.ComputeHashHexForPurpose(data, HashPurpose.Content);
}
public async Task<string> ComputeContentHashAsync(Stream stream, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
        // Buffer the full stream in memory for hashing; large payloads pay the allocation cost here
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
ms.Position = 0;
return _cryptoHash.ComputeHashHexForPurpose(ms.ToArray(), HashPurpose.Content);
}
public byte[] ComputeHmacForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data)
{
return _cryptoHmac.ComputeHmacForPurpose(key, data, HmacPurpose.Signing);
}
public string ComputeHmacBase64ForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data)
{
return _cryptoHmac.ComputeHmacBase64ForPurpose(key, data, HmacPurpose.Signing);
}
public ICryptoSigner GetSigner(string keyId, string? algorithmOverride = null)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"ICryptoProviderRegistry is not configured. Cannot get asymmetric signer.");
}
var algorithm = algorithmOverride ?? _options.SigningAlgorithm;
var keyRef = new CryptoKeyReference(keyId, _options.PreferredProvider);
var resolution = _cryptoRegistry.ResolveSigner(
CryptoCapability.Signing,
algorithm,
keyRef,
_options.PreferredProvider);
_logger.LogDebug(
"Resolved signer for key {KeyId} with algorithm {Algorithm} from provider {Provider}",
keyId,
algorithm,
resolution.ProviderName);
return resolution.Signer;
}
public ICryptoHasher GetHasher(string? algorithmOverride = null)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"ICryptoProviderRegistry is not configured. Use ComputeContentHash instead.");
}
var algorithm = algorithmOverride ?? _options.HashAlgorithm;
var resolution = _cryptoRegistry.ResolveHasher(algorithm, _options.PreferredProvider);
_logger.LogDebug(
"Resolved hasher for algorithm {Algorithm} from provider {Provider}",
algorithm,
resolution.ProviderName);
return resolution.Hasher;
}
}
/// <summary>
/// Factory for creating ExportCryptoService with specific configuration.
/// </summary>
public interface IExportCryptoServiceFactory
{
/// <summary>
/// Creates an export crypto service with the specified options.
/// </summary>
IExportCryptoService Create(ExportCryptoOptions options);
/// <summary>
/// Creates an export crypto service for a specific provider.
/// </summary>
IExportCryptoService CreateForProvider(string providerName);
}
/// <summary>
/// Default implementation of export crypto service factory.
/// </summary>
public sealed class ExportCryptoServiceFactory : IExportCryptoServiceFactory
{
private readonly ILogger<ExportCryptoService> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly ICryptoHmac _cryptoHmac;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
public ExportCryptoServiceFactory(
ILogger<ExportCryptoService> logger,
ICryptoHash cryptoHash,
ICryptoHmac cryptoHmac,
ICryptoProviderRegistry? cryptoRegistry = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
_cryptoRegistry = cryptoRegistry;
}
public IExportCryptoService Create(ExportCryptoOptions options)
{
ArgumentNullException.ThrowIfNull(options);
return new ExportCryptoService(
_logger,
_cryptoHash,
_cryptoHmac,
Options.Create(options),
_cryptoRegistry);
}
public IExportCryptoService CreateForProvider(string providerName)
{
ArgumentException.ThrowIfNullOrWhiteSpace(providerName);
var options = new ExportCryptoOptions { PreferredProvider = providerName };
return Create(options);
}
}
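A sketch of how the factory might be consumed to route operations through a named provider (the provider and key names below are assumptions for illustration):
// Hypothetical consumption; "OpenSSL" matches the example provider names in ExportCryptoOptions.
IExportCryptoService crypto = factory.CreateForProvider("OpenSSL");
string digest = crypto.ComputeContentHash(Encoding.UTF8.GetBytes("manifest contents"));
ICryptoSigner signer = crypto.GetSigner("export-signing-key"); // throws if no registry is configured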

View File

@@ -0,0 +1,267 @@
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Distribution;
/// <summary>
/// Default implementation of the distribution lifecycle service.
/// </summary>
public sealed class DistributionLifecycleService : IDistributionLifecycleService
{
private readonly IDistributionRepository _repository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<DistributionLifecycleService> _logger;
public DistributionLifecycleService(
IDistributionRepository repository,
TimeProvider timeProvider,
ILogger<DistributionLifecycleService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<ExportDistribution> CreateDistributionAsync(
CreateDistributionRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
// Check idempotency key
if (!string.IsNullOrEmpty(request.IdempotencyKey))
{
var existing = await _repository.GetByIdempotencyKeyAsync(
request.IdempotencyKey, cancellationToken);
if (existing is not null)
{
_logger.LogDebug(
"Found existing distribution {DistributionId} for idempotency key {Key}",
existing.DistributionId, request.IdempotencyKey);
return existing;
}
}
var now = _timeProvider.GetUtcNow();
// Calculate retention expiry
DateTimeOffset? retentionExpiresAt = null;
Guid? retentionPolicyId = null;
if (request.RetentionPolicy is { Enabled: true })
{
retentionPolicyId = request.RetentionPolicy.PolicyId;
retentionExpiresAt = request.RetentionPolicy.CalculateExpiryAt(now);
}
var distribution = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = request.RunId,
TenantId = request.TenantId,
Kind = request.Kind,
Status = ExportDistributionStatus.Pending,
Target = request.Target,
ArtifactPath = request.ArtifactPath,
IdempotencyKey = request.IdempotencyKey,
RetentionPolicyId = retentionPolicyId,
RetentionExpiresAt = retentionExpiresAt,
CreatedAt = now,
AttemptCount = 0
};
var created = await _repository.CreateAsync(distribution, cancellationToken);
_logger.LogInformation(
"Created distribution {DistributionId} for run {RunId} targeting {Kind}:{Target}",
created.DistributionId, request.RunId, request.Kind, request.Target);
return created;
}
/// <inheritdoc />
public Task<ExportDistribution?> GetDistributionAsync(
Guid distributionId,
CancellationToken cancellationToken = default)
=> _repository.GetByIdAsync(distributionId, cancellationToken);
/// <inheritdoc />
public Task<ExportDistribution?> GetDistributionByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default)
=> _repository.GetByIdempotencyKeyAsync(idempotencyKey, cancellationToken);
/// <inheritdoc />
public Task<IReadOnlyList<ExportDistribution>> GetDistributionsForRunAsync(
Guid runId,
CancellationToken cancellationToken = default)
=> _repository.GetByRunIdAsync(runId, cancellationToken);
/// <inheritdoc />
public async Task<bool> UpdateDistributionMetadataAsync(
UpdateDistributionMetadataRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var distribution = await _repository.GetByIdAsync(request.DistributionId, cancellationToken);
if (distribution is null)
{
_logger.LogWarning("Distribution {DistributionId} not found", request.DistributionId);
return false;
}
// Optimistic concurrency check
if (request.ExpectedStatus.HasValue && distribution.Status != request.ExpectedStatus.Value)
{
_logger.LogWarning(
"Distribution {DistributionId} status mismatch: expected {Expected}, actual {Actual}",
request.DistributionId, request.ExpectedStatus.Value, distribution.Status);
return false;
}
var now = _timeProvider.GetUtcNow();
// Create updated distribution with changed fields
var updated = new ExportDistribution
{
DistributionId = distribution.DistributionId,
RunId = distribution.RunId,
TenantId = distribution.TenantId,
Kind = distribution.Kind,
Status = request.Status ?? distribution.Status,
Target = distribution.Target,
ArtifactPath = distribution.ArtifactPath,
ArtifactHash = request.ArtifactHash ?? distribution.ArtifactHash,
SizeBytes = request.SizeBytes ?? distribution.SizeBytes,
ContentType = request.ContentType ?? distribution.ContentType,
MetadataJson = request.MetadataJson ?? distribution.MetadataJson,
ErrorJson = distribution.ErrorJson,
AttemptCount = distribution.AttemptCount,
IdempotencyKey = distribution.IdempotencyKey,
OciManifestDigest = request.OciManifestDigest ?? distribution.OciManifestDigest,
OciImageReference = request.OciImageReference ?? distribution.OciImageReference,
RetentionPolicyId = distribution.RetentionPolicyId,
RetentionExpiresAt = distribution.RetentionExpiresAt,
MarkedForDeletion = distribution.MarkedForDeletion,
CreatedAt = distribution.CreatedAt,
DistributedAt = request.DistributedAt ?? distribution.DistributedAt,
VerifiedAt = request.VerifiedAt ?? distribution.VerifiedAt,
UpdatedAt = now,
DeletedAt = distribution.DeletedAt
};
var result = await _repository.UpdateAsync(updated, cancellationToken);
if (result)
{
_logger.LogDebug(
"Updated distribution {DistributionId} metadata",
request.DistributionId);
}
return result;
}
/// <inheritdoc />
public async Task<bool> TransitionStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
string? errorJson = null,
CancellationToken cancellationToken = default)
{
var distribution = await _repository.GetByIdAsync(distributionId, cancellationToken);
if (distribution is null)
{
_logger.LogWarning("Distribution {DistributionId} not found for status transition", distributionId);
return false;
}
// Validate transition
if (!IsValidTransition(distribution.Status, newStatus))
{
_logger.LogWarning(
"Invalid status transition for distribution {DistributionId}: {From} -> {To}",
distributionId, distribution.Status, newStatus);
return false;
}
var result = await _repository.UpdateStatusAsync(
distributionId, newStatus, distribution.Status, errorJson, cancellationToken);
if (result)
{
_logger.LogInformation(
"Transitioned distribution {DistributionId} from {From} to {To}",
distributionId, distribution.Status, newStatus);
}
return result;
}
/// <inheritdoc />
public async Task<int> ApplyRetentionPoliciesAsync(
Guid tenantId,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var marked = await _repository.MarkForDeletionAsync(tenantId, now, cancellationToken);
if (marked > 0)
{
_logger.LogInformation(
"Marked {Count} distributions for deletion in tenant {TenantId}",
marked, tenantId);
}
return marked;
}
/// <inheritdoc />
public async Task<int> PruneMarkedDistributionsAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default)
{
var deleted = await _repository.DeleteMarkedAsync(tenantId, batchSize, cancellationToken);
if (deleted > 0)
{
_logger.LogInformation(
"Pruned {Count} marked distributions in tenant {TenantId}",
deleted, tenantId);
}
return deleted;
}
/// <inheritdoc />
public Task<IReadOnlyList<ExportDistribution>> GetExpiredDistributionsAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
return _repository.GetExpiredAsync(tenantId, now, limit, cancellationToken);
}
private static bool IsValidTransition(ExportDistributionStatus from, ExportDistributionStatus to)
{
return (from, to) switch
{
(ExportDistributionStatus.Pending, ExportDistributionStatus.Distributing) => true,
(ExportDistributionStatus.Pending, ExportDistributionStatus.Cancelled) => true,
(ExportDistributionStatus.Pending, ExportDistributionStatus.Failed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Distributed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Failed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Cancelled) => true,
(ExportDistributionStatus.Distributed, ExportDistributionStatus.Verified) => true,
(ExportDistributionStatus.Distributed, ExportDistributionStatus.Failed) => true,
// Retry from failed
(ExportDistributionStatus.Failed, ExportDistributionStatus.Pending) => true,
_ => false
};
}
}
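End to end, a distribution moves Pending -> Distributing -> Distributed (-> Verified), with Failed -> Pending as the retry path. A minimal sketch against the service, with illustrative identifiers and target:
var distribution = await lifecycle.CreateDistributionAsync(new CreateDistributionRequest
{
    RunId = runId,
    TenantId = tenantId,
    Kind = ExportDistributionKind.OciRegistry,
    Target = "registry.example/exports",   // illustrative target
    ArtifactPath = "bundles/run.tar.gz",
    IdempotencyKey = $"run:{runId:N}:oci"  // replays return the existing record
});

await lifecycle.TransitionStatusAsync(distribution.DistributionId, ExportDistributionStatus.Distributing);
await lifecycle.TransitionStatusAsync(distribution.DistributionId, ExportDistributionStatus.Distributed);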

View File

@@ -0,0 +1,155 @@
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Distribution;
/// <summary>
/// Service for managing the lifecycle of export distributions.
/// </summary>
public interface IDistributionLifecycleService
{
/// <summary>
/// Creates a new distribution record, respecting idempotency keys.
/// </summary>
/// <param name="request">Distribution creation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Created or existing distribution (if idempotent).</returns>
Task<ExportDistribution> CreateDistributionAsync(
CreateDistributionRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a distribution by ID.
/// </summary>
Task<ExportDistribution?> GetDistributionAsync(
Guid distributionId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a distribution by idempotency key.
/// </summary>
Task<ExportDistribution?> GetDistributionByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all distributions for a run.
/// </summary>
Task<IReadOnlyList<ExportDistribution>> GetDistributionsForRunAsync(
Guid runId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates distribution metadata idempotently.
/// </summary>
/// <param name="request">Update request with optional optimistic concurrency check.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if update succeeded, false if concurrency check failed.</returns>
Task<bool> UpdateDistributionMetadataAsync(
UpdateDistributionMetadataRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Transitions a distribution to a new status.
/// </summary>
/// <param name="distributionId">Distribution ID.</param>
/// <param name="newStatus">New status.</param>
/// <param name="errorJson">Error details if transitioning to Failed.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if transition succeeded.</returns>
Task<bool> TransitionStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
string? errorJson = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Marks distributions for deletion based on retention policy.
/// </summary>
/// <param name="tenantId">Tenant ID to scope the operation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of distributions marked for deletion.</returns>
Task<int> ApplyRetentionPoliciesAsync(
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes distributions that have been marked for deletion.
/// </summary>
/// <param name="tenantId">Tenant ID to scope the operation.</param>
/// <param name="batchSize">Maximum number to delete per call.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of distributions deleted.</returns>
Task<int> PruneMarkedDistributionsAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets distributions that have expired based on retention policy.
/// </summary>
Task<IReadOnlyList<ExportDistribution>> GetExpiredDistributionsAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Repository interface for distribution persistence.
/// </summary>
public interface IDistributionRepository
{
Task<ExportDistribution> CreateAsync(
ExportDistribution distribution,
CancellationToken cancellationToken = default);
Task<ExportDistribution?> GetByIdAsync(
Guid distributionId,
CancellationToken cancellationToken = default);
Task<ExportDistribution?> GetByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetByRunIdAsync(
Guid runId,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetByTenantIdAsync(
Guid tenantId,
ExportDistributionStatus? status = null,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetExpiredAsync(
Guid tenantId,
DateTimeOffset asOf,
int limit = 100,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetMarkedForDeletionAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default);
Task<bool> UpdateAsync(
ExportDistribution distribution,
CancellationToken cancellationToken = default);
Task<bool> UpdateStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
ExportDistributionStatus? expectedStatus = null,
string? errorJson = null,
CancellationToken cancellationToken = default);
Task<int> MarkForDeletionAsync(
Guid tenantId,
DateTimeOffset expiryBefore,
CancellationToken cancellationToken = default);
Task<int> DeleteMarkedAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default);
}
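A periodic retention sweep would typically pair the two maintenance calls, marking expired distributions and then pruning in batches until none remain; the loop below is a sketch, not a shipped scheduler:
// Hypothetical per-tenant background sweep.
async Task SweepAsync(IDistributionLifecycleService lifecycle, Guid tenantId, CancellationToken ct)
{
    await lifecycle.ApplyRetentionPoliciesAsync(tenantId, ct);
    while (await lifecycle.PruneMarkedDistributionsAsync(tenantId, batchSize: 100, ct) > 0)
    {
        // keep deleting until no marked rows remain
    }
}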

View File

@@ -55,11 +55,51 @@ public sealed class ExportDistribution
/// </summary>
public int AttemptCount { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// </summary>
public string? IdempotencyKey { get; init; }
/// <summary>
/// OCI manifest digest for registry distributions.
/// </summary>
public string? OciManifestDigest { get; init; }
/// <summary>
/// OCI image reference for registry distributions.
/// </summary>
public string? OciImageReference { get; init; }
/// <summary>
/// Retention policy ID applied to this distribution.
/// </summary>
public Guid? RetentionPolicyId { get; init; }
/// <summary>
/// Timestamp when this distribution expires based on retention policy.
/// </summary>
public DateTimeOffset? RetentionExpiresAt { get; init; }
/// <summary>
/// Whether this distribution has been marked for deletion.
/// </summary>
public bool MarkedForDeletion { get; init; }
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset? DistributedAt { get; init; }
public DateTimeOffset? VerifiedAt { get; init; }
/// <summary>
/// Timestamp when this distribution was last updated.
/// </summary>
public DateTimeOffset? UpdatedAt { get; init; }
/// <summary>
/// Timestamp when this distribution was deleted (if applicable).
/// </summary>
public DateTimeOffset? DeletedAt { get; init; }
}
/// <summary>
@@ -90,7 +130,22 @@ public enum ExportDistributionKind
/// <summary>
/// Webhook notification (metadata only).
/// </summary>
Webhook = 5
Webhook = 5,
/// <summary>
/// OCI registry distribution (artifact push).
/// </summary>
OciRegistry = 6,
/// <summary>
/// Azure Blob Storage distribution.
/// </summary>
AzureBlob = 7,
/// <summary>
/// Google Cloud Storage distribution.
/// </summary>
GoogleCloudStorage = 8
}
/// <summary>

View File

@@ -0,0 +1,286 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Domain;
/// <summary>
/// Configuration for a distribution target.
/// </summary>
public sealed record ExportDistributionTarget
{
public required Guid TargetId { get; init; }
public required Guid ProfileId { get; init; }
public required Guid TenantId { get; init; }
public required string Name { get; init; }
public required ExportDistributionKind Kind { get; init; }
public required bool Enabled { get; init; }
/// <summary>
/// Priority for distribution ordering (lower = higher priority).
/// </summary>
public int Priority { get; init; }
/// <summary>
/// Target-specific configuration (serialized JSON).
/// </summary>
public string? ConfigJson { get; init; }
/// <summary>
/// Retention policy for artifacts at this target.
/// </summary>
public ExportRetentionPolicy? RetentionPolicy { get; init; }
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset? UpdatedAt { get; init; }
}
/// <summary>
/// Configuration for OCI registry distribution target.
/// </summary>
public sealed record OciDistributionTargetConfig
{
[JsonPropertyName("registry")]
public required string Registry { get; init; }
[JsonPropertyName("repository")]
public string? Repository { get; init; }
[JsonPropertyName("tagPattern")]
public string TagPattern { get; init; } = "{run-id}";
[JsonPropertyName("artifactType")]
public string? ArtifactType { get; init; }
[JsonPropertyName("createReferrer")]
public bool CreateReferrer { get; init; }
[JsonPropertyName("authRef")]
public string? AuthRef { get; init; }
[JsonPropertyName("retryCount")]
public int RetryCount { get; init; } = 3;
[JsonPropertyName("timeoutSeconds")]
public int TimeoutSeconds { get; init; } = 300;
}
/// <summary>
/// Configuration for S3/object storage distribution target.
/// </summary>
public sealed record ObjectStorageDistributionTargetConfig
{
[JsonPropertyName("endpoint")]
public string? Endpoint { get; init; }
[JsonPropertyName("bucket")]
public required string Bucket { get; init; }
[JsonPropertyName("prefix")]
public string? Prefix { get; init; }
[JsonPropertyName("region")]
public string? Region { get; init; }
[JsonPropertyName("storageClass")]
public string? StorageClass { get; init; }
[JsonPropertyName("serverSideEncryption")]
public string? ServerSideEncryption { get; init; }
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
[JsonPropertyName("authRef")]
public string? AuthRef { get; init; }
}
/// <summary>
/// Retention policy for export artifacts.
/// </summary>
public sealed record ExportRetentionPolicy
{
/// <summary>
/// Unique identifier for the retention policy.
/// </summary>
public Guid PolicyId { get; init; } = Guid.NewGuid();
/// <summary>
/// Duration to retain artifacts (e.g., "30d", "1y").
/// </summary>
[JsonPropertyName("retentionPeriod")]
public string? RetentionPeriod { get; init; }
/// <summary>
/// Retention period in days (parsed from RetentionPeriod or set directly).
/// </summary>
[JsonPropertyName("retentionDays")]
public int? RetentionDays { get; init; }
/// <summary>
/// Maximum number of artifacts to retain (FIFO pruning).
/// </summary>
[JsonPropertyName("maxArtifacts")]
public int? MaxArtifacts { get; init; }
/// <summary>
/// Maximum total size in bytes to retain.
/// </summary>
[JsonPropertyName("maxSizeBytes")]
public long? MaxSizeBytes { get; init; }
/// <summary>
/// Whether to delete artifacts when retention expires.
/// </summary>
[JsonPropertyName("deleteOnExpiry")]
public bool DeleteOnExpiry { get; init; } = true;
/// <summary>
/// Whether retention policy is enforced.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Calculates the expiry timestamp based on this policy.
/// </summary>
public DateTimeOffset? CalculateExpiryAt(DateTimeOffset from)
{
if (RetentionDays.HasValue)
{
return from.AddDays(RetentionDays.Value);
}
if (!string.IsNullOrEmpty(RetentionPeriod))
{
return ParseRetentionPeriod(RetentionPeriod, from);
}
return null;
}
private static DateTimeOffset? ParseRetentionPeriod(string period, DateTimeOffset from)
{
if (string.IsNullOrWhiteSpace(period))
return null;
var span = period.Trim();
if (span.Length < 2)
return null;
var unit = char.ToLowerInvariant(span[^1]);
if (!int.TryParse(span[..^1], out var value))
return null;
return unit switch
{
'd' => from.AddDays(value),
'w' => from.AddDays(value * 7),
'm' => from.AddMonths(value),
'y' => from.AddYears(value),
'h' => from.AddHours(value),
_ => null
};
}
}
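For example, given the parsing rules above, a "30d" policy expires thirty days after creation; RetentionDays, when set, takes precedence over the string form:
var policy = new ExportRetentionPolicy { RetentionPeriod = "30d" };
var createdAt = new DateTimeOffset(2025, 12, 11, 0, 0, 0, TimeSpan.Zero);
var expiresAt = policy.CalculateExpiryAt(createdAt); // 2026-01-10T00:00:00Z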
/// <summary>
/// Result of a distribution operation.
/// </summary>
public sealed record DistributionResult
{
public required bool Success { get; init; }
public Guid DistributionId { get; init; }
public ExportDistributionStatus Status { get; init; }
public string? Target { get; init; }
public string? ArtifactHash { get; init; }
public long SizeBytes { get; init; }
public string? OciManifestDigest { get; init; }
public string? OciImageReference { get; init; }
public int AttemptCount { get; init; }
public string? ErrorMessage { get; init; }
public string? ErrorCode { get; init; }
public static DistributionResult Failed(string errorMessage, string? errorCode = null)
=> new()
{
Success = false,
Status = ExportDistributionStatus.Failed,
ErrorMessage = errorMessage,
ErrorCode = errorCode
};
}
/// <summary>
/// Request to create or update a distribution.
/// </summary>
public sealed record CreateDistributionRequest
{
public required Guid RunId { get; init; }
public required Guid TenantId { get; init; }
public required ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
public required string ArtifactPath { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// If a distribution with this key already exists, returns the existing one.
/// </summary>
public string? IdempotencyKey { get; init; }
public ExportRetentionPolicy? RetentionPolicy { get; init; }
public OciDistributionTargetConfig? OciConfig { get; init; }
public ObjectStorageDistributionTargetConfig? ObjectStorageConfig { get; init; }
}
/// <summary>
/// Request to update distribution metadata idempotently.
/// </summary>
public sealed record UpdateDistributionMetadataRequest
{
public required Guid DistributionId { get; init; }
public ExportDistributionStatus? Status { get; init; }
public string? ArtifactHash { get; init; }
public long? SizeBytes { get; init; }
public string? ContentType { get; init; }
public string? MetadataJson { get; init; }
public string? OciManifestDigest { get; init; }
public string? OciImageReference { get; init; }
public DateTimeOffset? DistributedAt { get; init; }
public DateTimeOffset? VerifiedAt { get; init; }
/// <summary>
/// Expected current status for optimistic concurrency.
/// If set, update fails if current status doesn't match.
/// </summary>
public ExportDistributionStatus? ExpectedStatus { get; init; }
}
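The ExpectedStatus field gives callers a cheap optimistic-concurrency guard; a sketch of the intended pattern (hash and timestamps are illustrative):
var ok = await lifecycle.UpdateDistributionMetadataAsync(new UpdateDistributionMetadataRequest
{
    DistributionId = distribution.DistributionId,
    Status = ExportDistributionStatus.Distributed,
    ArtifactHash = "sha256:0123abcd",        // digest of the pushed artifact (placeholder)
    DistributedAt = DateTimeOffset.UtcNow,
    ExpectedStatus = ExportDistributionStatus.Distributing // fail instead of clobbering a concurrent change
});
if (!ok)
{
    // reload the distribution and decide whether to retry
}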

View File

@@ -3,7 +3,7 @@ namespace StellaOps.ExportCenter.Core.Domain;
/// <summary>
/// Represents a single execution of an export profile.
/// </summary>
public sealed class ExportRun
public sealed record ExportRun
{
public required Guid RunId { get; init; }

View File

@@ -0,0 +1,351 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Encryption mode for export bundles.
/// </summary>
public enum BundleEncryptionMode
{
/// <summary>
/// No encryption.
/// </summary>
None = 0,
/// <summary>
/// age encryption (X25519) - preferred for offline/air-gapped deployments.
/// </summary>
Age = 1,
/// <summary>
/// AES-GCM with KMS key wrapping - for HSM/Authority integration.
/// </summary>
AesGcmKms = 2
}
/// <summary>
/// Configuration for bundle encryption.
/// </summary>
public sealed record BundleEncryptionOptions
{
/// <summary>
/// Encryption mode.
/// </summary>
[JsonPropertyName("mode")]
public BundleEncryptionMode Mode { get; init; } = BundleEncryptionMode.None;
/// <summary>
/// age public key recipients (for Age mode).
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<string> Recipients { get; init; } = [];
/// <summary>
/// KMS key ID for key wrapping (for AesGcmKms mode).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Whether to fail if encryption cannot be performed.
/// </summary>
[JsonPropertyName("strict")]
public bool Strict { get; init; } = true;
/// <summary>
/// AAD format string (default: "{runId}:{relativePath}").
/// </summary>
[JsonPropertyName("aadFormat")]
public string AadFormat { get; init; } = "{runId}:{relativePath}";
}
/// <summary>
/// Request to encrypt bundle content.
/// </summary>
public sealed record BundleEncryptRequest
{
/// <summary>
/// Run identifier for AAD binding.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant identifier for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption options.
/// </summary>
public required BundleEncryptionOptions Options { get; init; }
/// <summary>
/// Files to encrypt with their relative paths.
/// </summary>
public required IReadOnlyList<BundleFileToEncrypt> Files { get; init; }
}
/// <summary>
/// A file to encrypt within a bundle.
/// </summary>
public sealed record BundleFileToEncrypt
{
/// <summary>
/// Relative path within the bundle (used for AAD).
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Source file path to read plaintext from.
/// </summary>
public required string SourcePath { get; init; }
/// <summary>
/// Destination path for encrypted content.
/// </summary>
public required string DestinationPath { get; init; }
}
/// <summary>
/// Result of encrypting bundle content.
/// </summary>
public sealed record BundleEncryptResult
{
/// <summary>
/// Whether encryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Encrypted file results.
/// </summary>
public IReadOnlyList<EncryptedFileResult> EncryptedFiles { get; init; } = [];
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public BundleEncryptionMetadata? Metadata { get; init; }
/// <summary>
/// Error message if encryption failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static BundleEncryptResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Result of encrypting a single file.
/// </summary>
public sealed record EncryptedFileResult
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Path to encrypted file.
/// </summary>
public required string EncryptedPath { get; init; }
/// <summary>
/// Nonce used for encryption (12 bytes, base64).
/// </summary>
public required string Nonce { get; init; }
/// <summary>
/// Size of encrypted content.
/// </summary>
public long EncryptedSizeBytes { get; init; }
/// <summary>
/// SHA-256 hash of original plaintext (for verification).
/// </summary>
public string? PlaintextHash { get; init; }
}
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public sealed record BundleEncryptionMetadata
{
/// <summary>
/// Encryption mode used.
/// </summary>
[JsonPropertyName("mode")]
public required string Mode { get; init; }
/// <summary>
/// AAD format used.
/// </summary>
[JsonPropertyName("aadFormat")]
public required string AadFormat { get; init; }
/// <summary>
/// Nonce format (always "random-12").
/// </summary>
[JsonPropertyName("nonceFormat")]
public string NonceFormat { get; init; } = "random-12";
/// <summary>
/// Wrapped DEK recipients.
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<WrappedKeyRecipient> Recipients { get; init; } = [];
}
/// <summary>
/// A recipient with wrapped DEK.
/// </summary>
public sealed record WrappedKeyRecipient
{
/// <summary>
/// Type of recipient ("age" or "kms").
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// age public key (for age type).
/// </summary>
[JsonPropertyName("recipient")]
public string? Recipient { get; init; }
/// <summary>
/// KMS key ID (for kms type).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Wrapped DEK (base64).
/// </summary>
[JsonPropertyName("wrappedKey")]
public required string WrappedKey { get; init; }
/// <summary>
/// Optional key identifier.
/// </summary>
[JsonPropertyName("keyId")]
public string? KeyId { get; init; }
/// <summary>
/// Algorithm used for wrapping (for KMS).
/// </summary>
[JsonPropertyName("algorithm")]
public string? Algorithm { get; init; }
}
/// <summary>
/// Request to decrypt bundle content.
/// </summary>
public sealed record BundleDecryptRequest
{
/// <summary>
/// Run identifier for AAD validation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant identifier for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption metadata from provenance.
/// </summary>
public required BundleEncryptionMetadata Metadata { get; init; }
/// <summary>
/// age private key for decryption (for Age mode).
/// </summary>
public string? AgePrivateKey { get; init; }
/// <summary>
/// Files to decrypt with their nonces.
/// </summary>
public required IReadOnlyList<BundleFileToDecrypt> Files { get; init; }
}
/// <summary>
/// A file to decrypt within a bundle.
/// </summary>
public sealed record BundleFileToDecrypt
{
/// <summary>
/// Relative path within the bundle (used for AAD validation).
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Source path of encrypted file.
/// </summary>
public required string SourcePath { get; init; }
/// <summary>
/// Destination path for decrypted content.
/// </summary>
public required string DestinationPath { get; init; }
/// <summary>
/// Nonce used during encryption (base64).
/// </summary>
public required string Nonce { get; init; }
/// <summary>
/// Expected plaintext hash for verification.
/// </summary>
public string? ExpectedHash { get; init; }
}
/// <summary>
/// Result of decrypting bundle content.
/// </summary>
public sealed record BundleDecryptResult
{
/// <summary>
/// Whether decryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Decrypted file results.
/// </summary>
public IReadOnlyList<DecryptedFileResult> DecryptedFiles { get; init; } = [];
/// <summary>
/// Error message if decryption failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static BundleDecryptResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Result of decrypting a single file.
/// </summary>
public sealed record DecryptedFileResult
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Path to decrypted file.
/// </summary>
public required string DecryptedPath { get; init; }
/// <summary>
/// Whether hash verification passed.
/// </summary>
public bool HashVerified { get; init; }
/// <summary>
/// Computed hash of decrypted content.
/// </summary>
public string? ComputedHash { get; init; }
}
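Putting the request types together, an age-mode encryption request might look like the following (recipient key and paths are placeholders):
var request = new BundleEncryptRequest
{
    RunId = runId,
    TenantId = tenantId,
    Options = new BundleEncryptionOptions
    {
        Mode = BundleEncryptionMode.Age,
        Recipients = ["age1examplepublickeyplaceholder"] // placeholder recipient key
    },
    Files =
    [
        new BundleFileToEncrypt
        {
            RelativePath = "sbom.json",
            SourcePath = "/tmp/out/sbom.json",
            DestinationPath = "/tmp/out/sbom.json.enc"
        }
    ]
};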

View File

@@ -0,0 +1,443 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Default implementation of the bundle encryption service using AES-256-GCM.
/// </summary>
public sealed class BundleEncryptionService : IBundleEncryptionService
{
private const int DekSizeBytes = 32; // AES-256
private const int NonceSizeBytes = 12; // GCM nonce
private const int TagSizeBytes = 16; // GCM tag
private readonly IAgeKeyWrapper? _ageKeyWrapper;
private readonly IKmsKeyWrapper? _kmsKeyWrapper;
private readonly ICryptoHash _cryptoHash;
private readonly ILogger<BundleEncryptionService> _logger;
public BundleEncryptionService(
ICryptoHash cryptoHash,
ILogger<BundleEncryptionService> logger,
IAgeKeyWrapper? ageKeyWrapper = null,
IKmsKeyWrapper? kmsKeyWrapper = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_ageKeyWrapper = ageKeyWrapper;
_kmsKeyWrapper = kmsKeyWrapper;
}
/// <inheritdoc />
public async Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var validationErrors = ValidateOptions(request.Options);
if (validationErrors.Count > 0)
{
return BundleEncryptResult.Failed(
$"Invalid encryption options: {string.Join("; ", validationErrors)}");
}
if (request.Options.Mode == BundleEncryptionMode.None)
{
_logger.LogDebug("Encryption disabled, skipping");
return new BundleEncryptResult { Success = true };
}
_logger.LogInformation(
"Encrypting {FileCount} files for run {RunId} using {Mode}",
request.Files.Count, request.RunId, request.Options.Mode);
try
{
// Generate DEK
var dek = RandomNumberGenerator.GetBytes(DekSizeBytes);
try
{
// Wrap DEK for all recipients
var recipients = await WrapDekForRecipientsAsync(
dek, request, cancellationToken);
if (recipients.Count == 0)
{
return BundleEncryptResult.Failed("No recipients configured for key wrapping");
}
// Encrypt each file
var encryptedFiles = new List<EncryptedFileResult>();
foreach (var file in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await EncryptFileAsync(
file, dek, request.RunId, request.Options.AadFormat, cancellationToken);
encryptedFiles.Add(result);
}
var metadata = new BundleEncryptionMetadata
{
Mode = request.Options.Mode.ToString().ToLowerInvariant(),
AadFormat = request.Options.AadFormat,
Recipients = recipients
};
_logger.LogInformation(
"Encrypted {FileCount} files with {RecipientCount} recipients",
encryptedFiles.Count, recipients.Count);
return new BundleEncryptResult
{
Success = true,
EncryptedFiles = encryptedFiles,
Metadata = metadata
};
}
finally
{
// Zeroize DEK
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Encryption failed for run {RunId}", request.RunId);
return BundleEncryptResult.Failed($"Encryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Decrypting {FileCount} files for run {RunId}",
request.Files.Count, request.RunId);
try
{
// Find a recipient we can unwrap
var dek = await UnwrapDekAsync(request, cancellationToken);
if (dek is null)
{
return BundleDecryptResult.Failed("No matching key available for decryption");
}
try
{
// Decrypt each file
var decryptedFiles = new List<DecryptedFileResult>();
foreach (var file in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await DecryptFileAsync(
file, dek, request.RunId, request.Metadata.AadFormat, cancellationToken);
decryptedFiles.Add(result);
}
_logger.LogInformation("Decrypted {FileCount} files", decryptedFiles.Count);
return new BundleDecryptResult
{
Success = true,
DecryptedFiles = decryptedFiles
};
}
finally
{
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Decryption failed for run {RunId}", request.RunId);
return BundleDecryptResult.Failed($"Decryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public IReadOnlyList<string> ValidateOptions(BundleEncryptionOptions options)
{
var errors = new List<string>();
if (options.Mode == BundleEncryptionMode.None)
{
return errors;
}
if (options.Mode == BundleEncryptionMode.Age)
{
if (options.Recipients.Count == 0)
{
errors.Add("Age mode requires at least one recipient public key");
}
if (_ageKeyWrapper is null)
{
errors.Add("Age key wrapper not available");
}
else
{
foreach (var recipient in options.Recipients)
{
if (!_ageKeyWrapper.IsValidPublicKey(recipient))
{
errors.Add($"Invalid age public key: {recipient[..Math.Min(10, recipient.Length)]}...");
}
}
}
}
if (options.Mode == BundleEncryptionMode.AesGcmKms)
{
if (string.IsNullOrEmpty(options.KmsKeyId))
{
errors.Add("KMS mode requires a KMS key ID");
}
if (_kmsKeyWrapper is null)
{
errors.Add("KMS key wrapper not available");
}
}
if (string.IsNullOrWhiteSpace(options.AadFormat))
{
errors.Add("AAD format cannot be empty");
}
return errors;
}
private async Task<List<WrappedKeyRecipient>> WrapDekForRecipientsAsync(
byte[] dek,
BundleEncryptRequest request,
CancellationToken cancellationToken)
{
var recipients = new List<WrappedKeyRecipient>();
if (request.Options.Mode == BundleEncryptionMode.Age && _ageKeyWrapper is not null)
{
// Wrap for each age recipient (sorted for determinism)
foreach (var recipientKey in request.Options.Recipients.OrderBy(r => r, StringComparer.Ordinal))
{
var wrappedKey = _ageKeyWrapper.WrapKey(dek, recipientKey);
recipients.Add(new WrappedKeyRecipient
{
Type = "age",
Recipient = recipientKey,
WrappedKey = wrappedKey
});
}
}
if (request.Options.Mode == BundleEncryptionMode.AesGcmKms &&
_kmsKeyWrapper is not null &&
!string.IsNullOrEmpty(request.Options.KmsKeyId))
{
var context = new Dictionary<string, string>
{
["runId"] = request.RunId.ToString("D"),
["tenant"] = request.TenantId.ToString("D")
};
var result = await _kmsKeyWrapper.WrapKeyAsync(
dek, request.Options.KmsKeyId, context, cancellationToken);
recipients.Add(new WrappedKeyRecipient
{
Type = "kms",
KmsKeyId = request.Options.KmsKeyId,
WrappedKey = result.WrappedKey,
KeyId = result.KeyId,
Algorithm = result.Algorithm
});
}
return recipients;
}
private async Task<byte[]?> UnwrapDekAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken)
{
// Try age first if we have a private key
if (!string.IsNullOrEmpty(request.AgePrivateKey) && _ageKeyWrapper is not null)
{
            var ageRecipient = request.Metadata.Recipients
                .FirstOrDefault(r => string.Equals(r.Type, "age", StringComparison.OrdinalIgnoreCase));
if (ageRecipient is not null)
{
try
{
return _ageKeyWrapper.UnwrapKey(ageRecipient.WrappedKey, request.AgePrivateKey);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to unwrap with age key, trying next method");
}
}
}
// Try KMS
if (_kmsKeyWrapper is not null)
{
            var kmsRecipient = request.Metadata.Recipients
                .FirstOrDefault(r => string.Equals(r.Type, "kms", StringComparison.OrdinalIgnoreCase));
if (kmsRecipient is not null && !string.IsNullOrEmpty(kmsRecipient.KmsKeyId))
{
var context = new Dictionary<string, string>
{
["runId"] = request.RunId.ToString("D"),
["tenant"] = request.TenantId.ToString("D")
};
return await _kmsKeyWrapper.UnwrapKeyAsync(
kmsRecipient.WrappedKey, kmsRecipient.KmsKeyId, context, cancellationToken);
}
}
return null;
}
private async Task<EncryptedFileResult> EncryptFileAsync(
BundleFileToEncrypt file,
byte[] dek,
Guid runId,
string aadFormat,
CancellationToken cancellationToken)
{
// Read plaintext
var plaintext = await File.ReadAllBytesAsync(file.SourcePath, cancellationToken);
// Compute plaintext hash for verification
var plaintextHash = _cryptoHash.ComputeHashHexForPurpose(plaintext, HashPurpose.Content);
// Generate nonce
var nonce = RandomNumberGenerator.GetBytes(NonceSizeBytes);
// Compute AAD
var aad = ComputeAad(aadFormat, runId, file.RelativePath);
// Encrypt with AES-GCM
var ciphertext = new byte[plaintext.Length];
var tag = new byte[TagSizeBytes];
using (var aesGcm = new AesGcm(dek, TagSizeBytes))
{
aesGcm.Encrypt(nonce, plaintext, ciphertext, tag, aad);
}
// Write encrypted file: nonce + ciphertext + tag
var encryptedContent = new byte[NonceSizeBytes + ciphertext.Length + TagSizeBytes];
nonce.CopyTo(encryptedContent, 0);
ciphertext.CopyTo(encryptedContent, NonceSizeBytes);
tag.CopyTo(encryptedContent, NonceSizeBytes + ciphertext.Length);
// Ensure directory exists
var destDir = Path.GetDirectoryName(file.DestinationPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
await File.WriteAllBytesAsync(file.DestinationPath, encryptedContent, cancellationToken);
return new EncryptedFileResult
{
RelativePath = file.RelativePath,
EncryptedPath = file.DestinationPath,
Nonce = Convert.ToBase64String(nonce),
EncryptedSizeBytes = encryptedContent.Length,
PlaintextHash = plaintextHash
};
}
private async Task<DecryptedFileResult> DecryptFileAsync(
BundleFileToDecrypt file,
byte[] dek,
Guid runId,
string aadFormat,
CancellationToken cancellationToken)
{
// Read encrypted file
var encryptedContent = await File.ReadAllBytesAsync(file.SourcePath, cancellationToken);
if (encryptedContent.Length < NonceSizeBytes + TagSizeBytes)
{
throw new CryptographicException($"Encrypted file too small: {file.RelativePath}");
}
// Extract nonce, ciphertext, and tag
var nonce = encryptedContent.AsSpan(0, NonceSizeBytes);
var ciphertextLength = encryptedContent.Length - NonceSizeBytes - TagSizeBytes;
var ciphertext = encryptedContent.AsSpan(NonceSizeBytes, ciphertextLength);
var tag = encryptedContent.AsSpan(NonceSizeBytes + ciphertextLength, TagSizeBytes);
// Validate nonce matches expected
var expectedNonce = Convert.FromBase64String(file.Nonce);
if (!nonce.SequenceEqual(expectedNonce))
{
throw new CryptographicException($"Nonce mismatch for {file.RelativePath}");
}
// Compute AAD
var aad = ComputeAad(aadFormat, runId, file.RelativePath);
// Decrypt
var plaintext = new byte[ciphertextLength];
using (var aesGcm = new AesGcm(dek, TagSizeBytes))
{
aesGcm.Decrypt(nonce, ciphertext, tag, plaintext, aad);
}
// Ensure directory exists
var destDir = Path.GetDirectoryName(file.DestinationPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
await File.WriteAllBytesAsync(file.DestinationPath, plaintext, cancellationToken);
// Verify hash if expected
var computedHash = _cryptoHash.ComputeHashHexForPurpose(plaintext, HashPurpose.Content);
var hashVerified = string.IsNullOrEmpty(file.ExpectedHash) ||
string.Equals(computedHash, file.ExpectedHash, StringComparison.OrdinalIgnoreCase);
if (!hashVerified)
{
_logger.LogWarning(
"Hash mismatch for {RelativePath}: expected {Expected}, got {Computed}",
file.RelativePath, file.ExpectedHash, computedHash);
}
return new DecryptedFileResult
{
RelativePath = file.RelativePath,
DecryptedPath = file.DestinationPath,
HashVerified = hashVerified,
ComputedHash = computedHash
};
}
private static byte[] ComputeAad(string aadFormat, Guid runId, string relativePath)
{
var aadString = aadFormat
.Replace("{runId}", runId.ToString("D"), StringComparison.OrdinalIgnoreCase)
.Replace("{relativePath}", relativePath, StringComparison.OrdinalIgnoreCase);
return Encoding.UTF8.GetBytes(aadString);
}
}
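The on-disk layout produced above is nonce || ciphertext || tag, with the AAD binding each file to its run id and relative path, so a ciphertext cannot be silently swapped between bundles or paths. A standalone sketch of decrypting that framing, assuming the 32-byte DEK has already been unwrapped:
static byte[] DecryptFramed(byte[] framed, byte[] dek, byte[] aad)
{
    const int NonceSize = 12, TagSize = 16;
    var nonce = framed.AsSpan(0, NonceSize);
    var ciphertext = framed.AsSpan(NonceSize, framed.Length - NonceSize - TagSize);
    var tag = framed.AsSpan(framed.Length - TagSize, TagSize);
    var plaintext = new byte[ciphertext.Length];
    using var aesGcm = new System.Security.Cryptography.AesGcm(dek, TagSize);
    aesGcm.Decrypt(nonce, ciphertext, tag, plaintext, aad); // throws on tamper or wrong AAD
    return plaintext;
}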

View File

@@ -0,0 +1,121 @@
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Service for encrypting and decrypting export bundle content.
/// </summary>
public interface IBundleEncryptionService
{
/// <summary>
/// Encrypts bundle files using the specified options.
/// </summary>
/// <param name="request">Encryption request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Encryption result with metadata for provenance.</returns>
Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts bundle files using the specified metadata.
/// </summary>
/// <param name="request">Decryption request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Decryption result with verification status.</returns>
Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates encryption options.
/// </summary>
/// <param name="options">Options to validate.</param>
/// <returns>List of validation errors (empty if valid).</returns>
IReadOnlyList<string> ValidateOptions(BundleEncryptionOptions options);
}
/// <summary>
/// Interface for age key operations (X25519).
/// </summary>
public interface IAgeKeyWrapper
{
/// <summary>
/// Wraps a DEK for an age recipient.
/// </summary>
/// <param name="dek">Data encryption key (32 bytes).</param>
/// <param name="recipientPublicKey">age public key (age1...).</param>
/// <returns>Wrapped key (base64).</returns>
string WrapKey(ReadOnlySpan<byte> dek, string recipientPublicKey);
/// <summary>
/// Unwraps a DEK using an age private key.
/// </summary>
/// <param name="wrappedKey">Wrapped key (base64).</param>
/// <param name="privateKey">age private key (AGE-SECRET-KEY-1...).</param>
/// <returns>Unwrapped DEK (32 bytes).</returns>
byte[] UnwrapKey(string wrappedKey, string privateKey);
/// <summary>
/// Validates an age public key format.
/// </summary>
bool IsValidPublicKey(string publicKey);
/// <summary>
/// Validates an age private key format.
/// </summary>
bool IsValidPrivateKey(string privateKey);
}
/// <summary>
/// Interface for KMS key wrapping operations.
/// </summary>
public interface IKmsKeyWrapper
{
/// <summary>
/// Wraps a DEK using KMS.
/// </summary>
/// <param name="dek">Data encryption key (32 bytes).</param>
/// <param name="kmsKeyId">KMS key identifier.</param>
/// <param name="encryptionContext">Encryption context for key binding.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Wrapped key result.</returns>
Task<KmsWrapResult> WrapKeyAsync(
ReadOnlyMemory<byte> dek,
string kmsKeyId,
IReadOnlyDictionary<string, string> encryptionContext,
CancellationToken cancellationToken = default);
/// <summary>
/// Unwraps a DEK using KMS.
/// </summary>
/// <param name="wrappedKey">Wrapped key (base64).</param>
/// <param name="kmsKeyId">KMS key identifier.</param>
/// <param name="encryptionContext">Encryption context for validation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Unwrapped DEK (32 bytes).</returns>
Task<byte[]> UnwrapKeyAsync(
string wrappedKey,
string kmsKeyId,
IReadOnlyDictionary<string, string> encryptionContext,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of KMS key wrapping.
/// </summary>
public sealed record KmsWrapResult
{
/// <summary>
/// Wrapped key (base64).
/// </summary>
public required string WrappedKey { get; init; }
/// <summary>
/// Algorithm used for wrapping.
/// </summary>
public required string Algorithm { get; init; }
/// <summary>
/// Key ID used (may differ from requested).
/// </summary>
public string? KeyId { get; init; }
}
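A minimal envelope-encryption sketch against IKmsKeyWrapper; the key alias and the encryption-context key are assumptions, and only the interface shape comes from this file:
static async Task<string> WrapDekAsync(IKmsKeyWrapper kms, byte[] dek, Guid tenantId)
{
    // Bind the wrapped DEK to the tenant via the KMS encryption context.
    var context = new Dictionary<string, string>
    {
        ["tenantId"] = tenantId.ToString("D") // hypothetical context key
    };
    var result = await kms.WrapKeyAsync(dek, "alias/export-bundle-kek", context);
    return result.WrappedKey; // base64; persisted alongside bundle metadata
}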

View File

@@ -0,0 +1,164 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Stub implementation of age key wrapper for testing.
/// In production, use a real age library or CLI-backed implementation.
/// </summary>
/// <remarks>
/// This stub simulates age-style key wrapping using X25519 + HKDF + ChaCha20-Poly1305.
/// For production use, integrate with the actual age specification or age CLI.
/// age public keys start with "age1" and private keys start with "AGE-SECRET-KEY-1".
/// </remarks>
public sealed class StubAgeKeyWrapper : IAgeKeyWrapper
{
private readonly ILogger<StubAgeKeyWrapper> _logger;
// For testing: store wrapped keys in a simple format
// Real implementation would use X25519 ECDH + HKDF + ChaCha20-Poly1305
private const string TestKeyPrefix = "age-wrapped:";
public StubAgeKeyWrapper(ILogger<StubAgeKeyWrapper> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string WrapKey(ReadOnlySpan<byte> dek, string recipientPublicKey)
{
if (!IsValidPublicKey(recipientPublicKey))
{
throw new ArgumentException("Invalid age public key format", nameof(recipientPublicKey));
}
// Stub: Simply encrypt with a derived key from the public key
// Real implementation would use X25519 ephemeral key exchange
_logger.LogDebug("Wrapping DEK for recipient {Recipient}", recipientPublicKey[..10] + "...");
// Use a simple wrapping scheme for testing:
// 1. Generate ephemeral key (simulated as random nonce)
// 2. Derive wrapping key from recipient public key (simulated)
// 3. Encrypt DEK with AES-256-GCM
var nonce = RandomNumberGenerator.GetBytes(12);
var tag = new byte[16];
var ciphertext = new byte[dek.Length];
// Derive a test wrapping key from the public key (NOT cryptographically secure - stub only)
var wrappingKey = SHA256.HashData(Encoding.UTF8.GetBytes(recipientPublicKey));
using var aesGcm = new AesGcm(wrappingKey, 16);
aesGcm.Encrypt(nonce, dek, ciphertext, tag);
// Format: nonce (12) + ciphertext (32) + tag (16) = 60 bytes
var wrapped = new byte[nonce.Length + ciphertext.Length + tag.Length];
nonce.CopyTo(wrapped, 0);
ciphertext.CopyTo(wrapped, nonce.Length);
tag.CopyTo(wrapped, nonce.Length + ciphertext.Length);
return TestKeyPrefix + Convert.ToBase64String(wrapped);
}
/// <inheritdoc />
public byte[] UnwrapKey(string wrappedKey, string privateKey)
{
if (!IsValidPrivateKey(privateKey))
{
throw new ArgumentException("Invalid age private key format", nameof(privateKey));
}
if (!wrappedKey.StartsWith(TestKeyPrefix, StringComparison.Ordinal))
{
throw new CryptographicException("Invalid wrapped key format");
}
_logger.LogDebug("Unwrapping DEK with private key");
var wrapped = Convert.FromBase64String(wrappedKey[TestKeyPrefix.Length..]);
if (wrapped.Length < 12 + 16) // nonce + tag minimum
{
throw new CryptographicException("Wrapped key too short");
}
var nonce = wrapped.AsSpan(0, 12);
var ciphertextLength = wrapped.Length - 12 - 16;
var ciphertext = wrapped.AsSpan(12, ciphertextLength);
var tag = wrapped.AsSpan(12 + ciphertextLength, 16);
// Derive wrapping key from corresponding public key
// In real implementation, derive from private key via X25519
var publicKey = DerivePublicKeyFromPrivate(privateKey);
var wrappingKey = SHA256.HashData(Encoding.UTF8.GetBytes(publicKey));
var dek = new byte[ciphertextLength];
using var aesGcm = new AesGcm(wrappingKey, 16);
aesGcm.Decrypt(nonce, ciphertext, tag, dek);
return dek;
}
/// <inheritdoc />
public bool IsValidPublicKey(string publicKey)
{
// age public keys: age1[58 bech32 chars]
return !string.IsNullOrEmpty(publicKey) &&
publicKey.StartsWith("age1", StringComparison.Ordinal) &&
publicKey.Length >= 59; // age1 + at least 55 chars
}
/// <inheritdoc />
public bool IsValidPrivateKey(string privateKey)
{
// age private keys: AGE-SECRET-KEY-1[58 bech32 chars]
return !string.IsNullOrEmpty(privateKey) &&
privateKey.StartsWith("AGE-SECRET-KEY-1", StringComparison.Ordinal) &&
privateKey.Length >= 74; // AGE-SECRET-KEY-1 + at least 58 chars
}
/// <summary>
/// Stub method to derive public key from private key.
/// Real implementation would use X25519 curve multiplication.
/// </summary>
private static string DerivePublicKeyFromPrivate(string privateKey)
{
// For testing: hash the private key to get a deterministic "public key"
// This is NOT how age works - it's just for stub testing
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(privateKey));
var suffix = Convert.ToHexString(hash).ToLowerInvariant()[..55];
return $"age1{suffix}";
}
}
/// <summary>
/// Test key pair generator for age-style keys.
/// </summary>
public static class TestAgeKeyGenerator
{
/// <summary>
/// Generates a test key pair for use with StubAgeKeyWrapper.
/// </summary>
/// <returns>A tuple of (publicKey, privateKey).</returns>
public static (string PublicKey, string PrivateKey) GenerateKeyPair()
{
var randomBytes = RandomNumberGenerator.GetBytes(32);
var hex = Convert.ToHexString(randomBytes).ToLowerInvariant();
// Generate a valid-looking private key
var privateKey = $"AGE-SECRET-KEY-1{hex}{hex[..26]}";
// Derive public key using same logic as StubAgeKeyWrapper
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(privateKey));
var suffix = Convert.ToHexString(hash).ToLowerInvariant()[..55];
var publicKey = $"age1{suffix}";
return (publicKey, privateKey);
}
}
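A round-trip with the stub, self-contained via NullLogger (test-only; the stub's key derivation means only matching generated pairs unwrap successfully):
using Microsoft.Extensions.Logging.Abstractions;
var (publicKey, privateKey) = TestAgeKeyGenerator.GenerateKeyPair();
var wrapper = new StubAgeKeyWrapper(NullLogger<StubAgeKeyWrapper>.Instance);
var dek = RandomNumberGenerator.GetBytes(32);
var wrapped = wrapper.WrapKey(dek, publicKey);
var unwrapped = wrapper.UnwrapKey(wrapped, privateKey);
// unwrapped now equals dek byte-for-byte.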

View File

@@ -0,0 +1,242 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Signature mode for export manifests.
/// </summary>
public enum ExportSignatureMode
{
/// <summary>
/// No signature.
/// </summary>
None = 0,
/// <summary>
/// Detached signature in a separate file.
/// </summary>
Detached = 1,
/// <summary>
/// Embedded signature within the manifest document.
/// </summary>
Embedded = 2,
/// <summary>
/// Both detached and embedded signatures.
/// </summary>
Both = 3
}
/// <summary>
/// Signing algorithm for export manifests.
/// </summary>
public enum ExportSigningAlgorithm
{
/// <summary>
/// HMAC-SHA256 signing.
/// </summary>
HmacSha256 = 1,
/// <summary>
/// ECDSA P-256 with SHA-256 (ES256).
/// </summary>
EcdsaP256Sha256 = 2,
/// <summary>
/// ECDSA P-384 with SHA-384 (ES384).
/// </summary>
EcdsaP384Sha384 = 3,
/// <summary>
/// RSA-PSS with SHA-256 (PS256).
/// </summary>
RsaPssSha256 = 4,
/// <summary>
/// EdDSA (Ed25519).
/// </summary>
EdDsa = 5
}
/// <summary>
/// Request to write an export manifest with optional signing.
/// </summary>
public sealed record ExportManifestWriteRequest(
Guid ExportId,
Guid TenantId,
ExportManifestContent ManifestContent,
ExportProvenanceContent ProvenanceContent,
ExportManifestSigningOptions? SigningOptions = null,
string? OutputDirectory = null,
IReadOnlyDictionary<string, string>? Metadata = null);
/// <summary>
/// Signing options for export manifests.
/// </summary>
public sealed record ExportManifestSigningOptions(
ExportSignatureMode Mode,
ExportSigningAlgorithm Algorithm,
string KeyId,
string? ProviderHint = null,
string? Secret = null);
/// <summary>
/// Content of an export manifest.
/// </summary>
public sealed record ExportManifestContent(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("profile")] ExportManifestProfile Profile,
[property: JsonPropertyName("scope")] ExportManifestScope Scope,
[property: JsonPropertyName("counts")] ExportManifestCounts Counts,
[property: JsonPropertyName("artifacts")] IReadOnlyList<ExportManifestArtifact> Artifacts,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("signature")] ExportManifestSignature? Signature = null);
/// <summary>
/// Export profile metadata in manifest.
/// </summary>
public sealed record ExportManifestProfile(
[property: JsonPropertyName("profileId")] string? ProfileId,
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("variant")] string? Variant);
/// <summary>
/// Scope metadata in manifest.
/// </summary>
public sealed record ExportManifestScope(
[property: JsonPropertyName("kinds")] IReadOnlyList<string> Kinds,
[property: JsonPropertyName("sourceRefs")] IReadOnlyList<string>? SourceRefs,
[property: JsonPropertyName("timeWindow")] ExportManifestTimeWindow? TimeWindow,
[property: JsonPropertyName("ecosystems")] IReadOnlyList<string>? Ecosystems);
/// <summary>
/// Time window in manifest scope.
/// </summary>
public sealed record ExportManifestTimeWindow(
[property: JsonPropertyName("from")] DateTimeOffset From,
[property: JsonPropertyName("to")] DateTimeOffset To);
/// <summary>
/// Counts in manifest.
/// </summary>
public sealed record ExportManifestCounts(
[property: JsonPropertyName("total")] int Total,
[property: JsonPropertyName("successful")] int Successful,
[property: JsonPropertyName("failed")] int Failed,
[property: JsonPropertyName("skipped")] int Skipped,
[property: JsonPropertyName("byKind")] IReadOnlyDictionary<string, int> ByKind);
/// <summary>
/// Artifact entry in manifest.
/// </summary>
public sealed record ExportManifestArtifact(
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sha256")] string Sha256,
[property: JsonPropertyName("sizeBytes")] long SizeBytes,
[property: JsonPropertyName("contentType")] string ContentType,
[property: JsonPropertyName("category")] string? Category);
/// <summary>
/// Embedded signature in manifest.
/// </summary>
public sealed record ExportManifestSignature(
[property: JsonPropertyName("algorithm")] string Algorithm,
[property: JsonPropertyName("keyId")] string KeyId,
[property: JsonPropertyName("value")] string Value,
[property: JsonPropertyName("signedAt")] DateTimeOffset SignedAt,
[property: JsonPropertyName("provider")] string? Provider);
/// <summary>
/// Content of export provenance document.
/// </summary>
public sealed record ExportProvenanceContent(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("subjects")] IReadOnlyList<ExportProvenanceSubject> Subjects,
[property: JsonPropertyName("inputs")] ExportProvenanceInputs Inputs,
[property: JsonPropertyName("builder")] ExportProvenanceBuilder Builder,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("signature")] ExportManifestSignature? Signature = null);
/// <summary>
/// Subject in provenance document.
/// </summary>
public sealed record ExportProvenanceSubject(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("digest")] IReadOnlyDictionary<string, string> Digest);
/// <summary>
/// Inputs in provenance document.
/// </summary>
public sealed record ExportProvenanceInputs(
[property: JsonPropertyName("profileId")] string? ProfileId,
[property: JsonPropertyName("scopeKinds")] IReadOnlyList<string> ScopeKinds,
[property: JsonPropertyName("sourceRefs")] IReadOnlyList<string>? SourceRefs,
[property: JsonPropertyName("correlationId")] string? CorrelationId);
/// <summary>
/// Builder info in provenance document.
/// </summary>
public sealed record ExportProvenanceBuilder(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("buildTimestamp")] DateTimeOffset? BuildTimestamp);
/// <summary>
/// Result of writing export manifest.
/// </summary>
public sealed record ExportManifestWriteResult
{
public bool Success { get; init; }
public string? ErrorMessage { get; init; }
public string? ManifestPath { get; init; }
public string? ManifestJson { get; init; }
public string? ProvenancePath { get; init; }
public string? ProvenanceJson { get; init; }
public string? DetachedSignaturePath { get; init; }
public ExportManifestSignature? ManifestSignature { get; init; }
public ExportManifestSignature? ProvenanceSignature { get; init; }
public static ExportManifestWriteResult Succeeded(
string manifestPath,
string manifestJson,
string provenancePath,
string provenanceJson,
string? detachedSignaturePath = null,
ExportManifestSignature? manifestSignature = null,
ExportManifestSignature? provenanceSignature = null) =>
new()
{
Success = true,
ManifestPath = manifestPath,
ManifestJson = manifestJson,
ProvenancePath = provenancePath,
ProvenanceJson = provenanceJson,
DetachedSignaturePath = detachedSignaturePath,
ManifestSignature = manifestSignature,
ProvenanceSignature = provenanceSignature
};
public static ExportManifestWriteResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// DSSE envelope for detached signatures.
/// </summary>
public sealed record ExportManifestDsseEnvelope(
[property: JsonPropertyName("payloadType")] string PayloadType,
[property: JsonPropertyName("payload")] string Payload,
[property: JsonPropertyName("signatures")] IReadOnlyList<ExportManifestDsseSignatureEntry> Signatures);
/// <summary>
/// Signature entry in DSSE envelope.
/// </summary>
public sealed record ExportManifestDsseSignatureEntry(
[property: JsonPropertyName("sig")] string Signature,
[property: JsonPropertyName("keyid")] string KeyId);
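A hedged illustration of how these records compose; the key ID and secret are placeholders, not values used anywhere in this codebase:
var signingOptions = new ExportManifestSigningOptions(
    Mode: ExportSignatureMode.Both,            // embed and write a detached DSSE file
    Algorithm: ExportSigningAlgorithm.HmacSha256,
    KeyId: "export-hmac-key-1",                // placeholder key ID
    Secret: "not-a-real-secret");              // HMAC requires a shared secret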

View File

@@ -0,0 +1,397 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Default implementation of export manifest writer with KMS and HMAC signing support.
/// </summary>
public sealed class ExportManifestWriter : IExportManifestWriter
{
private const string ManifestPayloadType = "application/vnd.stellaops.export.manifest+json";
private const string ProvenancePayloadType = "application/vnd.stellaops.export.provenance+json";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ILogger<ExportManifestWriter> _logger;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
private readonly ICryptoHmac? _cryptoHmac;
private readonly TimeProvider _timeProvider;
public ExportManifestWriter(
ILogger<ExportManifestWriter> logger,
ICryptoProviderRegistry? cryptoRegistry = null,
ICryptoHmac? cryptoHmac = null,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoRegistry = cryptoRegistry;
_cryptoHmac = cryptoHmac;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
public async Task<ExportManifestWriteResult> WriteAsync(
ExportManifestWriteRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
_logger.LogDebug("Writing export manifest for export {ExportId}", request.ExportId);
// Serialize manifest and provenance
var manifestJson = JsonSerializer.Serialize(request.ManifestContent, SerializerOptions);
var provenanceJson = JsonSerializer.Serialize(request.ProvenanceContent, SerializerOptions);
ExportManifestSignature? manifestSig = null;
ExportManifestSignature? provenanceSig = null;
string? detachedSignaturePath = null;
// Apply signing if requested
if (request.SigningOptions is not null && request.SigningOptions.Mode != ExportSignatureMode.None)
{
var signer = CreateSigner(request.SigningOptions);
// Sign manifest
var manifestEnvelope = await SignContentAsync(
manifestJson,
ManifestPayloadType,
signer,
cancellationToken);
// Sign provenance
var provenanceEnvelope = await SignContentAsync(
provenanceJson,
ProvenancePayloadType,
signer,
cancellationToken);
var signedAt = _timeProvider.GetUtcNow();
manifestSig = new ExportManifestSignature(
signer.Algorithm,
signer.KeyId,
manifestEnvelope.Signatures[0].Signature,
signedAt,
signer.Provider);
provenanceSig = new ExportManifestSignature(
signer.Algorithm,
signer.KeyId,
provenanceEnvelope.Signatures[0].Signature,
signedAt,
signer.Provider);
// Write detached signatures if requested
if (request.SigningOptions.Mode is ExportSignatureMode.Detached or ExportSignatureMode.Both)
{
if (!string.IsNullOrWhiteSpace(request.OutputDirectory))
{
var signaturePath = Path.Combine(
request.OutputDirectory,
$"export-{request.ExportId:N}-signatures.dsse.json");
var combinedEnvelope = new
{
manifestSignature = manifestEnvelope,
provenanceSignature = provenanceEnvelope,
signedAt,
keyId = signer.KeyId,
algorithm = signer.Algorithm,
provider = signer.Provider
};
await File.WriteAllTextAsync(
signaturePath,
JsonSerializer.Serialize(combinedEnvelope, SerializerOptions),
cancellationToken);
detachedSignaturePath = signaturePath;
}
}
// Embed signatures if requested
if (request.SigningOptions.Mode is ExportSignatureMode.Embedded or ExportSignatureMode.Both)
{
var manifestWithSig = request.ManifestContent with { Signature = manifestSig };
var provenanceWithSig = request.ProvenanceContent with { Signature = provenanceSig };
manifestJson = JsonSerializer.Serialize(manifestWithSig, SerializerOptions);
provenanceJson = JsonSerializer.Serialize(provenanceWithSig, SerializerOptions);
}
}
// Write files if output directory specified
string manifestPath = string.Empty;
string provenancePath = string.Empty;
if (!string.IsNullOrWhiteSpace(request.OutputDirectory))
{
Directory.CreateDirectory(request.OutputDirectory);
manifestPath = Path.Combine(request.OutputDirectory, "export-manifest.json");
provenancePath = Path.Combine(request.OutputDirectory, "export-provenance.json");
await File.WriteAllTextAsync(manifestPath, manifestJson, cancellationToken);
await File.WriteAllTextAsync(provenancePath, provenanceJson, cancellationToken);
}
_logger.LogInformation(
"Export manifest written for {ExportId} with signature mode {Mode}",
request.ExportId,
request.SigningOptions?.Mode ?? ExportSignatureMode.None);
return ExportManifestWriteResult.Succeeded(
manifestPath,
manifestJson,
provenancePath,
provenanceJson,
detachedSignaturePath,
manifestSig,
provenanceSig);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to write export manifest for {ExportId}", request.ExportId);
return ExportManifestWriteResult.Failed($"Failed to write manifest: {ex.Message}");
}
}
/// <inheritdoc/>
public async Task<ExportManifestDsseEnvelope> SignManifestAsync(
string manifestJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(manifestJson);
ArgumentNullException.ThrowIfNull(signingOptions);
var signer = CreateSigner(signingOptions);
return await SignContentAsync(manifestJson, ManifestPayloadType, signer, cancellationToken);
}
/// <inheritdoc/>
public async Task<ExportManifestDsseEnvelope> SignProvenanceAsync(
string provenanceJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(provenanceJson);
ArgumentNullException.ThrowIfNull(signingOptions);
var signer = CreateSigner(signingOptions);
return await SignContentAsync(provenanceJson, ProvenancePayloadType, signer, cancellationToken);
}
/// <inheritdoc/>
public async Task<bool> VerifySignatureAsync(
string content,
ExportManifestDsseEnvelope envelope,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(signingOptions);
try
{
var signer = CreateSigner(signingOptions);
var pae = BuildPae(envelope.PayloadType, Encoding.UTF8.GetBytes(content));
foreach (var sig in envelope.Signatures)
{
var sigBytes = Convert.FromBase64String(sig.Signature);
if (await signer.VerifyAsync(pae, sigBytes, cancellationToken))
{
return true;
}
}
return false;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Signature verification failed");
return false;
}
}
private IExportManifestSigner CreateSigner(ExportManifestSigningOptions options)
{
return options.Algorithm switch
{
ExportSigningAlgorithm.HmacSha256 => CreateHmacSigner(options),
ExportSigningAlgorithm.EcdsaP256Sha256 => CreateKmsSigner(options, "ES256"),
ExportSigningAlgorithm.EcdsaP384Sha384 => CreateKmsSigner(options, "ES384"),
ExportSigningAlgorithm.RsaPssSha256 => CreateKmsSigner(options, "PS256"),
ExportSigningAlgorithm.EdDsa => CreateKmsSigner(options, "EdDSA"),
_ => throw new NotSupportedException($"Signing algorithm '{options.Algorithm}' is not supported.")
};
}
private IExportManifestSigner CreateHmacSigner(ExportManifestSigningOptions options)
{
if (_cryptoHmac is null)
{
throw new InvalidOperationException("HMAC signing requires ICryptoHmac to be configured.");
}
if (string.IsNullOrWhiteSpace(options.Secret))
{
throw new ArgumentException("HMAC signing requires a secret key.", nameof(options));
}
return new HmacExportManifestSigner(_cryptoHmac, options.Secret, options.KeyId);
}
private IExportManifestSigner CreateKmsSigner(ExportManifestSigningOptions options, string algorithmId)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"KMS signing requires ICryptoProviderRegistry to be configured.");
}
var keyRef = new CryptoKeyReference(options.KeyId, options.ProviderHint);
var resolution = _cryptoRegistry.ResolveSigner(
CryptoCapability.Signing,
algorithmId,
keyRef,
options.ProviderHint);
return new KmsExportManifestSigner(resolution.Signer, resolution.ProviderName);
}
private async Task<ExportManifestDsseEnvelope> SignContentAsync(
string content,
string payloadType,
IExportManifestSigner signer,
CancellationToken cancellationToken)
{
var contentBytes = Encoding.UTF8.GetBytes(content);
var pae = BuildPae(payloadType, contentBytes);
var signature = await signer.SignAsync(pae, cancellationToken);
var signatureBase64 = Convert.ToBase64String(signature);
return new ExportManifestDsseEnvelope(
payloadType,
Convert.ToBase64String(contentBytes),
[new ExportManifestDsseSignatureEntry(signatureBase64, signer.KeyId)]);
}
/// <summary>
/// Builds DSSE Pre-Authentication Encoding (PAE).
/// PAE = "DSSEv1" + SP + LEN(payloadType) + SP + payloadType + SP + LEN(payload) + SP + payload
/// </summary>
private static byte[] BuildPae(string payloadType, byte[] payload)
{
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
var preamble = Encoding.UTF8.GetBytes("DSSEv1 ");
var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture);
var payloadLenStr = payload.Length.ToString(CultureInfo.InvariantCulture);
var result = new List<byte>(
preamble.Length +
typeLenStr.Length + 1 +
typeBytes.Length + 1 +
payloadLenStr.Length + 1 +
payload.Length);
result.AddRange(preamble);
result.AddRange(Encoding.UTF8.GetBytes(typeLenStr));
result.Add(0x20); // space
result.AddRange(typeBytes);
result.Add(0x20); // space
result.AddRange(Encoding.UTF8.GetBytes(payloadLenStr));
result.Add(0x20); // space
result.AddRange(payload);
return result.ToArray();
}
}
/// <summary>
/// HMAC-based export manifest signer.
/// </summary>
internal sealed class HmacExportManifestSigner : IExportManifestSigner
{
private readonly ICryptoHmac _cryptoHmac;
private readonly byte[] _key;
public HmacExportManifestSigner(ICryptoHmac cryptoHmac, string secret, string keyId)
{
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
if (string.IsNullOrWhiteSpace(secret))
{
throw new ArgumentException("Secret cannot be empty.", nameof(secret));
}
_key = Encoding.UTF8.GetBytes(secret);
KeyId = string.IsNullOrWhiteSpace(keyId) ? "hmac-sha256" : keyId;
}
public string KeyId { get; }
public string Algorithm => "HMAC-SHA256";
public string? Provider => "HMAC";
public Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
var signature = _cryptoHmac.ComputeHmacForPurpose(_key, data.Span, HmacPurpose.Signing);
return Task.FromResult(signature);
}
public Task<bool> VerifyAsync(
ReadOnlyMemory<byte> data,
ReadOnlyMemory<byte> signature,
CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
var expected = _cryptoHmac.ComputeHmacForPurpose(_key, data.Span, HmacPurpose.Signing);
// Compare in constant time to avoid leaking timing information about the MAC.
return Task.FromResult(System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(expected, signature.Span));
}
}
/// <summary>
/// KMS-backed export manifest signer using ICryptoProviderRegistry.
/// </summary>
internal sealed class KmsExportManifestSigner : IExportManifestSigner
{
private readonly ICryptoSigner _signer;
public KmsExportManifestSigner(ICryptoSigner signer, string providerName)
{
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
Provider = providerName;
}
public string KeyId => _signer.KeyId;
public string Algorithm => _signer.AlgorithmId;
public string? Provider { get; }
public async Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
return await _signer.SignAsync(data, cancellationToken);
}
public async Task<bool> VerifyAsync(
ReadOnlyMemory<byte> data,
ReadOnlyMemory<byte> signature,
CancellationToken cancellationToken = default)
{
return await _signer.VerifyAsync(data, signature, cancellationToken);
}
}
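A worked illustration of the DSSE PAE produced by BuildPae above (values computed by hand, not captured from this code):
// BuildPae("application/vnd.in-toto+json", Encoding.UTF8.GetBytes("hello"))
// encodes, as UTF-8 bytes, the string:
//   "DSSEv1 28 application/vnd.in-toto+json 5 hello"
// i.e. "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload,
// with lengths rendered as invariant-culture decimal digits.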

View File

@@ -0,0 +1,93 @@
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Interface for writing export manifests and provenance documents with optional signing.
/// </summary>
public interface IExportManifestWriter
{
/// <summary>
/// Writes manifest and provenance documents with optional signing.
/// </summary>
/// <param name="request">The write request with manifest/provenance content and signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Write result with paths and signatures.</returns>
Task<ExportManifestWriteResult> WriteAsync(
ExportManifestWriteRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Signs manifest content and returns a DSSE envelope.
/// </summary>
/// <param name="manifestJson">The manifest JSON to sign.</param>
/// <param name="signingOptions">Signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>DSSE envelope with signature.</returns>
Task<ExportManifestDsseEnvelope> SignManifestAsync(
string manifestJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
/// <summary>
/// Signs provenance content and returns a DSSE envelope.
/// </summary>
/// <param name="provenanceJson">The provenance JSON to sign.</param>
/// <param name="signingOptions">Signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>DSSE envelope with signature.</returns>
Task<ExportManifestDsseEnvelope> SignProvenanceAsync(
string provenanceJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE signature against content.
/// </summary>
/// <param name="content">The original content that was signed.</param>
/// <param name="envelope">The DSSE envelope with signature.</param>
/// <param name="signingOptions">Signing options for verification.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if signature is valid.</returns>
Task<bool> VerifySignatureAsync(
string content,
ExportManifestDsseEnvelope envelope,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for export manifest signing operations.
/// </summary>
public interface IExportManifestSigner
{
/// <summary>
/// Signs data using the configured algorithm and key.
/// </summary>
/// <param name="data">Data to sign.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Signature bytes.</returns>
Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a signature against data.
/// </summary>
/// <param name="data">Original data.</param>
/// <param name="signature">Signature to verify.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if valid.</returns>
Task<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the key ID for this signer.
/// </summary>
string KeyId { get; }
/// <summary>
/// Gets the algorithm name for this signer.
/// </summary>
string Algorithm { get; }
/// <summary>
/// Gets the provider name for this signer.
/// </summary>
string? Provider { get; }
}

View File

@@ -0,0 +1,69 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Extension methods for registering manifest writer services.
/// </summary>
public static class ManifestServiceCollectionExtensions
{
/// <summary>
/// Registers the export manifest writer with default configuration.
/// </summary>
public static IServiceCollection AddExportManifestWriter(this IServiceCollection services)
{
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var cryptoRegistry = sp.GetService<ICryptoProviderRegistry>();
var cryptoHmac = sp.GetService<ICryptoHmac>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry, cryptoHmac, timeProvider);
});
return services;
}
/// <summary>
/// Registers the export manifest writer with HMAC signing support only.
/// </summary>
public static IServiceCollection AddExportManifestWriterWithHmac(
this IServiceCollection services,
ICryptoHmac cryptoHmac)
{
ArgumentNullException.ThrowIfNull(cryptoHmac);
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry: null, cryptoHmac, timeProvider);
});
return services;
}
/// <summary>
/// Registers the export manifest writer with KMS signing support only.
/// </summary>
public static IServiceCollection AddExportManifestWriterWithKms(
this IServiceCollection services,
ICryptoProviderRegistry cryptoRegistry)
{
ArgumentNullException.ThrowIfNull(cryptoRegistry);
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry, cryptoHmac: null, timeProvider);
});
return services;
}
}
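Typical composition-root usage, sketched under the assumption that logging is registered and that any crypto services come from elsewhere in the host (the default registration resolves them optionally):
var services = new ServiceCollection();
services.AddLogging();
services.AddExportManifestWriter();
var writer = services.BuildServiceProvider()
    .GetRequiredService<IExportManifestWriter>();
// Signing requests will throw at call time if the required crypto backend
// (ICryptoHmac for HMAC, ICryptoProviderRegistry for KMS) was not registered.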

View File

@@ -0,0 +1,305 @@
using System.Collections.Concurrent;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// In-memory implementation of the base manifest store for testing and simple deployments.
/// </summary>
public sealed class InMemoryMirrorBaseManifestStore : IMirrorBaseManifestStore
{
private readonly ConcurrentDictionary<string, ManifestRecord> _manifests = new(StringComparer.OrdinalIgnoreCase);
private static string GetKey(Guid runId, Guid tenantId) => $"{tenantId:D}:{runId:D}";
/// <inheritdoc />
public Task<IReadOnlyList<MirrorBaseManifestEntry>?> GetBaseManifestEntriesAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
if (_manifests.TryGetValue(key, out var record))
{
return Task.FromResult<IReadOnlyList<MirrorBaseManifestEntry>?>(record.Entries);
}
return Task.FromResult<IReadOnlyList<MirrorBaseManifestEntry>?>(null);
}
/// <inheritdoc />
public Task<string?> GetManifestDigestAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
if (_manifests.TryGetValue(key, out var record))
{
return Task.FromResult<string?>(record.Digest);
}
return Task.FromResult<string?>(null);
}
/// <inheritdoc />
public Task SaveManifestEntriesAsync(
Guid runId,
Guid tenantId,
string manifestDigest,
IReadOnlyList<MirrorBaseManifestEntry> entries,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
_manifests[key] = new ManifestRecord(manifestDigest, entries);
return Task.CompletedTask;
}
/// <summary>
/// Clears all stored manifests (for testing).
/// </summary>
public void Clear() => _manifests.Clear();
/// <summary>
/// Gets the number of stored manifests.
/// </summary>
public int Count => _manifests.Count;
private sealed record ManifestRecord(string Digest, IReadOnlyList<MirrorBaseManifestEntry> Entries);
}
/// <summary>
/// In-memory implementation of the content store for testing and simple deployments.
/// </summary>
public sealed class InMemoryMirrorContentStore : IMirrorContentStore
{
private readonly ConcurrentDictionary<string, byte[]> _content = new(StringComparer.OrdinalIgnoreCase);
private readonly ICryptoHash _cryptoHash;
private readonly string _tempDirectory;
public InMemoryMirrorContentStore(ICryptoHash cryptoHash, string? tempDirectory = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_tempDirectory = tempDirectory ?? Path.Combine(Path.GetTempPath(), "mirror-content-store");
Directory.CreateDirectory(_tempDirectory);
}
/// <inheritdoc />
public Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default)
{
return Task.FromResult(_content.ContainsKey(contentHash));
}
/// <inheritdoc />
public Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default)
{
if (_content.TryGetValue(contentHash, out var bytes))
{
return Task.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
}
return Task.FromResult<Stream?>(null);
}
/// <inheritdoc />
public async Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
using var ms = new MemoryStream();
await content.CopyToAsync(ms, cancellationToken);
var bytes = ms.ToArray();
var hash = _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
if (!string.IsNullOrEmpty(expectedHash) &&
!string.Equals(hash, expectedHash, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Content hash mismatch: expected {expectedHash}, computed {hash}");
}
_content[hash] = bytes;
// Also write to temp file for GetLocalPath
var localPath = Path.Combine(_tempDirectory, hash);
await File.WriteAllBytesAsync(localPath, bytes, cancellationToken);
return hash;
}
/// <inheritdoc />
public string? GetLocalPath(string contentHash)
{
var path = Path.Combine(_tempDirectory, contentHash);
return File.Exists(path) ? path : null;
}
/// <summary>
/// Clears all stored content (for testing).
/// </summary>
public void Clear()
{
_content.Clear();
if (Directory.Exists(_tempDirectory))
{
foreach (var file in Directory.GetFiles(_tempDirectory))
{
try { File.Delete(file); } catch { /* ignore */ }
}
}
}
/// <summary>
/// Gets the number of stored content items.
/// </summary>
public int Count => _content.Count;
}
/// <summary>
/// Filesystem-based implementation of the content store for production use.
/// Uses content-addressable storage with SHA-256 hashes.
/// </summary>
public sealed class FileSystemMirrorContentStore : IMirrorContentStore, IDisposable
{
private readonly string _storePath;
private readonly ICryptoHash _cryptoHash;
private readonly bool _ownsDirectory;
public FileSystemMirrorContentStore(string storePath, ICryptoHash cryptoHash, bool createIfMissing = true)
{
_storePath = storePath ?? throw new ArgumentNullException(nameof(storePath));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
if (!Directory.Exists(_storePath))
{
if (createIfMissing)
{
Directory.CreateDirectory(_storePath);
_ownsDirectory = true;
}
else
{
throw new DirectoryNotFoundException($"Content store directory not found: {_storePath}");
}
}
}
/// <inheritdoc />
public Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default)
{
var path = GetContentPath(contentHash);
return Task.FromResult(File.Exists(path));
}
/// <inheritdoc />
public Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default)
{
var path = GetContentPath(contentHash);
if (!File.Exists(path))
{
return Task.FromResult<Stream?>(null);
}
var stream = new FileStream(
path,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 64 * 1024,
FileOptions.Asynchronous | FileOptions.SequentialScan);
return Task.FromResult<Stream?>(stream);
}
/// <inheritdoc />
public async Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
// Write to temp file first
var tempPath = Path.Combine(_storePath, $".tmp-{Guid.NewGuid():N}");
try
{
await using (var tempStream = new FileStream(
tempPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 64 * 1024,
FileOptions.Asynchronous))
{
await content.CopyToAsync(tempStream, cancellationToken);
}
// Compute hash (buffers the whole file; acceptable for typical bundle item
// sizes, though very large artifacts would benefit from a streaming hash)
var bytes = await File.ReadAllBytesAsync(tempPath, cancellationToken);
var hash = _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
if (!string.IsNullOrEmpty(expectedHash) &&
!string.Equals(hash, expectedHash, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Content hash mismatch: expected {expectedHash}, computed {hash}");
}
// Move to final location
var finalPath = GetContentPath(hash);
EnsureDirectoryExists(finalPath);
if (File.Exists(finalPath))
{
// Content already exists, just delete temp
File.Delete(tempPath);
}
else
{
File.Move(tempPath, finalPath);
}
return hash;
}
catch
{
try { File.Delete(tempPath); } catch { /* ignore */ }
throw;
}
}
/// <inheritdoc />
public string? GetLocalPath(string contentHash)
{
var path = GetContentPath(contentHash);
return File.Exists(path) ? path : null;
}
private string GetContentPath(string contentHash)
{
// Use sharded directory structure: first 2 chars / next 2 chars / full hash
if (contentHash.Length < 4)
{
return Path.Combine(_storePath, contentHash);
}
return Path.Combine(
_storePath,
contentHash[..2],
contentHash[2..4],
contentHash);
}
private static void EnsureDirectoryExists(string filePath)
{
var dir = Path.GetDirectoryName(filePath);
if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir))
{
Directory.CreateDirectory(dir);
}
}
public void Dispose()
{
// Only clean up if we created the directory and it's a temp directory
if (_ownsDirectory && _storePath.Contains("tmp", StringComparison.OrdinalIgnoreCase))
{
try { Directory.Delete(_storePath, recursive: true); } catch { /* ignore */ }
}
}
}
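The sharded layout keeps any single directory small. For an assumed store root and a hash beginning "ab12...", GetContentPath resolves as follows:
// storePath   = "/var/lib/mirror-store"   (assumed root)
// contentHash = "ab12cd34..."             (hex SHA-256)
//   => /var/lib/mirror-store/ab/12/ab12cd34...
// Two two-hex-character shard levels give up to 65,536 leaf directories.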

View File

@@ -0,0 +1,304 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Request to compute a delta between a base export and current items.
/// </summary>
public sealed record MirrorDeltaComputeRequest
{
/// <summary>
/// Base export run ID to compare against.
/// </summary>
public required Guid BaseRunId { get; init; }
/// <summary>
/// Base manifest digest for validation.
/// </summary>
public required string BaseManifestDigest { get; init; }
/// <summary>
/// Tenant ID for scoping.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Current items to compare with base.
/// </summary>
public required IReadOnlyList<MirrorDeltaItem> CurrentItems { get; init; }
/// <summary>
/// Whether to reset the baseline (include all items regardless of changes).
/// </summary>
public bool ResetBaseline { get; init; }
}
/// <summary>
/// Item for delta comparison.
/// </summary>
public sealed record MirrorDeltaItem
{
/// <summary>
/// Unique item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Content-addressable hash (SHA-256) of the item.
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// Path within the bundle.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Last modified timestamp.
/// </summary>
public DateTimeOffset? ModifiedAt { get; init; }
/// <summary>
/// Source path to the item content.
/// </summary>
public string? SourcePath { get; init; }
}
/// <summary>
/// Result of computing a delta.
/// </summary>
public sealed record MirrorDeltaComputeResult
{
/// <summary>
/// Whether the computation succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Items that were added since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaItem> AddedItems { get; init; } = [];
/// <summary>
/// Items that were changed since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaChangeItem> ChangedItems { get; init; } = [];
/// <summary>
/// Items that were removed since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaRemovedItem> RemovedItems { get; init; } = [];
/// <summary>
/// Items that are unchanged and can be skipped (content-addressed reuse).
/// </summary>
public IReadOnlyList<MirrorDeltaItem> UnchangedItems { get; init; } = [];
/// <summary>
/// Error message if computation failed.
/// </summary>
public string? ErrorMessage { get; init; }
/// <summary>
/// Base export ID used for comparison.
/// </summary>
public string? BaseExportId { get; init; }
/// <summary>
/// Base manifest digest used for comparison.
/// </summary>
public string? BaseManifestDigest { get; init; }
/// <summary>
/// Whether baseline was reset.
/// </summary>
public bool BaselineReset { get; init; }
/// <summary>
/// Counts by category.
/// </summary>
public MirrorDeltaCategoryCounts Counts { get; init; } = new();
public static MirrorDeltaComputeResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// A changed item with both old and new hashes.
/// </summary>
public sealed record MirrorDeltaChangeItem
{
/// <summary>
/// The current item state.
/// </summary>
public required MirrorDeltaItem Current { get; init; }
/// <summary>
/// Hash of the previous version.
/// </summary>
public required string PreviousContentHash { get; init; }
/// <summary>
/// Previous size in bytes.
/// </summary>
public long PreviousSizeBytes { get; init; }
}
/// <summary>
/// A removed item.
/// </summary>
public sealed record MirrorDeltaRemovedItem
{
/// <summary>
/// Item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the removed item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Bundle path that was removed.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Hash of the content that was removed.
/// </summary>
public required string ContentHash { get; init; }
}
/// <summary>
/// Counts of delta changes by category.
/// </summary>
public sealed record MirrorDeltaCategoryCounts
{
[JsonPropertyName("added")]
public MirrorBundleDeltaCounts Added { get; init; } = new(0, 0, 0);
[JsonPropertyName("changed")]
public MirrorBundleDeltaCounts Changed { get; init; } = new(0, 0, 0);
[JsonPropertyName("removed")]
public MirrorBundleDeltaCounts Removed { get; init; } = new(0, 0, 0);
[JsonPropertyName("unchanged")]
public MirrorBundleDeltaCounts Unchanged { get; init; } = new(0, 0, 0);
}
/// <summary>
/// Manifest entry from a base export for delta comparison.
/// </summary>
public sealed record MirrorBaseManifestEntry
{
/// <summary>
/// Item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Bundle path.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Content hash (SHA-256).
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long SizeBytes { get; init; }
}
/// <summary>
/// Interface for retrieving base export manifests for delta comparison.
/// </summary>
public interface IMirrorBaseManifestStore
{
/// <summary>
/// Gets the manifest entries from a base export.
/// </summary>
/// <param name="runId">The base export run ID.</param>
/// <param name="tenantId">Tenant ID for scoping.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Manifest entries, or null if not found.</returns>
Task<IReadOnlyList<MirrorBaseManifestEntry>?> GetBaseManifestEntriesAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the manifest digest for a base export.
/// </summary>
Task<string?> GetManifestDigestAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves manifest entries for a completed export (for future delta comparisons).
/// </summary>
Task SaveManifestEntriesAsync(
Guid runId,
Guid tenantId,
string manifestDigest,
IReadOnlyList<MirrorBaseManifestEntry> entries,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for content-addressed storage for delta reuse.
/// </summary>
public interface IMirrorContentStore
{
/// <summary>
/// Checks if content with the given hash exists.
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if content exists.</returns>
Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default);
/// <summary>
/// Gets content by hash.
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Content stream, or null if not found.</returns>
Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default);
/// <summary>
/// Stores content and returns its hash.
/// </summary>
/// <param name="content">Content stream.</param>
/// <param name="expectedHash">Optional expected hash for validation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Hash of the stored content.</returns>
Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the local file path for cached content (for bundle building).
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <returns>File path if content is cached locally, null otherwise.</returns>
string? GetLocalPath(string contentHash);
}

View File

@@ -0,0 +1,213 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Service for computing deltas between mirror bundle exports.
/// </summary>
public interface IMirrorDeltaService
{
/// <summary>
/// Computes the delta between a base export and current items.
/// </summary>
/// <param name="request">Delta computation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Delta computation result.</returns>
Task<MirrorDeltaComputeResult> ComputeDeltaAsync(
MirrorDeltaComputeRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of the mirror delta service.
/// </summary>
public sealed class MirrorDeltaService : IMirrorDeltaService
{
private readonly IMirrorBaseManifestStore _manifestStore;
private readonly ILogger<MirrorDeltaService> _logger;
public MirrorDeltaService(
IMirrorBaseManifestStore manifestStore,
ILogger<MirrorDeltaService> logger)
{
_manifestStore = manifestStore ?? throw new ArgumentNullException(nameof(manifestStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<MirrorDeltaComputeResult> ComputeDeltaAsync(
MirrorDeltaComputeRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Computing delta against base export {BaseRunId} for tenant {TenantId}",
request.BaseRunId, request.TenantId);
// If reset baseline is requested, treat all items as added
if (request.ResetBaseline)
{
_logger.LogInformation("Baseline reset requested - all items will be included");
return CreateResetBaselineResult(request);
}
// Get base manifest entries
var baseEntries = await _manifestStore.GetBaseManifestEntriesAsync(
request.BaseRunId, request.TenantId, cancellationToken);
if (baseEntries is null || baseEntries.Count == 0)
{
_logger.LogWarning(
"Base manifest not found for run {BaseRunId}, treating as full export",
request.BaseRunId);
return CreateResetBaselineResult(request);
}
// Validate manifest digest
var storedDigest = await _manifestStore.GetManifestDigestAsync(
request.BaseRunId, request.TenantId, cancellationToken);
if (!string.IsNullOrEmpty(request.BaseManifestDigest) &&
!string.IsNullOrEmpty(storedDigest) &&
!string.Equals(request.BaseManifestDigest, storedDigest, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Manifest digest mismatch for base run {BaseRunId}: expected {Expected}, found {Found}",
request.BaseRunId, request.BaseManifestDigest, storedDigest);
return MirrorDeltaComputeResult.Failed(
$"Base manifest digest mismatch: expected {request.BaseManifestDigest}, found {storedDigest}");
}
// Build lookup for base entries by item ID
var baseByItemId = baseEntries.ToDictionary(
e => e.ItemId,
e => e,
StringComparer.OrdinalIgnoreCase);
// Build lookup for current items by item ID
var currentByItemId = request.CurrentItems.ToDictionary(
i => i.ItemId,
i => i,
StringComparer.OrdinalIgnoreCase);
var added = new List<MirrorDeltaItem>();
var changed = new List<MirrorDeltaChangeItem>();
var unchanged = new List<MirrorDeltaItem>();
var removed = new List<MirrorDeltaRemovedItem>();
// Find added and changed items
foreach (var current in request.CurrentItems)
{
if (!baseByItemId.TryGetValue(current.ItemId, out var baseEntry))
{
// New item
added.Add(current);
}
else if (!string.Equals(current.ContentHash, baseEntry.ContentHash, StringComparison.OrdinalIgnoreCase))
{
// Changed item (different content hash)
changed.Add(new MirrorDeltaChangeItem
{
Current = current,
PreviousContentHash = baseEntry.ContentHash,
PreviousSizeBytes = baseEntry.SizeBytes
});
}
else
{
// Unchanged item (same content hash)
unchanged.Add(current);
}
}
// Find removed items
foreach (var baseEntry in baseEntries)
{
if (!currentByItemId.ContainsKey(baseEntry.ItemId))
{
removed.Add(new MirrorDeltaRemovedItem
{
ItemId = baseEntry.ItemId,
Category = baseEntry.Category,
BundlePath = baseEntry.BundlePath,
ContentHash = baseEntry.ContentHash
});
}
}
var counts = ComputeCounts(added, changed, removed, unchanged);
_logger.LogInformation(
"Delta computed: {Added} added, {Changed} changed, {Removed} removed, {Unchanged} unchanged",
added.Count, changed.Count, removed.Count, unchanged.Count);
return new MirrorDeltaComputeResult
{
Success = true,
AddedItems = added,
ChangedItems = changed,
RemovedItems = removed,
UnchangedItems = unchanged,
BaseExportId = request.BaseRunId.ToString("D"),
BaseManifestDigest = storedDigest ?? request.BaseManifestDigest,
BaselineReset = false,
Counts = counts
};
}
private static MirrorDeltaComputeResult CreateResetBaselineResult(MirrorDeltaComputeRequest request)
{
var counts = new MirrorDeltaCategoryCounts
{
Added = CountByCategory(request.CurrentItems),
Changed = new MirrorBundleDeltaCounts(0, 0, 0),
Removed = new MirrorBundleDeltaCounts(0, 0, 0),
Unchanged = new MirrorBundleDeltaCounts(0, 0, 0)
};
return new MirrorDeltaComputeResult
{
Success = true,
AddedItems = request.CurrentItems.ToList(),
ChangedItems = [],
RemovedItems = [],
UnchangedItems = [],
BaseExportId = request.BaseRunId.ToString("D"),
BaseManifestDigest = request.BaseManifestDigest,
BaselineReset = true,
Counts = counts
};
}
private static MirrorDeltaCategoryCounts ComputeCounts(
IReadOnlyList<MirrorDeltaItem> added,
IReadOnlyList<MirrorDeltaChangeItem> changed,
IReadOnlyList<MirrorDeltaRemovedItem> removed,
IReadOnlyList<MirrorDeltaItem> unchanged)
{
return new MirrorDeltaCategoryCounts
{
Added = CountByCategory(added),
Changed = CountByCategory(changed.Select(c => c.Current).ToList()),
Removed = CountRemovedByCategory(removed),
Unchanged = CountByCategory(unchanged)
};
}
private static MirrorBundleDeltaCounts CountByCategory(IReadOnlyList<MirrorDeltaItem> items)
{
var advisories = items.Count(i => i.Category == MirrorBundleDataCategory.Advisories);
var vex = items.Count(i => i.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus);
var sboms = items.Count(i => i.Category == MirrorBundleDataCategory.Sbom);
return new MirrorBundleDeltaCounts(advisories, vex, sboms);
}
private static MirrorBundleDeltaCounts CountRemovedByCategory(IReadOnlyList<MirrorDeltaRemovedItem> items)
{
var advisories = items.Count(i => i.Category == MirrorBundleDataCategory.Advisories);
var vex = items.Count(i => i.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus);
var sboms = items.Count(i => i.Category == MirrorBundleDataCategory.Sbom);
return new MirrorBundleDeltaCounts(advisories, vex, sboms);
}
}
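Wiring the service against the in-memory manifest store from this change, reusing the request sketched earlier (NullLogger keeps the sketch self-contained):
using Microsoft.Extensions.Logging.Abstractions;
var manifestStore = new InMemoryMirrorBaseManifestStore();
var deltaService = new MirrorDeltaService(manifestStore, NullLogger<MirrorDeltaService>.Instance);
var result = await deltaService.ComputeDeltaAsync(request);
// With no stored base manifest, the service logs a warning and falls back to a
// baseline reset: result.BaselineReset is true and all items land in AddedItems.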

View File

@@ -0,0 +1,113 @@
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Service for integrating pack run artifacts and provenance into export bundles.
/// </summary>
public interface IPackRunIntegrationService
{
/// <summary>
/// Integrates a pack run's artifacts and provenance into an export bundle.
/// </summary>
Task<PackRunIntegrationResult> IntegrateAsync(
PackRunIntegrationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run reference for an export run.
/// </summary>
Task<PackRunExportReference?> GetReferenceAsync(
string tenantId,
string exportRunId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Lists all pack run references for an export run.
/// </summary>
Task<IReadOnlyList<PackRunExportReference>> ListReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates a provenance link between a pack run and export.
/// </summary>
Task<PackRunProvenanceLink> CreateProvenanceLinkAsync(
string tenantId,
string packRunId,
string exportRunId,
string evidenceRootHash,
string? attestationDigest,
IReadOnlyList<PackRunProvenanceSubject> subjects,
PackRunLinkKind linkKind = PackRunLinkKind.FullInclusion,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies pack run artifacts and provenance in an export.
/// </summary>
Task<PackRunVerificationResult> VerifyAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for pack run data used by integration service.
/// </summary>
public interface IPackRunDataStore
{
/// <summary>
/// Gets pack run evidence snapshot.
/// </summary>
Task<PackRunEvidenceExport?> GetEvidenceAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run attestation.
/// </summary>
Task<PackRunAttestationExport?> GetAttestationAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run artifacts.
/// </summary>
Task<IReadOnlyList<PackRunExportArtifact>> GetArtifactsAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run status.
/// </summary>
Task<PackRunStatusInfo?> GetStatusAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Opens artifact stream for reading.
/// </summary>
Task<Stream?> OpenArtifactAsync(
string tenantId,
string packRunId,
string artifactPath,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Pack run status information.
/// </summary>
public sealed record PackRunStatusInfo
{
public required string RunId { get; init; }
public required string TenantId { get; init; }
public required string PlanHash { get; init; }
public required string Status { get; init; }
public DateTimeOffset? StartedAt { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
public Guid? EvidenceSnapshotId { get; init; }
public Guid? AttestationId { get; init; }
}
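// Usage sketch (illustrative only; "tenant-a", "run-1" and "export-9" are assumed
// sample identifiers, not fixtures from this repository): integrate one pack run
// into an export bundle and surface the typed error code on failure.
internal static class PackRunIntegrationUsageSketch
{
public static async Task<PackRunExportReference> IntegrateAsync(
IPackRunIntegrationService service,
CancellationToken cancellationToken)
{
var result = await service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = "tenant-a",
PackRunId = "run-1",
ExportRunId = "export-9",
LinkKind = PackRunLinkKind.FullInclusion
}, cancellationToken);
return result.Success
? result.Reference!
: throw new InvalidOperationException($"{result.ErrorCode}: {result.ErrorMessage}");
}
}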

View File

@@ -0,0 +1,235 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// In-memory implementation of pack run data store for testing.
/// </summary>
public sealed class InMemoryPackRunDataStore : IPackRunDataStore
{
private readonly ConcurrentDictionary<string, PackRunStatusInfo> _statuses = new();
private readonly ConcurrentDictionary<string, PackRunEvidenceExport> _evidence = new();
private readonly ConcurrentDictionary<string, PackRunAttestationExport> _attestations = new();
private readonly ConcurrentDictionary<string, List<PackRunExportArtifact>> _artifacts = new();
private readonly ConcurrentDictionary<string, byte[]> _artifactContent = new();
/// <summary>
/// Adds a pack run status for testing.
/// </summary>
public void AddStatus(PackRunStatusInfo status)
{
var key = GetKey(status.TenantId, status.RunId);
_statuses[key] = status;
}
/// <summary>
/// Sets evidence for a pack run.
/// </summary>
public void SetEvidence(string tenantId, string packRunId, PackRunEvidenceExport evidence)
{
var key = GetKey(tenantId, packRunId);
_evidence[key] = evidence;
}
/// <summary>
/// Sets attestation for a pack run.
/// </summary>
public void SetAttestation(string tenantId, string packRunId, PackRunAttestationExport attestation)
{
var key = GetKey(tenantId, packRunId);
_attestations[key] = attestation;
}
/// <summary>
/// Adds an artifact for a pack run.
/// </summary>
public void AddArtifact(string tenantId, string packRunId, PackRunExportArtifact artifact, byte[] content)
{
var key = GetKey(tenantId, packRunId);
var contentKey = GetKey(tenantId, packRunId, artifact.Path);
// Test-only store: mutating the list inside AddOrUpdate assumes single-threaded use.
_artifacts.AddOrUpdate(
key,
[artifact],
(_, list) => { list.Add(artifact); return list; });
_artifactContent[contentKey] = content;
}
/// <inheritdoc />
public Task<PackRunStatusInfo?> GetStatusAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_statuses.TryGetValue(key, out var status);
return Task.FromResult(status);
}
/// <inheritdoc />
public Task<PackRunEvidenceExport?> GetEvidenceAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_evidence.TryGetValue(key, out var evidence);
return Task.FromResult(evidence);
}
/// <inheritdoc />
public Task<PackRunAttestationExport?> GetAttestationAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_attestations.TryGetValue(key, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<IReadOnlyList<PackRunExportArtifact>> GetArtifactsAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
if (_artifacts.TryGetValue(key, out var list))
{
return Task.FromResult<IReadOnlyList<PackRunExportArtifact>>(list);
}
return Task.FromResult<IReadOnlyList<PackRunExportArtifact>>([]);
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(
string tenantId,
string packRunId,
string artifactPath,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId, artifactPath);
if (_artifactContent.TryGetValue(key, out var content))
{
return Task.FromResult<Stream?>(new MemoryStream(content));
}
return Task.FromResult<Stream?>(null);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_statuses.Clear();
_evidence.Clear();
_attestations.Clear();
_artifacts.Clear();
_artifactContent.Clear();
}
private static string GetKey(string tenantId, string packRunId)
=> $"{tenantId}:{packRunId}";
private static string GetKey(string tenantId, string packRunId, string path)
=> $"{tenantId}:{packRunId}:{path}";
}
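// Test-setup sketch (assumed sample values throughout): seed the in-memory store
// with one completed run and one artifact so integration code has data to copy.
internal static class InMemoryPackRunDataStoreSeedSketch
{
public static void Seed(InMemoryPackRunDataStore store)
{
var content = "{}"u8.ToArray();
var sha256 = "sha256:" + Convert.ToHexString(
System.Security.Cryptography.SHA256.HashData(content)).ToLowerInvariant();
store.AddStatus(new PackRunStatusInfo
{
RunId = "run-1",
TenantId = "tenant-a",
PlanHash = "sha256:" + new string('a', 64),
Status = "Succeeded",
CompletedAt = DateTimeOffset.UtcNow
});
store.AddArtifact(
"tenant-a",
"run-1",
new PackRunExportArtifact
{
Name = "report",
Path = "report.json",
Sha256 = sha256,
SizeBytes = content.Length,
MediaType = "application/json"
},
content);
}
}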
/// <summary>
/// In-memory implementation of pack run export store for testing.
/// </summary>
public sealed class InMemoryPackRunExportStore : IPackRunExportStore
{
private readonly ConcurrentDictionary<string, List<PackRunExportReference>> _references = new();
private readonly ConcurrentDictionary<string, byte[]> _artifacts = new();
/// <inheritdoc />
public Task SaveReferenceAsync(
string tenantId,
string exportRunId,
PackRunExportReference reference,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, exportRunId);
_references.AddOrUpdate(
key,
[reference],
(_, list) =>
{
// Remove existing reference for same pack run
list.RemoveAll(r => string.Equals(r.RunId, reference.RunId, StringComparison.OrdinalIgnoreCase));
list.Add(reference);
return list;
});
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<IReadOnlyList<PackRunExportReference>> GetReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, exportRunId);
if (_references.TryGetValue(key, out var list))
{
return Task.FromResult<IReadOnlyList<PackRunExportReference>>(list);
}
return Task.FromResult<IReadOnlyList<PackRunExportReference>>([]);
}
/// <inheritdoc />
public async Task WriteArtifactAsync(
string tenantId,
string exportRunId,
string path,
Stream content,
CancellationToken cancellationToken = default)
{
var key = GetArtifactKey(tenantId, exportRunId, path);
using var ms = new MemoryStream();
await content.CopyToAsync(ms, cancellationToken);
_artifacts[key] = ms.ToArray();
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(
string tenantId,
string exportRunId,
string path,
CancellationToken cancellationToken = default)
{
var key = GetArtifactKey(tenantId, exportRunId, path);
if (_artifacts.TryGetValue(key, out var content))
{
return Task.FromResult<Stream?>(new MemoryStream(content));
}
return Task.FromResult<Stream?>(null);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_references.Clear();
_artifacts.Clear();
}
private static string GetKey(string tenantId, string exportRunId)
=> $"{tenantId}:{exportRunId}";
private static string GetArtifactKey(string tenantId, string exportRunId, string path)
=> $"{tenantId}:{exportRunId}:{path}";
}
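// Round-trip sketch (assumed sample values): write an artifact stream, read it
// back, and confirm the stored bytes match what was written.
internal static class InMemoryPackRunExportStoreSketch
{
public static async Task<bool> RoundTripAsync(InMemoryPackRunExportStore store, CancellationToken cancellationToken)
{
var payload = new byte[] { 1, 2, 3 };
await store.WriteArtifactAsync("tenant-a", "export-9", "pack-runs/run-1/report.json",
new MemoryStream(payload), cancellationToken);
await using var readBack = await store.OpenArtifactAsync(
"tenant-a", "export-9", "pack-runs/run-1/report.json", cancellationToken);
if (readBack is null)
{
return false;
}
using var buffer = new MemoryStream();
await readBack.CopyToAsync(buffer, cancellationToken);
return buffer.ToArray().AsSpan().SequenceEqual(payload);
}
}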

View File

@@ -0,0 +1,353 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Reference to a pack run included in an export.
/// </summary>
public sealed record PackRunExportReference
{
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("evidenceSnapshotId")]
public Guid? EvidenceSnapshotId { get; init; }
[JsonPropertyName("attestationId")]
public Guid? AttestationId { get; init; }
[JsonPropertyName("completedAt")]
public DateTimeOffset? CompletedAt { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("artifacts")]
public IReadOnlyList<PackRunExportArtifact> Artifacts { get; init; } = [];
[JsonPropertyName("provenanceLink")]
public PackRunProvenanceLink? ProvenanceLink { get; init; }
}
/// <summary>
/// Artifact from a pack run to include in export.
/// </summary>
public sealed record PackRunExportArtifact
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public required string MediaType { get; init; }
[JsonPropertyName("category")]
public string? Category { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Provenance link from pack run to export bundle.
/// </summary>
public sealed record PackRunProvenanceLink
{
[JsonPropertyName("version")]
public string Version { get; init; } = "1.0";
[JsonPropertyName("packRunId")]
public required string PackRunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("evidenceRootHash")]
public required string EvidenceRootHash { get; init; }
[JsonPropertyName("attestationDigest")]
public string? AttestationDigest { get; init; }
[JsonPropertyName("exportRunId")]
public required string ExportRunId { get; init; }
[JsonPropertyName("exportBundleHash")]
public string? ExportBundleHash { get; init; }
[JsonPropertyName("linkedAt")]
public required DateTimeOffset LinkedAt { get; init; }
[JsonPropertyName("linkKind")]
public PackRunLinkKind LinkKind { get; init; } = PackRunLinkKind.FullInclusion;
[JsonPropertyName("subjects")]
public IReadOnlyList<PackRunProvenanceSubject> Subjects { get; init; } = [];
}
/// <summary>
/// Subject included in provenance link.
/// </summary>
public sealed record PackRunProvenanceSubject
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("digest")]
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// Kind of pack run link to export.
/// </summary>
public enum PackRunLinkKind
{
/// <summary>Full pack run artifacts included in export.</summary>
FullInclusion,
/// <summary>Only provenance reference included, artifacts external.</summary>
ProvenanceOnly,
/// <summary>Selective artifacts included based on filter.</summary>
SelectiveInclusion,
/// <summary>Delta from previous export.</summary>
DeltaInclusion
}
/// <summary>
/// Request to integrate pack run into an export bundle.
/// </summary>
public sealed record PackRunIntegrationRequest
{
public required string TenantId { get; init; }
public required string PackRunId { get; init; }
public required string ExportRunId { get; init; }
public PackRunLinkKind LinkKind { get; init; } = PackRunLinkKind.FullInclusion;
public IReadOnlyList<string>? ArtifactFilter { get; init; }
public bool IncludeEvidence { get; init; } = true;
public bool IncludeAttestation { get; init; } = true;
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Result of pack run integration.
/// </summary>
public sealed record PackRunIntegrationResult
{
public bool Success { get; init; }
public string? ErrorCode { get; init; }
public string? ErrorMessage { get; init; }
public PackRunExportReference? Reference { get; init; }
public IReadOnlyList<IntegratedPackRunArtifact> IntegratedArtifacts { get; init; } = [];
public static PackRunIntegrationResult Succeeded(
PackRunExportReference reference,
IReadOnlyList<IntegratedPackRunArtifact> artifacts) => new()
{
Success = true,
Reference = reference,
IntegratedArtifacts = artifacts
};
public static PackRunIntegrationResult Failed(string errorCode, string message) => new()
{
Success = false,
ErrorCode = errorCode,
ErrorMessage = message
};
}
/// <summary>
/// Artifact that was integrated into the export.
/// </summary>
public sealed record IntegratedPackRunArtifact
{
public required string SourcePath { get; init; }
public required string ExportPath { get; init; }
public required string Sha256 { get; init; }
public long SizeBytes { get; init; }
public required string MediaType { get; init; }
}
/// <summary>
/// Pack run evidence to include in export.
/// </summary>
public sealed record PackRunEvidenceExport
{
[JsonPropertyName("snapshotId")]
public required Guid SnapshotId { get; init; }
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("rootHash")]
public required string RootHash { get; init; }
[JsonPropertyName("kind")]
public required string Kind { get; init; }
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("materialCount")]
public int MaterialCount { get; init; }
[JsonPropertyName("materials")]
public IReadOnlyList<PackRunMaterialExport> Materials { get; init; } = [];
}
/// <summary>
/// Material from pack run evidence snapshot.
/// </summary>
public sealed record PackRunMaterialExport
{
[JsonPropertyName("section")]
public required string Section { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public required string MediaType { get; init; }
}
/// <summary>
/// Pack run attestation to include in export.
/// </summary>
public sealed record PackRunAttestationExport
{
[JsonPropertyName("attestationId")]
public required Guid AttestationId { get; init; }
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("subjectCount")]
public int SubjectCount { get; init; }
[JsonPropertyName("envelopeDigest")]
public string? EnvelopeDigest { get; init; }
[JsonPropertyName("subjects")]
public IReadOnlyList<PackRunProvenanceSubject> Subjects { get; init; } = [];
[JsonPropertyName("dsseEnvelope")]
public string? DsseEnvelopeJson { get; init; }
}
/// <summary>
/// Verification request for pack run artifacts in export.
/// </summary>
public sealed record PackRunVerificationRequest
{
public required string TenantId { get; init; }
public required string ExportRunId { get; init; }
public string? PackRunId { get; init; }
public bool VerifyHashes { get; init; } = true;
public bool VerifyAttestation { get; init; } = true;
public bool VerifyProvenance { get; init; } = true;
public IReadOnlyList<string>? TrustedKeys { get; init; }
}
/// <summary>
/// Verification result for pack run artifacts in export.
/// </summary>
public sealed record PackRunVerificationResult
{
public bool IsValid { get; init; }
public required string ExportRunId { get; init; }
public string? PackRunId { get; init; }
public PackRunProvenanceVerificationStatus ProvenanceStatus { get; init; }
public PackRunAttestationVerificationStatus AttestationStatus { get; init; }
public IReadOnlyList<PackRunHashVerificationResult> HashResults { get; init; } = [];
public IReadOnlyList<string> Errors { get; init; } = [];
public IReadOnlyList<string> Warnings { get; init; } = [];
public DateTimeOffset VerifiedAt { get; init; }
}
/// <summary>
/// Provenance verification status.
/// </summary>
public enum PackRunProvenanceVerificationStatus
{
NotVerified,
Valid,
Invalid,
MissingLink,
HashMismatch
}
/// <summary>
/// Attestation verification status for pack runs.
/// </summary>
public enum PackRunAttestationVerificationStatus
{
NotVerified,
Valid,
Invalid,
SignatureInvalid,
SubjectMismatch,
NotFound
}
/// <summary>
/// Hash verification result for a pack run artifact.
/// </summary>
public sealed record PackRunHashVerificationResult
{
public required string ArtifactPath { get; init; }
public bool IsValid { get; init; }
public required string ExpectedHash { get; init; }
public string? ComputedHash { get; init; }
public string? Error { get; init; }
}
/// <summary>
/// Error codes for pack run integration.
/// </summary>
public static class PackRunIntegrationErrors
{
public const string PackRunNotFound = "PACK_RUN_NOT_FOUND";
public const string TenantMismatch = "TENANT_MISMATCH";
public const string EvidenceNotFound = "EVIDENCE_NOT_FOUND";
public const string AttestationNotFound = "ATTESTATION_NOT_FOUND";
public const string ArtifactNotFound = "ARTIFACT_NOT_FOUND";
public const string HashMismatch = "HASH_MISMATCH";
public const string IntegrationFailed = "INTEGRATION_FAILED";
public const string ProvenanceLinkFailed = "PROVENANCE_LINK_FAILED";
}
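// Illustrative JSON shape of a serialized PackRunProvenanceLink (field values are
// assumed examples; note that linkKind serializes numerically here because no
// string converter is declared on the enum):
//
// {
//   "version": "1.0",
//   "packRunId": "run-1",
//   "planHash": "sha256:aaaa...",
//   "evidenceRootHash": "sha256:bbbb...",
//   "attestationDigest": null,
//   "exportRunId": "export-9",
//   "exportBundleHash": null,
//   "linkedAt": "2025-12-11T09:00:00+00:00",
//   "linkKind": 0,
//   "subjects": [
//     { "name": "report.json", "digest": { "sha256": "cccc..." } }
//   ]
// }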

View File

@@ -0,0 +1,478 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Default implementation of pack run integration service.
/// </summary>
public sealed class PackRunIntegrationService : IPackRunIntegrationService
{
private readonly IPackRunDataStore _dataStore;
private readonly IPackRunExportStore _exportStore;
private readonly ILogger<PackRunIntegrationService> _logger;
private readonly TimeProvider _timeProvider;
public PackRunIntegrationService(
IPackRunDataStore dataStore,
IPackRunExportStore exportStore,
ILogger<PackRunIntegrationService> logger,
TimeProvider timeProvider)
{
_dataStore = dataStore ?? throw new ArgumentNullException(nameof(dataStore));
_exportStore = exportStore ?? throw new ArgumentNullException(nameof(exportStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
public async Task<PackRunIntegrationResult> IntegrateAsync(
PackRunIntegrationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Integrating pack run {PackRunId} into export {ExportRunId} for tenant {TenantId}",
request.PackRunId,
request.ExportRunId,
request.TenantId);
// Get pack run status
var status = await _dataStore.GetStatusAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
if (status is null)
{
_logger.LogWarning(
"Pack run {PackRunId} not found for tenant {TenantId}",
request.PackRunId,
request.TenantId);
return PackRunIntegrationResult.Failed(
PackRunIntegrationErrors.PackRunNotFound,
$"Pack run {request.PackRunId} not found.");
}
if (!string.Equals(status.TenantId, request.TenantId, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Tenant mismatch for pack run {PackRunId}: expected {Expected}, got {Actual}",
request.PackRunId,
request.TenantId,
status.TenantId);
return PackRunIntegrationResult.Failed(
PackRunIntegrationErrors.TenantMismatch,
"Pack run belongs to a different tenant.");
}
// Get artifacts
var artifacts = await _dataStore.GetArtifactsAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
// Apply filter if specified
if (request.ArtifactFilter is { Count: > 0 })
{
var filterSet = new HashSet<string>(request.ArtifactFilter, StringComparer.OrdinalIgnoreCase);
artifacts = artifacts.Where(a => filterSet.Contains(a.Name) || filterSet.Contains(a.Path)).ToList();
}
// Get evidence and attestation if requested
PackRunEvidenceExport? evidence = null;
PackRunAttestationExport? attestation = null;
if (request.IncludeEvidence)
{
evidence = await _dataStore.GetEvidenceAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
}
if (request.IncludeAttestation)
{
attestation = await _dataStore.GetAttestationAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
}
// Create provenance link
var subjects = artifacts.Select(a => new PackRunProvenanceSubject
{
Name = a.Path,
Digest = ParseDigest(a.Sha256)
}).ToList();
var provenanceLink = await CreateProvenanceLinkAsync(
request.TenantId,
request.PackRunId,
request.ExportRunId,
// An all-zero digest is the explicit "no evidence snapshot" sentinel.
evidence?.RootHash ?? "sha256:" + new string('0', 64),
attestation?.EnvelopeDigest,
subjects,
request.LinkKind,
cancellationToken);
// The link factory cannot see the run status, so backfill the plan hash here.
provenanceLink = provenanceLink with { PlanHash = status.PlanHash };
// Build reference
var reference = new PackRunExportReference
{
RunId = request.PackRunId,
TenantId = request.TenantId,
PlanHash = status.PlanHash,
EvidenceSnapshotId = status.EvidenceSnapshotId,
AttestationId = status.AttestationId,
CompletedAt = status.CompletedAt,
Status = status.Status,
Artifacts = artifacts,
ProvenanceLink = provenanceLink
};
// Copy artifacts to export store
var integratedArtifacts = new List<IntegratedPackRunArtifact>();
foreach (var artifact in artifacts)
{
var exportPath = $"pack-runs/{request.PackRunId}/{artifact.Path}";
await using var stream = await _dataStore.OpenArtifactAsync(
request.TenantId,
request.PackRunId,
artifact.Path,
cancellationToken);
if (stream is not null)
{
await _exportStore.WriteArtifactAsync(
request.TenantId,
request.ExportRunId,
exportPath,
stream,
cancellationToken);
integratedArtifacts.Add(new IntegratedPackRunArtifact
{
SourcePath = artifact.Path,
ExportPath = exportPath,
Sha256 = artifact.Sha256,
SizeBytes = artifact.SizeBytes,
MediaType = artifact.MediaType
});
}
}
// Store reference
await _exportStore.SaveReferenceAsync(
request.TenantId,
request.ExportRunId,
reference,
cancellationToken);
_logger.LogInformation(
"Successfully integrated pack run {PackRunId} into export {ExportRunId}: {ArtifactCount} artifacts",
request.PackRunId,
request.ExportRunId,
integratedArtifacts.Count);
return PackRunIntegrationResult.Succeeded(reference, integratedArtifacts);
}
public async Task<PackRunExportReference?> GetReferenceAsync(
string tenantId,
string exportRunId,
string packRunId,
CancellationToken cancellationToken = default)
{
var references = await _exportStore.GetReferencesAsync(tenantId, exportRunId, cancellationToken);
return references.FirstOrDefault(r =>
string.Equals(r.RunId, packRunId, StringComparison.OrdinalIgnoreCase));
}
public async Task<IReadOnlyList<PackRunExportReference>> ListReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default)
{
return await _exportStore.GetReferencesAsync(tenantId, exportRunId, cancellationToken);
}
public Task<PackRunProvenanceLink> CreateProvenanceLinkAsync(
string tenantId,
string packRunId,
string exportRunId,
string evidenceRootHash,
string? attestationDigest,
IReadOnlyList<PackRunProvenanceSubject> subjects,
PackRunLinkKind linkKind = PackRunLinkKind.FullInclusion,
CancellationToken cancellationToken = default)
{
var link = new PackRunProvenanceLink
{
PackRunId = packRunId,
PlanHash = "", // Will be populated from status
EvidenceRootHash = evidenceRootHash,
AttestationDigest = attestationDigest,
ExportRunId = exportRunId,
LinkedAt = _timeProvider.GetUtcNow(),
LinkKind = linkKind,
Subjects = subjects
};
return Task.FromResult(link);
}
public async Task<PackRunVerificationResult> VerifyAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var errors = new List<string>();
var warnings = new List<string>();
var hashResults = new List<PackRunHashVerificationResult>();
var provenanceStatus = PackRunProvenanceVerificationStatus.NotVerified;
var attestationStatus = PackRunAttestationVerificationStatus.NotVerified;
// Get references for export
var references = await _exportStore.GetReferencesAsync(
request.TenantId,
request.ExportRunId,
cancellationToken);
if (request.PackRunId is not null)
{
references = references.Where(r =>
string.Equals(r.RunId, request.PackRunId, StringComparison.OrdinalIgnoreCase)).ToList();
}
if (references.Count == 0)
{
errors.Add("No pack run references found in export.");
return new PackRunVerificationResult
{
IsValid = false,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
ProvenanceStatus = PackRunProvenanceVerificationStatus.MissingLink,
AttestationStatus = PackRunAttestationVerificationStatus.NotFound,
HashResults = hashResults,
Errors = errors,
Warnings = warnings,
VerifiedAt = _timeProvider.GetUtcNow()
};
}
foreach (var reference in references)
{
// Verify provenance link
if (request.VerifyProvenance && reference.ProvenanceLink is not null)
{
var linkValid = !string.IsNullOrEmpty(reference.ProvenanceLink.EvidenceRootHash) &&
reference.ProvenanceLink.Subjects.Count > 0;
provenanceStatus = linkValid
? PackRunProvenanceVerificationStatus.Valid
: PackRunProvenanceVerificationStatus.Invalid;
if (!linkValid)
{
errors.Add($"Invalid provenance link for pack run {reference.RunId}.");
}
}
// Verify attestation
if (request.VerifyAttestation && reference.AttestationId.HasValue)
{
var attestation = await _dataStore.GetAttestationAsync(
request.TenantId,
reference.RunId,
cancellationToken);
if (attestation is not null)
{
if (attestation.Status == "Signed")
{
// Verify trusted keys if provided
if (request.TrustedKeys is { Count: > 0 } && attestation.DsseEnvelopeJson is not null)
{
// Simplified placeholder: count the attestation as key-matched when it
// carries at least one subject. A real implementation must parse the DSSE
// envelope and verify its signatures against request.TrustedKeys.
var keyFound = attestation.Subjects.Count > 0;
attestationStatus = keyFound
? PackRunAttestationVerificationStatus.Valid
: PackRunAttestationVerificationStatus.SignatureInvalid;
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.Valid;
}
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.Invalid;
warnings.Add($"Attestation for pack run {reference.RunId} is not signed (status: {attestation.Status}).");
}
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.NotFound;
warnings.Add($"Attestation not found for pack run {reference.RunId}.");
}
}
// Verify artifact hashes
if (request.VerifyHashes)
{
foreach (var artifact in reference.Artifacts)
{
var exportPath = $"pack-runs/{reference.RunId}/{artifact.Path}";
await using var stream = await _exportStore.OpenArtifactAsync(
request.TenantId,
request.ExportRunId,
exportPath,
cancellationToken);
if (stream is not null)
{
var computedHash = await ComputeHashAsync(stream, cancellationToken);
var expectedHash = NormalizeHash(artifact.Sha256);
var hashValid = string.Equals(computedHash, expectedHash, StringComparison.OrdinalIgnoreCase);
hashResults.Add(new PackRunHashVerificationResult
{
ArtifactPath = exportPath,
IsValid = hashValid,
ExpectedHash = expectedHash,
ComputedHash = computedHash,
Error = hashValid ? null : "Hash mismatch"
});
if (!hashValid)
{
errors.Add($"Hash mismatch for artifact {exportPath}.");
}
}
else
{
hashResults.Add(new PackRunHashVerificationResult
{
ArtifactPath = exportPath,
IsValid = false,
ExpectedHash = artifact.Sha256,
Error = "Artifact not found in export"
});
errors.Add($"Artifact not found: {exportPath}.");
}
}
}
}
var isValid = errors.Count == 0 &&
(provenanceStatus == PackRunProvenanceVerificationStatus.Valid ||
provenanceStatus == PackRunProvenanceVerificationStatus.NotVerified) &&
(attestationStatus == PackRunAttestationVerificationStatus.Valid ||
attestationStatus == PackRunAttestationVerificationStatus.NotVerified);
return new PackRunVerificationResult
{
IsValid = isValid,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
ProvenanceStatus = provenanceStatus,
AttestationStatus = attestationStatus,
HashResults = hashResults,
Errors = errors,
Warnings = warnings,
VerifiedAt = _timeProvider.GetUtcNow()
};
}
private static IReadOnlyDictionary<string, string> ParseDigest(string hash)
{
var digest = new Dictionary<string, string>();
if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
digest["sha256"] = hash[7..];
}
else if (hash.StartsWith("sha512:", StringComparison.OrdinalIgnoreCase))
{
digest["sha512"] = hash[7..];
}
else
{
digest["sha256"] = hash;
}
return digest;
}
private static string NormalizeHash(string hash)
{
if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
return hash[7..].ToLowerInvariant();
}
return hash.ToLowerInvariant();
}
private static async Task<string> ComputeHashAsync(Stream stream, CancellationToken cancellationToken)
{
var hash = await SHA256.HashDataAsync(stream, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
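// Usage sketch (assumed identifiers): verify hashes, attestation, and provenance
// for a single pack run within an export and return the aggregate verdict.
internal static class PackRunVerificationSketch
{
public static async Task<bool> VerifyPackRunAsync(
IPackRunIntegrationService service,
CancellationToken cancellationToken)
{
var result = await service.VerifyAsync(new PackRunVerificationRequest
{
TenantId = "tenant-a",
ExportRunId = "export-9",
PackRunId = "run-1"
}, cancellationToken);
// HashResults carries per-artifact detail; Errors/Warnings explain failures.
return result.IsValid;
}
}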
/// <summary>
/// Store for pack run export data.
/// </summary>
public interface IPackRunExportStore
{
/// <summary>
/// Saves pack run reference to export.
/// </summary>
Task SaveReferenceAsync(
string tenantId,
string exportRunId,
PackRunExportReference reference,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run references for export.
/// </summary>
Task<IReadOnlyList<PackRunExportReference>> GetReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Writes artifact to export store.
/// </summary>
Task WriteArtifactAsync(
string tenantId,
string exportRunId,
string path,
Stream content,
CancellationToken cancellationToken = default);
/// <summary>
/// Opens artifact from export store.
/// </summary>
Task<Stream?> OpenArtifactAsync(
string tenantId,
string exportRunId,
string path,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,39 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Extension methods for registering pack run integration services.
/// </summary>
public static class PackRunIntegrationServiceCollectionExtensions
{
/// <summary>
/// Registers pack run integration services with in-memory stores.
/// </summary>
public static IServiceCollection AddPackRunIntegration(this IServiceCollection services)
{
services.AddSingleton<InMemoryPackRunDataStore>();
services.AddSingleton<IPackRunDataStore>(sp => sp.GetRequiredService<InMemoryPackRunDataStore>());
services.AddSingleton<InMemoryPackRunExportStore>();
services.AddSingleton<IPackRunExportStore>(sp => sp.GetRequiredService<InMemoryPackRunExportStore>());
services.AddSingleton<IPackRunIntegrationService, PackRunIntegrationService>();
return services;
}
/// <summary>
/// Registers pack run integration services with custom stores.
/// </summary>
public static IServiceCollection AddPackRunIntegration<TDataStore, TExportStore>(this IServiceCollection services)
where TDataStore : class, IPackRunDataStore
where TExportStore : class, IPackRunExportStore
{
services.AddSingleton<IPackRunDataStore, TDataStore>();
services.AddSingleton<IPackRunExportStore, TExportStore>();
services.AddSingleton<IPackRunIntegrationService, PackRunIntegrationService>();
return services;
}
}
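// Wiring sketch: the parameterless overload is meant for tests and samples, while
// a production host supplies real stores via the generic overload. The store types
// named below are hypothetical placeholders, not types shipped in this commit:
//
// services.AddPackRunIntegration(); // in-memory stores
// services.AddPackRunIntegration<SqlPackRunDataStore, S3PackRunExportStore>(); // custom stores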

View File

@@ -15,6 +15,11 @@ public sealed record ExportPlanRequest
public ExportFormatOptions? FormatOverride { get; init; }
/// <summary>
/// Distribution targets for the export artifacts.
/// </summary>
public IReadOnlyList<ExportDistributionTargetSpec>? DistributionTargets { get; init; }
public string? CorrelationId { get; init; }
public string? InitiatedBy { get; init; }
@@ -22,6 +27,31 @@ public sealed record ExportPlanRequest
public bool DryRun { get; init; }
}
/// <summary>
/// Specification for a distribution target in a plan request.
/// </summary>
public sealed record ExportDistributionTargetSpec
{
public required Domain.ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// </summary>
public string? IdempotencyKey { get; init; }
/// <summary>
/// Target-specific configuration (JSON).
/// </summary>
public string? ConfigJson { get; init; }
/// <summary>
/// Retention policy for this target.
/// </summary>
public Domain.ExportRetentionPolicy? RetentionPolicy { get; init; }
}
/// <summary>
/// Output format configuration for exports.
/// </summary>
@@ -77,7 +107,17 @@ public enum ExportFormat
/// <summary>
/// Full mirror layout with indexes.
/// </summary>
Mirror = 5
Mirror = 5,
/// <summary>
/// Trivy vulnerability database format (schema v2).
/// </summary>
TrivyDb = 6,
/// <summary>
/// Trivy Java database format (Maven/Gradle/SBT supplement).
/// </summary>
TrivyJavaDb = 7
}
/// <summary>
@@ -110,6 +150,11 @@ public sealed record ExportPlan
public IReadOnlyList<ExportPlanPhase> Phases { get; init; } = [];
/// <summary>
/// Resolved distribution targets for the plan.
/// </summary>
public IReadOnlyList<ExportPlanDistributionTarget> DistributionTargets { get; init; } = [];
public int TotalItems { get; init; }
public long EstimatedSizeBytes { get; init; }
@@ -129,6 +174,34 @@ public sealed record ExportPlan
public IReadOnlyList<ExportValidationError> ValidationErrors { get; init; } = [];
}
/// <summary>
/// A resolved distribution target in an export plan.
/// </summary>
public sealed record ExportPlanDistributionTarget
{
public required Guid TargetId { get; init; }
public required Domain.ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
public string? IdempotencyKey { get; init; }
public string? ConfigJson { get; init; }
public Domain.ExportRetentionPolicy? RetentionPolicy { get; init; }
/// <summary>
/// Estimated time to complete distribution to this target.
/// </summary>
public TimeSpan EstimatedDuration { get; init; }
/// <summary>
/// Priority for distribution ordering (lower = higher priority).
/// </summary>
public int Priority { get; init; }
}
/// <summary>
/// Status of an export plan.
/// </summary>
@@ -230,7 +303,17 @@ public enum ExportPhaseKind
/// <summary>
/// Verify distribution.
/// </summary>
Verify = 8
Verify = 8,
/// <summary>
/// Apply retention policies.
/// </summary>
ApplyRetention = 9,
/// <summary>
/// Cleanup and finalization.
/// </summary>
Finalize = 10
}
/// <summary>

View File

@@ -0,0 +1,286 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Default implementation of the export retention service.
/// </summary>
public sealed class ExportRetentionService : IExportRetentionService
{
private readonly IExportRetentionStore _retentionStore;
private readonly ILogger<ExportRetentionService> _logger;
public ExportRetentionService(
IExportRetentionStore retentionStore,
ILogger<ExportRetentionService> logger)
{
_retentionStore = retentionStore ?? throw new ArgumentNullException(nameof(retentionStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<RetentionPruneResult> PruneAsync(
RetentionPruneRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var retention = request.OverrideRetention ?? new ExportRetentionConfig();
var now = DateTimeOffset.UtcNow;
_logger.LogInformation(
"Starting retention prune for tenant {TenantId}, profile {ProfileId}, execute={Execute}",
request.TenantId, request.ProfileId, request.Execute);
// Get runs eligible for pruning
var eligibleRuns = await GetRunsEligibleForPruningAsync(
request.TenantId,
request.ProfileId,
retention,
now,
cancellationToken);
if (eligibleRuns.Count == 0)
{
_logger.LogInformation("No runs eligible for pruning");
return new RetentionPruneResult { Success = true };
}
var prunedRuns = new List<PrunedRunInfo>();
var errors = new List<string>();
int totalArtifactsDeleted = 0;
long totalBytesFreed = 0;
int runsSkippedLegalHold = 0;
foreach (var runId in eligibleRuns)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var runInfo = await _retentionStore.GetRunInfoAsync(runId, cancellationToken);
if (runInfo is null)
continue;
// Check legal hold
if (retention.RespectLegalHold && runInfo.HasLegalHold)
{
_logger.LogDebug("Skipping run {RunId}: has legal hold", runId);
runsSkippedLegalHold++;
continue;
}
if (request.Execute)
{
// Delete artifacts first
var deleteResult = await _retentionStore.DeleteRunArtifactsAsync(runId, cancellationToken);
// Delete run record
await _retentionStore.DeleteRunAsync(runId, cancellationToken);
prunedRuns.Add(new PrunedRunInfo
{
RunId = runId,
ProfileId = runInfo.ProfileId,
CompletedAt = runInfo.CompletedAt,
ArtifactsDeleted = deleteResult.ArtifactsDeleted,
BytesFreed = deleteResult.BytesFreed
});
totalArtifactsDeleted += deleteResult.ArtifactsDeleted;
totalBytesFreed += deleteResult.BytesFreed;
_logger.LogDebug(
"Pruned run {RunId}: {Artifacts} artifacts, {Bytes} bytes",
runId, deleteResult.ArtifactsDeleted, deleteResult.BytesFreed);
}
else
{
// Dry run - just record what would be pruned
prunedRuns.Add(new PrunedRunInfo
{
RunId = runId,
ProfileId = runInfo.ProfileId,
CompletedAt = runInfo.CompletedAt,
ArtifactsDeleted = runInfo.ArtifactCount,
BytesFreed = runInfo.TotalSizeBytes
});
totalArtifactsDeleted += runInfo.ArtifactCount;
totalBytesFreed += runInfo.TotalSizeBytes;
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to prune run {RunId}", runId);
errors.Add($"Run {runId}: {ex.Message}");
}
}
_logger.LogInformation(
"Retention prune complete: {RunsPruned} runs, {ArtifactsDeleted} artifacts, {BytesFreed} bytes freed, {Skipped} skipped (legal hold)",
prunedRuns.Count, totalArtifactsDeleted, totalBytesFreed, runsSkippedLegalHold);
return new RetentionPruneResult
{
Success = errors.Count == 0,
RunsPruned = prunedRuns.Count,
ArtifactsDeleted = totalArtifactsDeleted,
BytesFreed = totalBytesFreed,
RunsSkippedLegalHold = runsSkippedLegalHold,
Errors = errors,
PrunedRuns = prunedRuns
};
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> GetRunsEligibleForPruningAsync(
Guid tenantId,
Guid? profileId,
ExportRetentionConfig retention,
DateTimeOffset asOf,
CancellationToken cancellationToken = default)
{
var eligibleRuns = new List<Guid>();
// Get all profiles to check
IReadOnlyList<Guid> profileIds = profileId.HasValue
? [profileId.Value]
: await _retentionStore.GetProfileIdsAsync(tenantId, cancellationToken);
foreach (var pid in profileIds)
{
// Get runs for this profile
var runs = await _retentionStore.GetRunsForProfileAsync(pid, cancellationToken);
// Sort by completion time descending (newest first)
var sortedRuns = runs
.Where(r => r.CompletedAt.HasValue)
.OrderByDescending(r => r.CompletedAt)
.ToList();
// Keep minimum runs
var runsToKeep = Math.Max(retention.MinimumRunsToRetain, 0);
var keptCount = 0;
for (var index = 0; index < sortedRuns.Count; index++)
{
var run = sortedRuns[index];
// Always keep minimum number of runs
if (keptCount < runsToKeep)
{
keptCount++;
continue;
}
// Check expiration
var isExpired = run.ExpiresAt.HasValue && run.ExpiresAt.Value <= asOf;
// Check max runs per profile (position-based; avoids repeated O(n) IndexOf scans
// and stays unambiguous when two runs compare equal by record value)
var exceedsMaxRuns = index >= retention.MaxRunsPerProfile;
if (isExpired || exceedsMaxRuns)
{
eligibleRuns.Add(run.RunId);
}
}
}
return eligibleRuns;
}
/// <inheritdoc />
public async Task SetLegalHoldAsync(
Guid runId,
bool hold,
string? reason = null,
CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"Setting legal hold for run {RunId}: hold={Hold}, reason={Reason}",
runId, hold, reason);
await _retentionStore.SetLegalHoldAsync(runId, hold, reason, cancellationToken);
}
/// <inheritdoc />
public DateTimeOffset ComputeExpiration(
ExportRetentionConfig retention,
DateTimeOffset completedAt,
bool success)
{
var days = success ? retention.SuccessfulRunDays : retention.FailedRunDays;
return completedAt.AddDays(days);
}
}
/// <summary>
/// Store interface for retention operations.
/// </summary>
public interface IExportRetentionStore
{
/// <summary>
/// Gets all profile IDs for a tenant.
/// </summary>
Task<IReadOnlyList<Guid>> GetProfileIdsAsync(Guid tenantId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets runs for a profile.
/// </summary>
Task<IReadOnlyList<RetentionRunInfo>> GetRunsForProfileAsync(Guid profileId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets detailed run info.
/// </summary>
Task<DetailedRunInfo?> GetRunInfoAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes artifacts for a run.
/// </summary>
Task<ArtifactDeleteResult> DeleteRunArtifactsAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a run record.
/// </summary>
Task DeleteRunAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Sets legal hold on a run.
/// </summary>
Task SetLegalHoldAsync(Guid runId, bool hold, string? reason, CancellationToken cancellationToken = default);
}
/// <summary>
/// Run info for retention decisions.
/// </summary>
public sealed record RetentionRunInfo
{
public required Guid RunId { get; init; }
public required Guid ProfileId { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
public DateTimeOffset? ExpiresAt { get; init; }
public bool Success { get; init; }
public bool HasLegalHold { get; init; }
}
/// <summary>
/// Detailed run info for pruning.
/// </summary>
public sealed record DetailedRunInfo
{
public required Guid RunId { get; init; }
public required Guid ProfileId { get; init; }
public required DateTimeOffset CompletedAt { get; init; }
public bool HasLegalHold { get; init; }
public string? LegalHoldReason { get; init; }
public int ArtifactCount { get; init; }
public long TotalSizeBytes { get; init; }
}
/// <summary>
/// Result of artifact deletion.
/// </summary>
public sealed record ArtifactDeleteResult
{
public int ArtifactsDeleted { get; init; }
public long BytesFreed { get; init; }
}
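// Worked example (assumed policy values): with SuccessfulRunDays = 30, a run that
// completed at 2025-01-01T00:00Z expires at 2025-01-31T00:00Z; a failed run with
// FailedRunDays = 7 would expire at 2025-01-08T00:00Z instead.
internal static class RetentionExpirationSketch
{
public static DateTimeOffset ExpiryForSuccessfulRun(IExportRetentionService retention)
=> retention.ComputeExpiration(
new ExportRetentionConfig { SuccessfulRunDays = 30, FailedRunDays = 7 },
new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
success: true);
}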

View File

@@ -0,0 +1,335 @@
using System.Collections.Concurrent;
using System.Net;
using System.Net.Sockets;
using Cronos;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Default implementation of the export scheduler service.
/// </summary>
public sealed class ExportSchedulerService : IExportSchedulerService
{
private readonly IExportScheduleStore _scheduleStore;
private readonly ILogger<ExportSchedulerService> _logger;
private readonly ConcurrentDictionary<string, CronExpression> _cronCache = new();
// Pause profiles after this many consecutive failures
private const int MaxConsecutiveFailuresBeforePause = 10;
public ExportSchedulerService(
IExportScheduleStore scheduleStore,
ILogger<ExportSchedulerService> logger)
{
_scheduleStore = scheduleStore ?? throw new ArgumentNullException(nameof(scheduleStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public DateTimeOffset? GetNextScheduledTime(
Guid profileId,
string cronExpression,
string timezone,
DateTimeOffset from)
{
if (string.IsNullOrWhiteSpace(cronExpression))
return null;
try
{
var cron = GetOrParseCron(cronExpression);
// FindSystemTimeZoneById throws (it never returns null), so resolve via TryFind.
var tz = TimeZoneInfo.TryFindSystemTimeZoneById(timezone, out var zone) && zone is not null
? zone
: TimeZoneInfo.Utc;
var next = cron.GetNextOccurrence(from.UtcDateTime, tz);
return next.HasValue ? new DateTimeOffset(next.Value, TimeSpan.Zero) : null;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to compute next schedule for profile {ProfileId}", profileId);
return null;
}
}
/// <inheritdoc />
public async Task<ExportTriggerResult> TriggerAsync(
ExportTriggerRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Triggering export for profile {ProfileId} from {Source}",
request.ProfileId, request.Source);
// Get current status
var status = await _scheduleStore.GetStatusAsync(request.ProfileId, cancellationToken);
// Check if profile is paused due to failures (unless forced)
if (!request.Force && status?.IsPausedDueToFailures == true)
{
_logger.LogWarning(
"Trigger rejected for profile {ProfileId}: paused due to {Failures} consecutive failures",
request.ProfileId, status.ConsecutiveFailures);
return ExportTriggerResult.Rejected(
ExportTriggerRejection.PausedDueToFailures,
$"Profile paused after {status.ConsecutiveFailures} consecutive failures");
}
// Check if already running
if (status?.IsRunning == true)
{
_logger.LogInformation(
"Trigger rejected for profile {ProfileId}: already running (run {RunId})",
request.ProfileId, status.CurrentRunId);
return ExportTriggerResult.Rejected(
ExportTriggerRejection.ConcurrencyLimitReached,
$"Profile already running (run {status.CurrentRunId})");
}
// Create new run
var runId = Guid.NewGuid();
await _scheduleStore.RecordTriggerAsync(
request.ProfileId,
runId,
request.Source,
request.CorrelationId,
request.InitiatedBy,
cancellationToken);
_logger.LogInformation(
"Created run {RunId} for profile {ProfileId}",
runId, request.ProfileId);
return ExportTriggerResult.Success(runId);
}
/// <inheritdoc />
public Task<ScheduledExportStatus?> GetStatusAsync(
Guid profileId,
CancellationToken cancellationToken = default)
{
return _scheduleStore.GetStatusAsync(profileId, cancellationToken);
}
/// <inheritdoc />
public async Task UpdateRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure = null,
CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"Updating run completion for {RunId}: success={Success}",
runId, success);
await _scheduleStore.RecordRunCompletionAsync(
runId,
success,
failure,
cancellationToken);
// Check if we should pause the profile
if (!success && failure?.Class != ExportFailureClass.Cancelled)
{
var status = await _scheduleStore.GetStatusByRunAsync(runId, cancellationToken);
if (status?.ConsecutiveFailures >= MaxConsecutiveFailuresBeforePause)
{
_logger.LogWarning(
"Pausing profile {ProfileId} after {Failures} consecutive failures",
status.ProfileId, status.ConsecutiveFailures);
await _scheduleStore.SetPausedAsync(status.ProfileId, true, cancellationToken);
}
}
}
/// <inheritdoc />
public (bool IsValid, string? ErrorMessage) ValidateCronExpression(string cronExpression)
{
if (string.IsNullOrWhiteSpace(cronExpression))
return (false, "Cron expression cannot be empty");
try
{
// Try parsing - support both 5-field (minute-only) and 6-field (with seconds)
var format = cronExpression.Trim().Split(' ', StringSplitOptions.RemoveEmptyEntries).Length == 6
? CronFormat.IncludeSeconds
: CronFormat.Standard;
CronExpression.Parse(cronExpression, format);
return (true, null);
}
catch (CronFormatException ex)
{
return (false, $"Invalid cron expression: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> GetProfilesDueForExecutionAsync(
Guid tenantId,
DateTimeOffset asOf,
CancellationToken cancellationToken = default)
{
var profiles = await _scheduleStore.GetScheduledProfilesAsync(tenantId, cancellationToken);
var due = new List<Guid>();
foreach (var profile in profiles)
{
if (string.IsNullOrWhiteSpace(profile.CronExpression))
continue;
var status = await _scheduleStore.GetStatusAsync(profile.ProfileId, cancellationToken);
// Skip if running or paused
if (status?.IsRunning == true || status?.IsPausedDueToFailures == true)
continue;
// Check if due
var nextRun = status?.NextScheduledRun;
if (nextRun.HasValue && nextRun.Value <= asOf)
{
due.Add(profile.ProfileId);
}
}
return due;
}
/// <inheritdoc />
public TimeSpan? ComputeRetryDelay(ExportRetryPolicy policy, int failureCount)
{
if (failureCount >= policy.MaxRetries)
return null;
var delay = policy.InitialDelaySeconds * Math.Pow(policy.BackoffMultiplier, failureCount);
var cappedDelay = Math.Min(delay, policy.MaxDelaySeconds);
return TimeSpan.FromSeconds(cappedDelay);
}
/// <inheritdoc />
public ExportFailureClass ClassifyFailure(Exception exception)
{
return exception switch
{
// Network-related
SocketException => ExportFailureClass.NetworkError,
HttpRequestException httpEx when IsTransient(httpEx) => ExportFailureClass.Transient,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.TooManyRequests => ExportFailureClass.RateLimit,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.Unauthorized => ExportFailureClass.AuthFailure,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.Forbidden => ExportFailureClass.AuthFailure,
// Timeout
TimeoutException => ExportFailureClass.Transient,
TaskCanceledException tcEx when tcEx.CancellationToken.IsCancellationRequested => ExportFailureClass.Cancelled,
TaskCanceledException => ExportFailureClass.Transient,
OperationCanceledException ocEx when ocEx.CancellationToken.IsCancellationRequested => ExportFailureClass.Cancelled,
// Validation
ArgumentException => ExportFailureClass.ValidationError,
FormatException => ExportFailureClass.ValidationError,
// IO
IOException => ExportFailureClass.Transient,
UnauthorizedAccessException => ExportFailureClass.AuthFailure,
// Default
_ => ExportFailureClass.Unknown
};
}
private static bool IsTransient(HttpRequestException ex)
{
return ex.StatusCode switch
{
HttpStatusCode.RequestTimeout => true,
HttpStatusCode.BadGateway => true,
HttpStatusCode.ServiceUnavailable => true,
HttpStatusCode.GatewayTimeout => true,
null => true, // Connection failures
_ => false
};
}
private CronExpression GetOrParseCron(string expression)
{
return _cronCache.GetOrAdd(expression, expr =>
{
var format = expr.Trim().Split(' ', StringSplitOptions.RemoveEmptyEntries).Length == 6
? CronFormat.IncludeSeconds
: CronFormat.Standard;
return CronExpression.Parse(expr, format);
});
}
}
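// Worked backoff example (library defaults from ExportRetryPolicy): 60s initial
// delay, 2.0 multiplier, 3600s cap, and 3 max retries yield 60s, 120s, 240s, then
// null once failureCount reaches MaxRetries (the caller should stop retrying).
internal static class RetryDelaySketch
{
public static IReadOnlyList<TimeSpan?> DefaultDelays(IExportSchedulerService scheduler)
{
var policy = new ExportRetryPolicy();
return [.. Enumerable.Range(0, policy.MaxRetries + 1)
.Select(failureCount => scheduler.ComputeRetryDelay(policy, failureCount))];
}
}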
/// <summary>
/// Store interface for schedule state.
/// </summary>
public interface IExportScheduleStore
{
/// <summary>
/// Gets the current status for a profile.
/// </summary>
Task<ScheduledExportStatus?> GetStatusAsync(Guid profileId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status by run ID.
/// </summary>
Task<ScheduledExportStatus?> GetStatusByRunAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Records a trigger/run start.
/// </summary>
Task RecordTriggerAsync(
Guid profileId,
Guid runId,
ExportTriggerSource source,
string? correlationId,
string? initiatedBy,
CancellationToken cancellationToken = default);
/// <summary>
/// Records run completion.
/// </summary>
Task RecordRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets the paused state for a profile.
/// </summary>
Task SetPausedAsync(Guid profileId, bool paused, CancellationToken cancellationToken = default);
/// <summary>
/// Gets all scheduled profiles for a tenant.
/// </summary>
Task<IReadOnlyList<ScheduledProfileInfo>> GetScheduledProfilesAsync(
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the next scheduled run time.
/// </summary>
Task UpdateNextScheduledRunAsync(
Guid profileId,
DateTimeOffset? nextRun,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Basic profile info for scheduling.
/// </summary>
public sealed record ScheduledProfileInfo
{
public required Guid ProfileId { get; init; }
public required Guid TenantId { get; init; }
public string? CronExpression { get; init; }
public string Timezone { get; init; } = "UTC";
public bool Enabled { get; init; }
}
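// Cron sketch (assumed expressions): standard five-field "0 2 * * *" fires daily
// at 02:00 in the given zone; six-field "*/30 * * * * *" fires every 30 seconds.
internal static class CronScheduleSketch
{
public static DateTimeOffset? NextNightlyRun(IExportSchedulerService scheduler, DateTimeOffset from)
=> scheduler.GetNextScheduledTime(Guid.NewGuid(), "0 2 * * *", "UTC", from);
}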

View File

@@ -0,0 +1,622 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Configuration for export scheduling.
/// </summary>
public sealed record ExportScheduleConfig
{
/// <summary>
/// Cron expression for scheduled execution (5 or 6 field format).
/// </summary>
[JsonPropertyName("cronExpression")]
public string? CronExpression { get; init; }
/// <summary>
/// Timezone for cron interpretation (IANA format, e.g., "UTC", "America/New_York").
/// </summary>
[JsonPropertyName("timezone")]
public string Timezone { get; init; } = "UTC";
/// <summary>
/// Whether scheduling is enabled.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Maximum concurrent runs per profile.
/// </summary>
[JsonPropertyName("maxConcurrentRuns")]
public int MaxConcurrentRuns { get; init; } = 1;
/// <summary>
/// Event triggers that initiate runs.
/// </summary>
[JsonPropertyName("eventTriggers")]
public IReadOnlyList<ExportEventTrigger> EventTriggers { get; init; } = [];
/// <summary>
/// Retry configuration for failed runs.
/// </summary>
[JsonPropertyName("retryPolicy")]
public ExportRetryPolicy RetryPolicy { get; init; } = new();
/// <summary>
/// Retention configuration for completed runs.
/// </summary>
[JsonPropertyName("retention")]
public ExportRetentionConfig Retention { get; init; } = new();
}
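// Illustrative JSON for ExportScheduleConfig (assumed values; enum members such as
// eventType serialize numerically unless a string converter is registered):
//
// {
//   "cronExpression": "0 2 * * *",
//   "timezone": "UTC",
//   "enabled": true,
//   "maxConcurrentRuns": 1,
//   "eventTriggers": [ { "eventType": 1, "enabled": true, "debounceSeconds": 300 } ],
//   "retryPolicy": { "maxRetries": 3, "initialDelaySeconds": 60, "maxDelaySeconds": 3600, "backoffMultiplier": 2.0 },
//   "retention": { "successfulRunDays": 30, "failedRunDays": 7, "maxRunsPerProfile": 100 }
// }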
/// <summary>
/// Event trigger for export runs.
/// </summary>
public sealed record ExportEventTrigger
{
/// <summary>
/// Event type that triggers the export.
/// </summary>
[JsonPropertyName("eventType")]
public required ExportEventType EventType { get; init; }
/// <summary>
/// Filter conditions for the event (JSON-encoded).
/// </summary>
[JsonPropertyName("filterJson")]
public string? FilterJson { get; init; }
/// <summary>
/// Whether this trigger is enabled.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Debounce window in seconds (coalesce events within this window).
/// </summary>
[JsonPropertyName("debounceSeconds")]
public int DebounceSeconds { get; init; } = 0;
}
/// <summary>
/// Types of events that can trigger exports.
/// </summary>
public enum ExportEventType
{
/// <summary>
/// New advisory ingested.
/// </summary>
AdvisoryIngested = 1,
/// <summary>
/// Advisory updated or withdrawn.
/// </summary>
AdvisoryUpdated = 2,
/// <summary>
/// New VEX document created.
/// </summary>
VexCreated = 3,
/// <summary>
/// VEX document updated.
/// </summary>
VexUpdated = 4,
/// <summary>
/// New SBOM ingested.
/// </summary>
SbomIngested = 5,
/// <summary>
/// Scan completed.
/// </summary>
ScanCompleted = 6,
/// <summary>
/// Policy evaluation completed.
/// </summary>
PolicyEvaluated = 7,
/// <summary>
/// Attestation created.
/// </summary>
AttestationCreated = 8,
/// <summary>
/// Manual trigger via API.
/// </summary>
ApiTrigger = 100,
/// <summary>
/// Webhook trigger.
/// </summary>
WebhookTrigger = 101
}
/// <summary>
/// Retry policy for failed export runs.
/// </summary>
public sealed record ExportRetryPolicy
{
/// <summary>
/// Maximum number of retry attempts.
/// </summary>
[JsonPropertyName("maxRetries")]
public int MaxRetries { get; init; } = 3;
/// <summary>
/// Initial delay between retries in seconds.
/// </summary>
[JsonPropertyName("initialDelaySeconds")]
public int InitialDelaySeconds { get; init; } = 60;
/// <summary>
/// Maximum delay between retries in seconds.
/// </summary>
[JsonPropertyName("maxDelaySeconds")]
public int MaxDelaySeconds { get; init; } = 3600;
/// <summary>
/// Backoff multiplier (exponential backoff).
/// </summary>
[JsonPropertyName("backoffMultiplier")]
public double BackoffMultiplier { get; init; } = 2.0;
/// <summary>
/// Failure types that should be retried.
/// </summary>
[JsonPropertyName("retryableFailures")]
public IReadOnlyList<ExportFailureClass> RetryableFailures { get; init; } =
[
ExportFailureClass.Transient,
ExportFailureClass.RateLimit,
ExportFailureClass.NetworkError
];
}
/// <summary>
/// Retention configuration for export artifacts.
/// </summary>
public sealed record ExportRetentionConfig
{
/// <summary>
/// Retention period in days for successful runs.
/// </summary>
[JsonPropertyName("successfulRunDays")]
public int SuccessfulRunDays { get; init; } = 30;
/// <summary>
/// Retention period in days for failed runs.
/// </summary>
[JsonPropertyName("failedRunDays")]
public int FailedRunDays { get; init; } = 7;
/// <summary>
/// Maximum total runs to retain per profile.
/// </summary>
[JsonPropertyName("maxRunsPerProfile")]
public int MaxRunsPerProfile { get; init; } = 100;
/// <summary>
/// Whether to keep runs with legal hold.
/// </summary>
[JsonPropertyName("respectLegalHold")]
public bool RespectLegalHold { get; init; } = true;
/// <summary>
/// Minimum runs to retain even if expired.
/// </summary>
[JsonPropertyName("minimumRunsToRetain")]
public int MinimumRunsToRetain { get; init; } = 5;
}
/// <summary>
/// Classification of export failures.
/// </summary>
public enum ExportFailureClass
{
/// <summary>
/// Unknown or unclassified failure.
/// </summary>
Unknown = 0,
/// <summary>
/// Transient failure (network timeout, temporary unavailability).
/// </summary>
Transient = 1,
/// <summary>
/// Rate limit exceeded.
/// </summary>
RateLimit = 2,
/// <summary>
/// Network error (connection refused, DNS failure).
/// </summary>
NetworkError = 3,
/// <summary>
/// Permanent failure (invalid configuration, missing data).
/// </summary>
Permanent = 4,
/// <summary>
/// Authentication or authorization failure.
/// </summary>
AuthFailure = 5,
/// <summary>
/// Quota exceeded (storage, API calls).
/// </summary>
QuotaExceeded = 6,
/// <summary>
/// Validation error in input data.
/// </summary>
ValidationError = 7,
/// <summary>
/// Dependency unavailable (KMS, signing service).
/// </summary>
DependencyFailure = 8,
/// <summary>
/// Run was cancelled.
/// </summary>
Cancelled = 9
}
/// <summary>
/// Detailed failure information for export runs.
/// </summary>
public sealed record ExportFailureInfo
{
/// <summary>
/// Failure classification.
/// </summary>
[JsonPropertyName("class")]
public required ExportFailureClass Class { get; init; }
/// <summary>
/// Error code (domain-specific).
/// </summary>
[JsonPropertyName("errorCode")]
public string? ErrorCode { get; init; }
/// <summary>
/// Human-readable error message.
/// </summary>
[JsonPropertyName("message")]
public required string Message { get; init; }
/// <summary>
/// Detailed error information (stack trace, inner errors).
/// </summary>
[JsonPropertyName("details")]
public string? Details { get; init; }
/// <summary>
/// When the failure occurred.
/// </summary>
[JsonPropertyName("occurredAt")]
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Whether retry is recommended.
/// </summary>
[JsonPropertyName("retryable")]
public bool Retryable { get; init; }
/// <summary>
/// Suggested retry delay in seconds.
/// </summary>
[JsonPropertyName("retryAfterSeconds")]
public int? RetryAfterSeconds { get; init; }
}
/// <summary>
/// Status of a scheduled export.
/// </summary>
public sealed record ScheduledExportStatus
{
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// Last successful run timestamp.
/// </summary>
public DateTimeOffset? LastSuccessfulRun { get; init; }
/// <summary>
/// Last failed run timestamp.
/// </summary>
public DateTimeOffset? LastFailedRun { get; init; }
/// <summary>
/// Next scheduled run timestamp.
/// </summary>
public DateTimeOffset? NextScheduledRun { get; init; }
/// <summary>
/// Current retry count for consecutive failures.
/// </summary>
public int ConsecutiveFailures { get; init; }
/// <summary>
/// Whether the profile is currently executing.
/// </summary>
public bool IsRunning { get; init; }
/// <summary>
/// Current run ID if running.
/// </summary>
public Guid? CurrentRunId { get; init; }
/// <summary>
/// Whether scheduling is paused due to failures.
/// </summary>
public bool IsPausedDueToFailures { get; init; }
/// <summary>
/// Last failure info if any.
/// </summary>
public ExportFailureInfo? LastFailure { get; init; }
}
/// <summary>
/// Request to trigger an export.
/// </summary>
public sealed record ExportTriggerRequest
{
/// <summary>
/// Profile ID to execute.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// Trigger source.
/// </summary>
public required ExportTriggerSource Source { get; init; }
/// <summary>
/// Correlation ID for tracing.
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// User or service that initiated the trigger.
/// </summary>
public string? InitiatedBy { get; init; }
/// <summary>
/// Event data for event-triggered exports.
/// </summary>
public string? EventDataJson { get; init; }
/// <summary>
/// Override configuration (JSON).
/// </summary>
public string? OverrideConfigJson { get; init; }
/// <summary>
/// Whether to force run even if profile is paused.
/// </summary>
public bool Force { get; init; }
/// <summary>
/// Priority hint (higher = more urgent).
/// </summary>
public int Priority { get; init; }
}
/// <summary>
/// Source of export trigger.
/// </summary>
public enum ExportTriggerSource
{
/// <summary>
/// Scheduled via cron.
/// </summary>
Scheduled = 1,
/// <summary>
/// Triggered by event.
/// </summary>
Event = 2,
/// <summary>
/// Manual trigger via API.
/// </summary>
Manual = 3,
/// <summary>
/// Retry of a failed run.
/// </summary>
Retry = 4,
/// <summary>
/// System-initiated (e.g., startup catch-up).
/// </summary>
System = 5
}
/// <summary>
/// Result of a trigger request.
/// </summary>
public sealed record ExportTriggerResult
{
/// <summary>
/// Whether the trigger was accepted.
/// </summary>
public required bool Accepted { get; init; }
/// <summary>
/// Run ID if a new run was created.
/// </summary>
public Guid? RunId { get; init; }
/// <summary>
/// Reason if not accepted.
/// </summary>
public string? RejectionReason { get; init; }
/// <summary>
/// Rejection code.
/// </summary>
public ExportTriggerRejection? RejectionCode { get; init; }
public static ExportTriggerResult Success(Guid runId)
=> new() { Accepted = true, RunId = runId };
public static ExportTriggerResult Rejected(ExportTriggerRejection code, string reason)
=> new() { Accepted = false, RejectionCode = code, RejectionReason = reason };
}
/// <summary>
/// Reasons for rejecting a trigger.
/// </summary>
public enum ExportTriggerRejection
{
/// <summary>
/// Profile not found.
/// </summary>
ProfileNotFound = 1,
/// <summary>
/// Profile is not active.
/// </summary>
ProfileNotActive = 2,
/// <summary>
/// Maximum concurrent runs reached.
/// </summary>
ConcurrencyLimitReached = 3,
/// <summary>
/// Profile is paused due to failures.
/// </summary>
PausedDueToFailures = 4,
/// <summary>
/// Event trigger not enabled.
/// </summary>
TriggerNotEnabled = 5,
/// <summary>
/// Debounce window active.
/// </summary>
DebouncePending = 6,
/// <summary>
/// Rate limit exceeded.
/// </summary>
RateLimited = 7,
/// <summary>
/// Invalid configuration.
/// </summary>
InvalidConfiguration = 8
}
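// Trigger usage sketch ("scheduler" is an assumed IExportSchedulerService instance):
//
//     var result = await scheduler.TriggerAsync(new ExportTriggerRequest
//     {
//         ProfileId = profileId,
//         Source = ExportTriggerSource.Manual,
//         InitiatedBy = "ops-user"
//     });
//     if (!result.Accepted)
//     {
//         // result.RejectionCode (e.g. ConcurrencyLimitReached) and
//         // result.RejectionReason explain why no run was created.
//     }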
/// <summary>
/// Request for retention pruning.
/// </summary>
public sealed record RetentionPruneRequest
{
/// <summary>
/// Tenant ID to prune.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Optional profile ID to restrict pruning.
/// </summary>
public Guid? ProfileId { get; init; }
/// <summary>
/// Whether to actually delete (false = dry run).
/// </summary>
public bool Execute { get; init; } = true;
/// <summary>
/// Override retention config.
/// </summary>
public ExportRetentionConfig? OverrideRetention { get; init; }
}
/// <summary>
/// Result of retention pruning.
/// </summary>
public sealed record RetentionPruneResult
{
/// <summary>
/// Whether pruning was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Number of runs pruned.
/// </summary>
public int RunsPruned { get; init; }
/// <summary>
/// Number of artifacts deleted.
/// </summary>
public int ArtifactsDeleted { get; init; }
/// <summary>
/// Bytes freed.
/// </summary>
public long BytesFreed { get; init; }
/// <summary>
/// Runs that were skipped due to legal hold.
/// </summary>
public int RunsSkippedLegalHold { get; init; }
/// <summary>
/// Errors encountered during pruning.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
/// <summary>
/// Details of pruned runs.
/// </summary>
public IReadOnlyList<PrunedRunInfo> PrunedRuns { get; init; } = [];
}
/// <summary>
/// Information about a pruned run.
/// </summary>
public sealed record PrunedRunInfo
{
/// <summary>
/// Run ID.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// When the run completed.
/// </summary>
public required DateTimeOffset CompletedAt { get; init; }
/// <summary>
/// Number of artifacts deleted.
/// </summary>
public int ArtifactsDeleted { get; init; }
/// <summary>
/// Bytes freed from this run.
/// </summary>
public long BytesFreed { get; init; }
}

View File

@@ -0,0 +1,44 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Extension methods for registering export scheduling services.
/// </summary>
public static class ExportSchedulingServiceCollectionExtensions
{
/// <summary>
/// Registers export scheduling services with in-memory stores.
/// </summary>
public static IServiceCollection AddExportScheduling(this IServiceCollection services)
{
// Register stores (in-memory by default)
services.AddSingleton<IExportScheduleStore, InMemoryExportScheduleStore>();
services.AddSingleton<IExportRetentionStore, InMemoryExportRetentionStore>();
// Register services
services.AddSingleton<IExportSchedulerService, ExportSchedulerService>();
services.AddSingleton<IExportRetentionService, ExportRetentionService>();
return services;
}
/// <summary>
/// Registers export scheduling services with custom stores.
/// </summary>
public static IServiceCollection AddExportScheduling<TScheduleStore, TRetentionStore>(
this IServiceCollection services)
where TScheduleStore : class, IExportScheduleStore
where TRetentionStore : class, IExportRetentionStore
{
// Register custom stores
services.AddSingleton<IExportScheduleStore, TScheduleStore>();
services.AddSingleton<IExportRetentionStore, TRetentionStore>();
// Register services
services.AddSingleton<IExportSchedulerService, ExportSchedulerService>();
services.AddSingleton<IExportRetentionService, ExportRetentionService>();
return services;
}
}
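// Usage sketch (the store type names in the generic overload are illustrative):
//
//     var services = new ServiceCollection();
//     services.AddExportScheduling();   // in-memory stores, suitable for tests
//     // services.AddExportScheduling<MyScheduleStore, MyRetentionStore>(); // persistent stores
//     var scheduler = services.BuildServiceProvider()
//         .GetRequiredService<IExportSchedulerService>();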

View File

@@ -0,0 +1,145 @@
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Service for managing export scheduling.
/// </summary>
public interface IExportSchedulerService
{
/// <summary>
/// Gets the next scheduled run time for a profile.
/// </summary>
/// <param name="profileId">Profile ID.</param>
/// <param name="cronExpression">Cron expression.</param>
/// <param name="timezone">Timezone name.</param>
/// <param name="from">Start time for calculation.</param>
/// <returns>Next run time, or null if no next occurrence.</returns>
DateTimeOffset? GetNextScheduledTime(
Guid profileId,
string cronExpression,
string timezone,
DateTimeOffset from);
/// <summary>
/// Triggers an export run.
/// </summary>
/// <param name="request">Trigger request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Trigger result.</returns>
Task<ExportTriggerResult> TriggerAsync(
ExportTriggerRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status of a scheduled export.
/// </summary>
/// <param name="profileId">Profile ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Schedule status.</returns>
Task<ScheduledExportStatus?> GetStatusAsync(
Guid profileId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the status after a run completes.
/// </summary>
/// <param name="runId">Run ID.</param>
/// <param name="success">Whether the run succeeded.</param>
/// <param name="failure">Failure info if failed.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UpdateRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates a cron expression.
/// </summary>
/// <param name="cronExpression">Cron expression to validate.</param>
/// <returns>Validation result with error message if invalid.</returns>
(bool IsValid, string? ErrorMessage) ValidateCronExpression(string cronExpression);
/// <summary>
/// Gets profiles due for scheduled execution.
/// </summary>
/// <param name="tenantId">Tenant ID.</param>
/// <param name="asOf">Time to check against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of profile IDs due for execution.</returns>
Task<IReadOnlyList<Guid>> GetProfilesDueForExecutionAsync(
Guid tenantId,
DateTimeOffset asOf,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes retry delay based on policy and failure count.
/// </summary>
/// <param name="policy">Retry policy.</param>
/// <param name="failureCount">Number of consecutive failures.</param>
/// <returns>Delay before next retry, or null if no more retries.</returns>
TimeSpan? ComputeRetryDelay(ExportRetryPolicy policy, int failureCount);
/// <summary>
/// Classifies an exception into a failure class.
/// </summary>
/// <param name="exception">The exception.</param>
/// <returns>Failure classification.</returns>
ExportFailureClass ClassifyFailure(Exception exception);
}
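// Illustrative ClassifyFailure sketch; the exception-to-class mapping below is an
// assumption derived from the ExportFailureClass doc comments, not the shipped logic:
//
//     public ExportFailureClass ClassifyFailure(Exception exception) => exception switch
//     {
//         OperationCanceledException => ExportFailureClass.Cancelled,
//         TimeoutException => ExportFailureClass.Transient,
//         HttpRequestException => ExportFailureClass.NetworkError,
//         UnauthorizedAccessException => ExportFailureClass.AuthFailure,
//         ArgumentException => ExportFailureClass.ValidationError,
//         _ => ExportFailureClass.Unknown
//     };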
/// <summary>
/// Service for managing export retention.
/// </summary>
public interface IExportRetentionService
{
/// <summary>
/// Prunes expired runs and artifacts.
/// </summary>
/// <param name="request">Prune request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Prune result.</returns>
Task<RetentionPruneResult> PruneAsync(
RetentionPruneRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets runs eligible for pruning.
/// </summary>
/// <param name="tenantId">Tenant ID.</param>
/// <param name="profileId">Optional profile ID.</param>
/// <param name="retention">Retention config.</param>
/// <param name="asOf">Time to check against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of run IDs eligible for pruning.</returns>
Task<IReadOnlyList<Guid>> GetRunsEligibleForPruningAsync(
Guid tenantId,
Guid? profileId,
ExportRetentionConfig retention,
DateTimeOffset asOf,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets or removes legal hold on a run.
/// </summary>
/// <param name="runId">Run ID.</param>
/// <param name="hold">Whether to hold or release.</param>
/// <param name="reason">Reason for the hold.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SetLegalHoldAsync(
Guid runId,
bool hold,
string? reason = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes expiration time for a new run.
/// </summary>
/// <param name="retention">Retention config.</param>
/// <param name="completedAt">When the run completed.</param>
/// <param name="success">Whether the run succeeded.</param>
/// <returns>Expiration timestamp.</returns>
DateTimeOffset ComputeExpiration(
ExportRetentionConfig retention,
DateTimeOffset completedAt,
bool success);
}
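// Illustrative ComputeExpiration sketch, assuming the straightforward reading of
// ExportRetentionConfig (the run outcome selects the retention window):
//
//     public DateTimeOffset ComputeExpiration(
//         ExportRetentionConfig retention, DateTimeOffset completedAt, bool success)
//         => completedAt.AddDays(success ? retention.SuccessfulRunDays : retention.FailedRunDays);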

View File

@@ -0,0 +1,308 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// In-memory implementation of the schedule store for testing.
/// </summary>
public sealed class InMemoryExportScheduleStore : IExportScheduleStore
{
private readonly ConcurrentDictionary<Guid, ScheduledExportStatus> _statusByProfile = new();
private readonly ConcurrentDictionary<Guid, Guid> _runToProfile = new();
private readonly ConcurrentDictionary<Guid, List<ScheduledProfileInfo>> _profilesByTenant = new();
private readonly object _lock = new();
/// <summary>
/// Adds a profile for testing.
/// </summary>
public void AddProfile(ScheduledProfileInfo profile)
{
lock (_lock)
{
if (!_profilesByTenant.TryGetValue(profile.TenantId, out var profiles))
{
profiles = [];
_profilesByTenant[profile.TenantId] = profiles;
}
profiles.Add(profile);
// Initialize status
_statusByProfile[profile.ProfileId] = new ScheduledExportStatus
{
ProfileId = profile.ProfileId
};
}
}
/// <summary>
/// Sets status for testing.
/// </summary>
public void SetStatus(ScheduledExportStatus status)
{
_statusByProfile[status.ProfileId] = status;
// Also update run-to-profile mapping if a current run is set
if (status.CurrentRunId.HasValue)
{
_runToProfile[status.CurrentRunId.Value] = status.ProfileId;
}
}
public Task<ScheduledExportStatus?> GetStatusAsync(Guid profileId, CancellationToken cancellationToken = default)
{
_statusByProfile.TryGetValue(profileId, out var status);
return Task.FromResult(status);
}
public Task<ScheduledExportStatus?> GetStatusByRunAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runToProfile.TryGetValue(runId, out var profileId))
{
_statusByProfile.TryGetValue(profileId, out var status);
return Task.FromResult(status);
}
return Task.FromResult<ScheduledExportStatus?>(null);
}
public Task RecordTriggerAsync(
Guid profileId,
Guid runId,
ExportTriggerSource source,
string? correlationId,
string? initiatedBy,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
_runToProfile[runId] = profileId;
_statusByProfile.AddOrUpdate(
profileId,
_ => new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
CurrentRunId = runId
},
(_, existing) => existing with
{
IsRunning = true,
CurrentRunId = runId
});
}
return Task.CompletedTask;
}
public Task RecordRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure,
CancellationToken cancellationToken = default)
{
if (!_runToProfile.TryGetValue(runId, out var profileId))
return Task.CompletedTask;
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
var now = DateTimeOffset.UtcNow;
var newFailureCount = success ? 0 : existing.ConsecutiveFailures + 1;
_statusByProfile[profileId] = existing with
{
IsRunning = false,
CurrentRunId = null,
LastSuccessfulRun = success ? now : existing.LastSuccessfulRun,
LastFailedRun = success ? existing.LastFailedRun : now,
ConsecutiveFailures = newFailureCount,
LastFailure = failure
};
}
}
return Task.CompletedTask;
}
public Task SetPausedAsync(Guid profileId, bool paused, CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
_statusByProfile[profileId] = existing with
{
IsPausedDueToFailures = paused
};
}
}
return Task.CompletedTask;
}
public Task<IReadOnlyList<ScheduledProfileInfo>> GetScheduledProfilesAsync(
Guid tenantId,
CancellationToken cancellationToken = default)
{
_profilesByTenant.TryGetValue(tenantId, out var profiles);
return Task.FromResult<IReadOnlyList<ScheduledProfileInfo>>(profiles ?? []);
}
public Task UpdateNextScheduledRunAsync(
Guid profileId,
DateTimeOffset? nextRun,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
_statusByProfile[profileId] = existing with
{
NextScheduledRun = nextRun
};
}
}
return Task.CompletedTask;
}
/// <summary>
/// Clears all state.
/// </summary>
public void Clear()
{
_statusByProfile.Clear();
_runToProfile.Clear();
_profilesByTenant.Clear();
}
}
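// Test usage sketch ("profile" is an assumed ScheduledProfileInfo value):
//
//     var store = new InMemoryExportScheduleStore();
//     store.AddProfile(profile); // seeds an idle status for the profile
//     await store.RecordTriggerAsync(profile.ProfileId, runId,
//         ExportTriggerSource.Manual, correlationId: null, initiatedBy: "test");
//     await store.RecordRunCompletionAsync(runId, success: false,
//         failure: new ExportFailureInfo
//         {
//             Class = ExportFailureClass.Transient,
//             Message = "simulated outage",
//             OccurredAt = DateTimeOffset.UtcNow,
//             Retryable = true
//         });
//     var status = await store.GetStatusAsync(profile.ProfileId);
//     // status.ConsecutiveFailures == 1 and status.IsRunning == false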
/// <summary>
/// In-memory implementation of the retention store for testing.
/// </summary>
public sealed class InMemoryExportRetentionStore : IExportRetentionStore
{
private readonly ConcurrentDictionary<Guid, DetailedRunInfo> _runs = new();
private readonly ConcurrentDictionary<Guid, List<Guid>> _runsByProfile = new();
private readonly ConcurrentDictionary<Guid, List<Guid>> _profilesByTenant = new();
private readonly ConcurrentDictionary<Guid, (bool Hold, string? Reason)> _legalHolds = new();
private readonly object _lock = new();
/// <summary>
/// Adds a run for testing.
/// </summary>
public void AddRun(DetailedRunInfo run, Guid tenantId)
{
lock (_lock)
{
_runs[run.RunId] = run;
if (!_runsByProfile.TryGetValue(run.ProfileId, out var runs))
{
runs = [];
_runsByProfile[run.ProfileId] = runs;
}
runs.Add(run.RunId);
if (!_profilesByTenant.TryGetValue(tenantId, out var profiles))
{
profiles = [];
_profilesByTenant[tenantId] = profiles;
}
if (!profiles.Contains(run.ProfileId))
{
profiles.Add(run.ProfileId);
}
}
}
public Task<IReadOnlyList<Guid>> GetProfileIdsAsync(Guid tenantId, CancellationToken cancellationToken = default)
{
_profilesByTenant.TryGetValue(tenantId, out var profiles);
return Task.FromResult<IReadOnlyList<Guid>>(profiles ?? []);
}
public Task<IReadOnlyList<RetentionRunInfo>> GetRunsForProfileAsync(Guid profileId, CancellationToken cancellationToken = default)
{
var result = new List<RetentionRunInfo>();
if (_runsByProfile.TryGetValue(profileId, out var runIds))
{
foreach (var runId in runIds)
{
if (_runs.TryGetValue(runId, out var run))
{
_legalHolds.TryGetValue(runId, out var hold);
result.Add(new RetentionRunInfo
{
RunId = run.RunId,
ProfileId = run.ProfileId,
CompletedAt = run.CompletedAt,
ExpiresAt = run.CompletedAt.AddDays(30), // fixed 30-day expiry; this test store does not track per-run retention
HasLegalHold = hold.Hold
});
}
}
}
return Task.FromResult<IReadOnlyList<RetentionRunInfo>>(result);
}
public Task<DetailedRunInfo?> GetRunInfoAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
_legalHolds.TryGetValue(runId, out var hold);
return Task.FromResult<DetailedRunInfo?>(run with
{
HasLegalHold = hold.Hold,
LegalHoldReason = hold.Reason
});
}
return Task.FromResult<DetailedRunInfo?>(null);
}
public Task<ArtifactDeleteResult> DeleteRunArtifactsAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
return Task.FromResult(new ArtifactDeleteResult
{
ArtifactsDeleted = run.ArtifactCount,
BytesFreed = run.TotalSizeBytes
});
}
return Task.FromResult(new ArtifactDeleteResult());
}
public Task DeleteRunAsync(Guid runId, CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_runs.TryRemove(runId, out var run))
{
if (_runsByProfile.TryGetValue(run.ProfileId, out var runs))
{
runs.Remove(runId);
}
}
_legalHolds.TryRemove(runId, out _);
}
return Task.CompletedTask;
}
public Task SetLegalHoldAsync(Guid runId, bool hold, string? reason, CancellationToken cancellationToken = default)
{
_legalHolds[runId] = (hold, reason);
return Task.CompletedTask;
}
/// <summary>
/// Clears all state.
/// </summary>
public void Clear()
{
_runs.Clear();
_runsByProfile.Clear();
_profilesByTenant.Clear();
_legalHolds.Clear();
}
}

View File

@@ -12,6 +12,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Cronos" Version="0.9.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>

View File

@@ -0,0 +1,134 @@
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Service for enforcing tenant scope in export operations.
/// </summary>
public interface ITenantScopeEnforcer
{
/// <summary>
/// Checks whether an export operation is allowed under tenant scope rules.
/// </summary>
Task<TenantScopeCheckResult> CheckScopeAsync(
TenantScopeCheckRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates a tenant-scoped path for an artifact.
/// </summary>
TenantScopedPath CreateScopedPath(
string tenantId,
string? projectId,
string originalPath);
/// <summary>
/// Parses a scoped path back into tenant/project/relative components.
/// </summary>
TenantScopedPath? ParseScopedPath(string scopedPath);
/// <summary>
/// Validates tenant and project IDs.
/// </summary>
TenantScopeValidationResult ValidateIds(string tenantId, string? projectId = null);
/// <summary>
/// Creates provenance context for a tenant-scoped export.
/// </summary>
TenantProvenanceContext CreateProvenanceContext(
string tenantId,
string? projectId,
string exportRunId,
IReadOnlyList<TenantScopedManifestEntry> entries,
IReadOnlyList<CrossTenantRef>? crossTenantRefs = null);
/// <summary>
/// Generates the scope prefix for a tenant/project combination.
/// </summary>
string GetScopePrefix(string tenantId, string? projectId = null);
/// <summary>
/// Checks if a path belongs to a specific tenant.
/// </summary>
bool IsPathOwnedByTenant(string path, string tenantId);
/// <summary>
/// Gets the configuration for a tenant (may have overrides).
/// </summary>
TenantScopeConfig GetConfigForTenant(string tenantId);
}
/// <summary>
/// Store for tenant scope configurations.
/// </summary>
public interface ITenantScopeConfigStore
{
/// <summary>
/// Gets the global default configuration.
/// </summary>
TenantScopeConfig GetDefaultConfig();
/// <summary>
/// Gets configuration for a specific tenant (with any overrides applied).
/// </summary>
Task<TenantScopeConfig?> GetTenantConfigAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves configuration for a specific tenant.
/// </summary>
Task SaveTenantConfigAsync(
string tenantId,
TenantScopeConfig config,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a tenant is in the global cross-tenant whitelist.
/// </summary>
Task<bool> IsInGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Adds a tenant to the global cross-tenant whitelist.
/// </summary>
Task AddToGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Removes a tenant from the global cross-tenant whitelist.
/// </summary>
Task RemoveFromGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for tenant resource ownership tracking.
/// </summary>
public interface ITenantResourceStore
{
/// <summary>
/// Gets the tenant ID that owns a resource.
/// </summary>
Task<string?> GetResourceTenantAsync(
string resourceId,
CancellationToken cancellationToken = default);
/// <summary>
/// Registers resource ownership for a tenant.
/// </summary>
Task RegisterResourceAsync(
string tenantId,
string resourceId,
string resourceType,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if all resources belong to the specified tenant.
/// </summary>
Task<(bool AllBelong, IReadOnlyList<string> ViolatingResources)> CheckResourceOwnershipAsync(
string tenantId,
IReadOnlyList<string> resourceIds,
CancellationToken cancellationToken = default);
}
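// Scope check usage sketch ("enforcer" is an assumed ITenantScopeEnforcer; IDs illustrative):
//
//     var result = await enforcer.CheckScopeAsync(new TenantScopeCheckRequest
//     {
//         RequestingTenantId = "tenant-a",
//         TargetTenantId = "tenant-b",
//         ResourceIds = ["sbom-123"],
//         Operation = TenantScopeOperation.Export
//     });
//     if (!result.Allowed)
//     {
//         // Under the default config (StrictIsolation = true, empty allow lists) this
//         // denies with TenantScopeDenialReason.StrictIsolationViolation.
//     }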

View File

@@ -0,0 +1,144 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// In-memory implementation of tenant scope config store for testing.
/// </summary>
public sealed class InMemoryTenantScopeConfigStore : ITenantScopeConfigStore
{
private readonly ConcurrentDictionary<string, TenantScopeConfig> _configs = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, bool> _globalWhitelist = new(StringComparer.OrdinalIgnoreCase);
private TenantScopeConfig _defaultConfig = new();
/// <summary>
/// Sets the default configuration.
/// </summary>
public void SetDefaultConfig(TenantScopeConfig config)
{
_defaultConfig = config;
}
/// <inheritdoc />
public TenantScopeConfig GetDefaultConfig() => _defaultConfig;
/// <inheritdoc />
public Task<TenantScopeConfig?> GetTenantConfigAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_configs.TryGetValue(tenantId, out var config);
return Task.FromResult(config);
}
/// <inheritdoc />
public Task SaveTenantConfigAsync(
string tenantId,
TenantScopeConfig config,
CancellationToken cancellationToken = default)
{
_configs[tenantId] = config;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<bool> IsInGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
return Task.FromResult(_globalWhitelist.ContainsKey(tenantId));
}
/// <inheritdoc />
public Task AddToGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_globalWhitelist[tenantId] = true;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task RemoveFromGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_globalWhitelist.TryRemove(tenantId, out _);
return Task.CompletedTask;
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_configs.Clear();
_globalWhitelist.Clear();
_defaultConfig = new TenantScopeConfig();
}
}
/// <summary>
/// In-memory implementation of tenant resource store for testing.
/// </summary>
public sealed class InMemoryTenantResourceStore : ITenantResourceStore
{
private readonly ConcurrentDictionary<string, ResourceInfo> _resources = new(StringComparer.OrdinalIgnoreCase);
/// <inheritdoc />
public Task<string?> GetResourceTenantAsync(
string resourceId,
CancellationToken cancellationToken = default)
{
_resources.TryGetValue(resourceId, out var info);
return Task.FromResult(info?.TenantId);
}
/// <inheritdoc />
public Task RegisterResourceAsync(
string tenantId,
string resourceId,
string resourceType,
CancellationToken cancellationToken = default)
{
_resources[resourceId] = new ResourceInfo(tenantId, resourceType);
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<(bool AllBelong, IReadOnlyList<string> ViolatingResources)> CheckResourceOwnershipAsync(
string tenantId,
IReadOnlyList<string> resourceIds,
CancellationToken cancellationToken = default)
{
var violating = new List<string>();
foreach (var resourceId in resourceIds)
{
if (_resources.TryGetValue(resourceId, out var info))
{
if (!string.Equals(info.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
{
violating.Add(resourceId);
}
}
// Unregistered resources are allowed for now (they may belong to the tenant);
// a stricter policy could treat missing registration as a violation.
}
return Task.FromResult<(bool, IReadOnlyList<string>)>((violating.Count == 0, violating));
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_resources.Clear();
}
private sealed record ResourceInfo(string TenantId, string ResourceType);
}

View File

@@ -0,0 +1,324 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Default implementation of tenant scope enforcer.
/// </summary>
public sealed class TenantScopeEnforcer : ITenantScopeEnforcer
{
private readonly ITenantScopeConfigStore _configStore;
private readonly ITenantResourceStore _resourceStore;
private readonly ILogger<TenantScopeEnforcer> _logger;
private readonly TimeProvider _timeProvider;
public TenantScopeEnforcer(
ITenantScopeConfigStore configStore,
ITenantResourceStore resourceStore,
ILogger<TenantScopeEnforcer> logger,
TimeProvider? timeProvider = null)
{
_configStore = configStore ?? throw new ArgumentNullException(nameof(configStore));
_resourceStore = resourceStore ?? throw new ArgumentNullException(nameof(resourceStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<TenantScopeCheckResult> CheckScopeAsync(
TenantScopeCheckRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
// Validate tenant IDs
var requestingValidation = ValidateIds(request.RequestingTenantId, request.RequestingProjectId);
if (!requestingValidation.IsValid)
{
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.InvalidTenantId,
requestingValidation.Errors[0].Message);
}
var targetValidation = ValidateIds(request.TargetTenantId, request.TargetProjectId);
if (!targetValidation.IsValid)
{
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.InvalidTenantId,
targetValidation.Errors[0].Message);
}
// Get config for requesting tenant
var config = await GetConfigOrDefaultAsync(request.RequestingTenantId, cancellationToken);
if (!config.Enabled)
{
// Scope enforcement disabled - allow everything
_logger.LogDebug(
"Tenant scope enforcement disabled for tenant {TenantId}",
request.RequestingTenantId);
return TenantScopeCheckResult.Allow();
}
// Check if this is a same-tenant operation
var isCrossTenant = !string.Equals(
request.RequestingTenantId,
request.TargetTenantId,
StringComparison.OrdinalIgnoreCase);
if (!isCrossTenant)
{
// Same tenant - check project scope if applicable
if (request.RequestingProjectId is not null && request.TargetProjectId is not null &&
!string.Equals(request.RequestingProjectId, request.TargetProjectId, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Project scope mismatch: requesting={Requesting}, target={Target}",
request.RequestingProjectId,
request.TargetProjectId);
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.ProjectScopeViolation,
$"Cannot export from project {request.RequestingProjectId} to project {request.TargetProjectId}");
}
// Check resource ownership if resources specified
if (request.ResourceIds.Count > 0)
{
var (allBelong, violating) = await _resourceStore.CheckResourceOwnershipAsync(
request.RequestingTenantId,
request.ResourceIds,
cancellationToken);
if (!allBelong)
{
_logger.LogWarning(
"Resource scope violation for tenant {TenantId}: {ViolatingCount} resources",
request.RequestingTenantId,
violating.Count);
return TenantScopeCheckResult.DenyResources(
violating,
$"Resources do not belong to tenant {request.RequestingTenantId}");
}
}
return TenantScopeCheckResult.Allow();
}
// Cross-tenant operation
_logger.LogInformation(
"Cross-tenant operation: {RequestingTenant} -> {TargetTenant} ({Operation})",
request.RequestingTenantId,
request.TargetTenantId,
request.Operation);
// Check strict isolation
if (config.StrictIsolation)
{
// Check if target is in allowed targets list
if (!config.AllowedTargetTenants.Contains(request.TargetTenantId, StringComparer.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Cross-tenant denied by strict isolation: {Requesting} -> {Target}",
request.RequestingTenantId,
request.TargetTenantId);
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.StrictIsolationViolation,
$"Strict isolation prevents export from tenant {request.RequestingTenantId} to {request.TargetTenantId}");
}
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: false);
}
// Check whitelist
if (config.CrossTenantWhitelist.Contains(request.TargetTenantId, StringComparer.OrdinalIgnoreCase))
{
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: true);
}
// Check global whitelist
var inGlobalWhitelist = await _configStore.IsInGlobalWhitelistAsync(
request.TargetTenantId,
cancellationToken);
if (inGlobalWhitelist)
{
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: true);
}
// Not in any whitelist
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.TargetTenantNotWhitelisted,
$"Target tenant {request.TargetTenantId} is not whitelisted for cross-tenant exports");
}
public TenantScopedPath CreateScopedPath(
string tenantId,
string? projectId,
string originalPath)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(originalPath);
var config = GetConfigForTenant(tenantId);
var prefix = GetScopePrefix(tenantId, projectId, config);
// Normalize and combine paths
var normalizedOriginal = originalPath.TrimStart('/');
var scopedPath = $"{prefix}/{normalizedOriginal}";
return new TenantScopedPath
{
OriginalPath = originalPath,
ScopedPath = scopedPath,
TenantId = tenantId,
ProjectId = projectId ?? config.DefaultProjectId,
RelativePath = normalizedOriginal
};
}
public TenantScopedPath? ParseScopedPath(string scopedPath)
{
if (string.IsNullOrWhiteSpace(scopedPath))
return null;
var config = _configStore.GetDefaultConfig();
// Try to extract tenant and project from path
// Expected format: tenants/{tenantId}/projects/{projectId}/... or tenants/{tenantId}/...
var tenantMatch = Regex.Match(scopedPath, @"^tenants/([^/]+)(?:/projects/([^/]+))?/(.+)$");
if (tenantMatch.Success)
{
var tenantId = tenantMatch.Groups[1].Value;
var projectId = tenantMatch.Groups[2].Success ? tenantMatch.Groups[2].Value : null;
var relativePath = tenantMatch.Groups[3].Value;
return new TenantScopedPath
{
OriginalPath = relativePath,
ScopedPath = scopedPath,
TenantId = tenantId,
ProjectId = projectId,
RelativePath = relativePath
};
}
// Try simpler format: {tenantId}/...
var simpleMatch = Regex.Match(scopedPath, @"^([^/]+)/(.+)$");
if (simpleMatch.Success)
{
var potentialTenantId = simpleMatch.Groups[1].Value;
if (TenantIdValidator.IsValid(potentialTenantId))
{
return new TenantScopedPath
{
OriginalPath = simpleMatch.Groups[2].Value,
ScopedPath = scopedPath,
TenantId = potentialTenantId,
ProjectId = null,
RelativePath = simpleMatch.Groups[2].Value
};
}
}
return null;
}
public TenantScopeValidationResult ValidateIds(string tenantId, string? projectId = null)
{
var tenantValidation = TenantIdValidator.Validate(tenantId);
if (!tenantValidation.IsValid)
{
return tenantValidation;
}
// Project ID validation (same rules, but optional)
if (projectId is not null && !TenantIdValidator.IsValid(projectId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidProjectId,
Message = "Project ID must be 3-64 alphanumeric characters (hyphens/underscores allowed) or a valid GUID",
Field = "projectId"
});
}
return TenantScopeValidationResult.Valid();
}
public TenantProvenanceContext CreateProvenanceContext(
string tenantId,
string? projectId,
string exportRunId,
IReadOnlyList<TenantScopedManifestEntry> entries,
IReadOnlyList<CrossTenantRef>? crossTenantRefs = null)
{
var scopePrefix = GetScopePrefix(tenantId, projectId);
return new TenantProvenanceContext
{
TenantId = tenantId,
ProjectId = projectId,
ExportRunId = exportRunId,
ExportedAt = _timeProvider.GetUtcNow(),
ScopePrefix = scopePrefix,
ArtifactCount = entries.Count,
TotalSizeBytes = entries.Sum(e => e.SizeBytes),
CrossTenantRefs = crossTenantRefs
};
}
public string GetScopePrefix(string tenantId, string? projectId = null)
{
var config = GetConfigForTenant(tenantId);
return GetScopePrefix(tenantId, projectId, config);
}
private static string GetScopePrefix(string tenantId, string? projectId, TenantScopeConfig config)
{
var prefix = config.PathPrefixPattern.Replace("{tenantId}", tenantId);
if (config.IncludeProjectInPath && !string.IsNullOrEmpty(projectId))
{
var projectPrefix = config.ProjectPrefixPattern.Replace("{projectId}", projectId);
prefix = $"{prefix}/{projectPrefix}";
}
else if (config.IncludeProjectInPath)
{
var projectPrefix = config.ProjectPrefixPattern.Replace("{projectId}", config.DefaultProjectId);
prefix = $"{prefix}/{projectPrefix}";
}
return prefix.TrimEnd('/');
}
public bool IsPathOwnedByTenant(string path, string tenantId)
{
if (string.IsNullOrWhiteSpace(path) || string.IsNullOrWhiteSpace(tenantId))
return false;
var parsed = ParseScopedPath(path);
if (parsed is null)
return false;
return string.Equals(parsed.TenantId, tenantId, StringComparison.OrdinalIgnoreCase);
}
public TenantScopeConfig GetConfigForTenant(string tenantId)
{
// Synchronous fallback that blocks on the async store (sync-over-async);
// a production implementation should cache tenant configs instead.
var config = _configStore.GetTenantConfigAsync(tenantId, default).GetAwaiter().GetResult();
return config ?? _configStore.GetDefaultConfig();
}
private async Task<TenantScopeConfig> GetConfigOrDefaultAsync(
string tenantId,
CancellationToken cancellationToken)
{
var config = await _configStore.GetTenantConfigAsync(tenantId, cancellationToken);
return config ?? _configStore.GetDefaultConfig();
}
}
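// Path scoping round-trip sketch under the default config (values illustrative):
//
//     var scoped = enforcer.CreateScopedPath("tenant-a", "proj-1", "/sboms/app.json");
//     // scoped.ScopedPath == "tenants/tenant-a/projects/proj-1/sboms/app.json"
//     var parsed = enforcer.ParseScopedPath(scoped.ScopedPath);
//     // parsed?.TenantId == "tenant-a", parsed?.ProjectId == "proj-1",
//     // parsed?.RelativePath == "sboms/app.json"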

View File

@@ -0,0 +1,395 @@
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Configuration for tenant scope enforcement in exports.
/// </summary>
public sealed record TenantScopeConfig
{
/// <summary>
/// Whether tenant scope enforcement is enabled.
/// </summary>
public bool Enabled { get; init; } = true;
/// <summary>
/// Pattern for tenant prefix in paths (e.g., "tenants/{tenantId}" or "{tenantId}").
/// </summary>
public string PathPrefixPattern { get; init; } = "tenants/{tenantId}";
/// <summary>
/// Pattern for project prefix in paths (appended after tenant).
/// </summary>
public string ProjectPrefixPattern { get; init; } = "projects/{projectId}";
/// <summary>
/// Whether to include project in path prefix.
/// </summary>
public bool IncludeProjectInPath { get; init; } = true;
/// <summary>
/// Whether to enforce strict tenant isolation (no cross-tenant refs).
/// </summary>
public bool StrictIsolation { get; init; } = true;
/// <summary>
/// List of tenant IDs allowed for cross-tenant exports.
/// </summary>
public IReadOnlyList<string> CrossTenantWhitelist { get; init; } = [];
/// <summary>
/// List of target tenant IDs this tenant can export to.
/// </summary>
public IReadOnlyList<string> AllowedTargetTenants { get; init; } = [];
/// <summary>
/// Default project ID when none is specified.
/// </summary>
public string DefaultProjectId { get; init; } = "default";
}
/// <summary>
/// Tenant-scoped artifact path information.
/// </summary>
public sealed record TenantScopedPath
{
/// <summary>
/// The original path before tenant scoping.
/// </summary>
public required string OriginalPath { get; init; }
/// <summary>
/// The tenant-scoped path (prefixed with tenant/project).
/// </summary>
public required string ScopedPath { get; init; }
/// <summary>
/// The tenant ID.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// The project ID.
/// </summary>
public string? ProjectId { get; init; }
/// <summary>
/// Path relative to tenant/project prefix.
/// </summary>
public required string RelativePath { get; init; }
}
/// <summary>
/// Request to check tenant scope for an export operation.
/// </summary>
public sealed record TenantScopeCheckRequest
{
/// <summary>
/// The requesting tenant ID.
/// </summary>
public required string RequestingTenantId { get; init; }
/// <summary>
/// The requesting project ID (optional).
/// </summary>
public string? RequestingProjectId { get; init; }
/// <summary>
/// The target tenant ID for the export.
/// </summary>
public required string TargetTenantId { get; init; }
/// <summary>
/// The target project ID (optional).
/// </summary>
public string? TargetProjectId { get; init; }
/// <summary>
/// Resource IDs being accessed.
/// </summary>
public IReadOnlyList<string> ResourceIds { get; init; } = [];
/// <summary>
/// The operation being performed.
/// </summary>
public TenantScopeOperation Operation { get; init; } = TenantScopeOperation.Export;
}
/// <summary>
/// Result of a tenant scope check.
/// </summary>
public sealed record TenantScopeCheckResult
{
/// <summary>
/// Whether the operation is allowed.
/// </summary>
public bool Allowed { get; init; }
/// <summary>
/// Denial reason if not allowed.
/// </summary>
public TenantScopeDenialReason? DenialReason { get; init; }
/// <summary>
/// Detailed message explaining the decision.
/// </summary>
public string? Message { get; init; }
/// <summary>
/// Whether this is a cross-tenant operation.
/// </summary>
public bool IsCrossTenant { get; init; }
/// <summary>
/// Whether the operation was allowed via whitelist.
/// </summary>
public bool AllowedViaWhitelist { get; init; }
/// <summary>
/// Resources that failed scope check.
/// </summary>
public IReadOnlyList<string> DeniedResources { get; init; } = [];
public static TenantScopeCheckResult Allow() => new() { Allowed = true };
public static TenantScopeCheckResult AllowCrossTenant(bool viaWhitelist) => new()
{
Allowed = true,
IsCrossTenant = true,
AllowedViaWhitelist = viaWhitelist
};
public static TenantScopeCheckResult Deny(TenantScopeDenialReason reason, string message) => new()
{
Allowed = false,
DenialReason = reason,
Message = message
};
public static TenantScopeCheckResult DenyResources(IReadOnlyList<string> resources, string message) => new()
{
Allowed = false,
DenialReason = TenantScopeDenialReason.ResourceScopeViolation,
Message = message,
DeniedResources = resources
};
}
/// <summary>
/// Reason for denying a tenant scope check.
/// </summary>
public enum TenantScopeDenialReason
{
/// <summary>Cross-tenant access not allowed.</summary>
CrossTenantNotAllowed,
/// <summary>Target tenant not in whitelist.</summary>
TargetTenantNotWhitelisted,
/// <summary>Resource belongs to different tenant.</summary>
ResourceScopeViolation,
/// <summary>Project scope violation.</summary>
ProjectScopeViolation,
/// <summary>Strict isolation prevents operation.</summary>
StrictIsolationViolation,
/// <summary>Invalid tenant ID format.</summary>
InvalidTenantId,
/// <summary>Tenant scope enforcement is disabled but operation requires it.</summary>
EnforcementDisabled
}
/// <summary>
/// Types of tenant-scoped operations.
/// </summary>
public enum TenantScopeOperation
{
/// <summary>Export data from tenant.</summary>
Export,
/// <summary>Read/access data within tenant.</summary>
Read,
/// <summary>Share data with another tenant.</summary>
Share,
/// <summary>Verify data from tenant.</summary>
Verify,
/// <summary>Delete data within tenant.</summary>
Delete
}
/// <summary>
/// Tenant-scoped manifest entry with prefix information.
/// </summary>
public sealed record TenantScopedManifestEntry
{
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("projectId")]
public string? ProjectId { get; init; }
[JsonPropertyName("relativePath")]
public required string RelativePath { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public string? MediaType { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Provenance context for tenant-scoped exports.
/// </summary>
public sealed record TenantProvenanceContext
{
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("projectId")]
public string? ProjectId { get; init; }
[JsonPropertyName("exportRunId")]
public required string ExportRunId { get; init; }
[JsonPropertyName("exportedAt")]
public required DateTimeOffset ExportedAt { get; init; }
[JsonPropertyName("scopePrefix")]
public required string ScopePrefix { get; init; }
[JsonPropertyName("artifactCount")]
public int ArtifactCount { get; init; }
[JsonPropertyName("totalSizeBytes")]
public long TotalSizeBytes { get; init; }
[JsonPropertyName("crossTenantRefs")]
public IReadOnlyList<CrossTenantRef>? CrossTenantRefs { get; init; }
}
/// <summary>
/// Reference to a cross-tenant resource in an export.
/// </summary>
public sealed record CrossTenantRef
{
[JsonPropertyName("sourceTenantId")]
public required string SourceTenantId { get; init; }
[JsonPropertyName("resourceId")]
public required string ResourceId { get; init; }
[JsonPropertyName("resourceType")]
public required string ResourceType { get; init; }
[JsonPropertyName("allowedVia")]
public required string AllowedVia { get; init; }
}
/// <summary>
/// Tenant scope validation result.
/// </summary>
public sealed record TenantScopeValidationResult
{
public bool IsValid { get; init; }
public IReadOnlyList<TenantScopeValidationError> Errors { get; init; } = [];
public static TenantScopeValidationResult Valid() => new() { IsValid = true };
public static TenantScopeValidationResult Invalid(params TenantScopeValidationError[] errors) => new()
{
IsValid = false,
Errors = errors
};
}
/// <summary>
/// Validation error for tenant scope.
/// </summary>
public sealed record TenantScopeValidationError
{
public required string Code { get; init; }
public required string Message { get; init; }
public string? Field { get; init; }
}
/// <summary>
/// Error codes for tenant scope enforcement.
/// </summary>
public static class TenantScopeErrorCodes
{
public const string InvalidTenantId = "TENANT_INVALID_ID";
public const string InvalidProjectId = "TENANT_INVALID_PROJECT_ID";
public const string CrossTenantDenied = "TENANT_CROSS_TENANT_DENIED";
public const string NotWhitelisted = "TENANT_NOT_WHITELISTED";
public const string ResourceScopeViolation = "TENANT_RESOURCE_SCOPE_VIOLATION";
public const string ProjectScopeViolation = "TENANT_PROJECT_SCOPE_VIOLATION";
public const string StrictIsolation = "TENANT_STRICT_ISOLATION";
public const string InvalidPathPrefix = "TENANT_INVALID_PATH_PREFIX";
public const string MissingTenantContext = "TENANT_MISSING_CONTEXT";
}
/// <summary>
/// Helper for tenant ID validation.
/// </summary>
public static partial class TenantIdValidator
{
// Pattern: alphanumeric with hyphens and underscores, 3-64 chars, or valid GUID
private static readonly Regex TenantIdPattern = TenantIdRegex();
[GeneratedRegex(@"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,63}$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$")]
private static partial Regex TenantIdRegex();
/// <summary>
/// Validates a tenant ID format.
/// </summary>
public static bool IsValid(string? tenantId)
{
if (string.IsNullOrWhiteSpace(tenantId)) return false;
return TenantIdPattern.IsMatch(tenantId);
}
/// <summary>
/// Validates a tenant ID and returns errors if invalid.
/// </summary>
public static TenantScopeValidationResult Validate(string? tenantId)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidTenantId,
Message = "Tenant ID is required",
Field = "tenantId"
});
}
if (!IsValid(tenantId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidTenantId,
Message = "Tenant ID must be 3-64 alphanumeric characters (hyphens/underscores allowed) or a valid GUID",
Field = "tenantId"
});
}
return TenantScopeValidationResult.Valid();
}
}
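// Validation sketch: a 3-64 character slug or a GUID passes; shorter IDs fail.
//
//     TenantIdValidator.IsValid("acme-prod");                            // true
//     TenantIdValidator.IsValid("3f2504e0-4f89-11d3-9a0c-0305e82c3301"); // true (GUID form)
//     TenantIdValidator.IsValid("ab");                                   // false (minimum is 3 chars)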

View File

@@ -0,0 +1,57 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Extension methods for registering tenant scope services.
/// </summary>
public static class TenantScopeServiceCollectionExtensions
{
/// <summary>
/// Registers tenant scope services with in-memory stores.
/// </summary>
public static IServiceCollection AddTenantScopeEnforcement(this IServiceCollection services)
{
services.AddSingleton<InMemoryTenantScopeConfigStore>();
services.AddSingleton<ITenantScopeConfigStore>(sp => sp.GetRequiredService<InMemoryTenantScopeConfigStore>());
services.AddSingleton<InMemoryTenantResourceStore>();
services.AddSingleton<ITenantResourceStore>(sp => sp.GetRequiredService<InMemoryTenantResourceStore>());
services.AddSingleton<ITenantScopeEnforcer, TenantScopeEnforcer>();
return services;
}
/// <summary>
/// Registers tenant scope services with custom stores.
/// </summary>
public static IServiceCollection AddTenantScopeEnforcement<TConfigStore, TResourceStore>(
this IServiceCollection services)
where TConfigStore : class, ITenantScopeConfigStore
where TResourceStore : class, ITenantResourceStore
{
services.AddSingleton<ITenantScopeConfigStore, TConfigStore>();
services.AddSingleton<ITenantResourceStore, TResourceStore>();
services.AddSingleton<ITenantScopeEnforcer, TenantScopeEnforcer>();
return services;
}
/// <summary>
/// Configures the default tenant scope configuration.
/// </summary>
public static IServiceCollection ConfigureTenantScope(
this IServiceCollection services,
Func<TenantScopeConfig, TenantScopeConfig> configure)
{
// TenantScopeConfig is an immutable record with init-only properties, so the
// caller supplies a transform over the defaults (typically a `with` expression)
// rather than an Action, which could not set init-only members after construction.
services.AddSingleton(sp => configure(new TenantScopeConfig()));
return services;
}
}
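// Usage sketch: register enforcement, then shape the defaults with a `with` transform.
//
//     services.AddTenantScopeEnforcement(); // in-memory stores, suitable for tests
//     services.ConfigureTenantScope(defaults => defaults with
//     {
//         StrictIsolation = false,
//         CrossTenantWhitelist = ["partner-tenant"]
//     });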

View File

@@ -0,0 +1,859 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Request to verify an export bundle or artifact.
/// </summary>
public sealed record ExportVerificationRequest
{
/// <summary>
/// Run ID to verify.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID for scope validation.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Path to the manifest file.
/// </summary>
public string? ManifestPath { get; init; }
/// <summary>
/// Manifest content (if not reading from path).
/// </summary>
public string? ManifestContent { get; init; }
/// <summary>
/// Path to signature file.
/// </summary>
public string? SignaturePath { get; init; }
/// <summary>
/// Signature content (if not reading from path).
/// </summary>
public string? SignatureContent { get; init; }
/// <summary>
/// Verification options.
/// </summary>
public ExportVerificationOptions Options { get; init; } = new();
}
/// <summary>
/// Options for verification.
/// </summary>
public sealed record ExportVerificationOptions
{
/// <summary>
/// Whether to verify content hashes.
/// </summary>
[JsonPropertyName("verifyHashes")]
public bool VerifyHashes { get; init; } = true;
/// <summary>
/// Whether to verify signatures.
/// </summary>
[JsonPropertyName("verifySignatures")]
public bool VerifySignatures { get; init; } = true;
/// <summary>
/// Whether to check signature against Rekor transparency log.
/// </summary>
[JsonPropertyName("checkRekor")]
public bool CheckRekor { get; init; } = false;
/// <summary>
/// Whether to verify manifest integrity (internal consistency).
/// </summary>
[JsonPropertyName("verifyManifestIntegrity")]
public bool VerifyManifestIntegrity { get; init; } = true;
/// <summary>
/// Whether to verify encryption metadata.
/// </summary>
[JsonPropertyName("verifyEncryption")]
public bool VerifyEncryption { get; init; } = true;
/// <summary>
/// Trusted public keys for signature verification (PEM or base64).
/// </summary>
[JsonPropertyName("trustedKeys")]
public IReadOnlyList<string> TrustedKeys { get; init; } = [];
/// <summary>
/// Trusted certificate roots for signature verification.
/// </summary>
[JsonPropertyName("trustedRoots")]
public IReadOnlyList<string> TrustedRoots { get; init; } = [];
}
/// <summary>
/// Result of export verification.
/// </summary>
public sealed record ExportVerificationResult
{
/// <summary>
/// Overall verification status.
/// </summary>
public required VerificationStatus Status { get; init; }
/// <summary>
/// Whether verification passed.
/// </summary>
public bool IsValid => Status == VerificationStatus.Valid;
/// <summary>
/// Run ID that was verified.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Manifest verification result.
/// </summary>
public ManifestVerificationResult? Manifest { get; init; }
/// <summary>
/// Signature verification result.
/// </summary>
public SignatureVerificationResult? Signature { get; init; }
/// <summary>
/// Hash verification results for individual files.
/// </summary>
public IReadOnlyList<HashVerificationResult> FileHashes { get; init; } = [];
/// <summary>
/// Encryption verification result.
/// </summary>
public EncryptionVerificationResult? Encryption { get; init; }
/// <summary>
/// Attestation status.
/// </summary>
public AttestationStatus? Attestation { get; init; }
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<VerificationError> Errors { get; init; } = [];
/// <summary>
/// Verification warnings.
/// </summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
/// <summary>
/// When verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
public static ExportVerificationResult Failed(Guid runId, params VerificationError[] errors)
=> new()
{
Status = VerificationStatus.Invalid,
RunId = runId,
Errors = errors
};
}
/// <summary>
/// Overall verification status.
/// </summary>
public enum VerificationStatus
{
/// <summary>
/// All checks passed.
/// </summary>
Valid = 1,
/// <summary>
/// Some checks failed.
/// </summary>
Invalid = 2,
/// <summary>
/// Verification was partial (some checks skipped).
/// </summary>
Partial = 3,
/// <summary>
/// Verification could not be performed.
/// </summary>
Error = 4,
/// <summary>
/// Verification is still in progress.
/// </summary>
Pending = 5
}
/// <summary>
/// Result of manifest verification.
/// </summary>
public sealed record ManifestVerificationResult
{
/// <summary>
/// Whether the manifest is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Manifest format version.
/// </summary>
public string? FormatVersion { get; init; }
/// <summary>
/// Number of entries in manifest.
/// </summary>
public int EntryCount { get; init; }
/// <summary>
/// Manifest digest.
/// </summary>
public string? ManifestDigest { get; init; }
/// <summary>
/// Expected manifest digest (if provided).
/// </summary>
public string? ExpectedDigest { get; init; }
/// <summary>
/// Whether manifest digest matches expected.
/// </summary>
public bool DigestMatch { get; init; }
/// <summary>
/// Validation errors.
/// </summary>
public IReadOnlyList<string> ValidationErrors { get; init; } = [];
}
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record SignatureVerificationResult
{
/// <summary>
/// Whether the signature is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Signature algorithm used.
/// </summary>
public string? Algorithm { get; init; }
/// <summary>
/// Key ID that signed.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Signer identity (certificate subject, key fingerprint).
/// </summary>
public string? SignerIdentity { get; init; }
/// <summary>
/// When the signature was created.
/// </summary>
public DateTimeOffset? SignedAt { get; init; }
/// <summary>
/// Whether the signature was found in Rekor.
/// </summary>
public bool? RekorVerified { get; init; }
/// <summary>
/// Rekor log index if found.
/// </summary>
public long? RekorLogIndex { get; init; }
/// <summary>
/// Certificate chain if available.
/// </summary>
public IReadOnlyList<string> CertificateChain { get; init; } = [];
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Result of hash verification for a single file.
/// </summary>
public sealed record HashVerificationResult
{
/// <summary>
/// File path.
/// </summary>
public required string Path { get; init; }
/// <summary>
/// Whether the hash matches.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Expected hash from manifest.
/// </summary>
public string? ExpectedHash { get; init; }
/// <summary>
/// Computed hash.
/// </summary>
public string? ComputedHash { get; init; }
/// <summary>
/// Hash algorithm used.
/// </summary>
public string? Algorithm { get; init; }
/// <summary>
/// File size in bytes.
/// </summary>
public long? SizeBytes { get; init; }
/// <summary>
/// Error message if verification failed.
/// </summary>
public string? Error { get; init; }
}
/// <summary>
/// Result of encryption verification.
/// </summary>
public sealed record EncryptionVerificationResult
{
/// <summary>
/// Whether encryption metadata is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Encryption mode.
/// </summary>
public string? Mode { get; init; }
/// <summary>
/// Number of recipients.
/// </summary>
public int RecipientCount { get; init; }
/// <summary>
/// AAD (additional authenticated data) format.
/// </summary>
public string? AadFormat { get; init; }
/// <summary>
/// Whether all encrypted files have valid nonces.
/// </summary>
public bool NonceFormatValid { get; init; }
/// <summary>
/// Validation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Attestation status for a verified export.
/// </summary>
public sealed record AttestationStatus
{
/// <summary>
/// Whether attestation is present.
/// </summary>
public bool HasAttestation { get; init; }
/// <summary>
/// Attestation type (in-toto, DSSE, etc.).
/// </summary>
public string? Type { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Whether attestation signature is valid.
/// </summary>
public bool? SignatureValid { get; init; }
/// <summary>
/// Subject digests from attestation.
/// </summary>
public IReadOnlyList<string> SubjectDigests { get; init; } = [];
/// <summary>
/// Attestation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Verification error.
/// </summary>
public sealed record VerificationError
{
/// <summary>
/// Error code.
/// </summary>
public required string Code { get; init; }
/// <summary>
/// Error message.
/// </summary>
public required string Message { get; init; }
/// <summary>
/// Path or component that failed.
/// </summary>
public string? Path { get; init; }
/// <summary>
/// Additional details.
/// </summary>
public string? Details { get; init; }
}
/// <summary>
/// Common verification error codes.
/// </summary>
public static class VerificationErrorCodes
{
public const string ManifestNotFound = "MANIFEST_NOT_FOUND";
public const string ManifestParseError = "MANIFEST_PARSE_ERROR";
public const string ManifestDigestMismatch = "MANIFEST_DIGEST_MISMATCH";
public const string SignatureNotFound = "SIGNATURE_NOT_FOUND";
public const string SignatureInvalid = "SIGNATURE_INVALID";
public const string SignatureExpired = "SIGNATURE_EXPIRED";
public const string KeyNotTrusted = "KEY_NOT_TRUSTED";
public const string HashMismatch = "HASH_MISMATCH";
public const string FileNotFound = "FILE_NOT_FOUND";
public const string EncryptionInvalid = "ENCRYPTION_INVALID";
public const string AttestationInvalid = "ATTESTATION_INVALID";
public const string RekorVerificationFailed = "REKOR_VERIFICATION_FAILED";
public const string TenantMismatch = "TENANT_MISMATCH";
public const string PackRunNotFound = "PACK_RUN_NOT_FOUND";
public const string PackRunAttestationInvalid = "PACK_RUN_ATTESTATION_INVALID";
public const string SubjectDigestMismatch = "SUBJECT_DIGEST_MISMATCH";
public const string ProvenanceChainBroken = "PROVENANCE_CHAIN_BROKEN";
}
// ========================================================================
// Pack Run Integration Models
// ========================================================================
/// <summary>
/// Request to verify pack run integration with an export.
/// </summary>
public sealed record PackRunVerificationRequest
{
/// <summary>
/// Export run ID.
/// </summary>
public required Guid ExportRunId { get; init; }
/// <summary>
/// Tenant ID for scope validation.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Pack run ID to verify integration with.
/// </summary>
public Guid? PackRunId { get; init; }
/// <summary>
/// Pack run attestation ID (if known independently of the pack run ID).
/// </summary>
public string? AttestationId { get; init; }
/// <summary>
/// Whether to verify subject digests match.
/// </summary>
public bool VerifySubjectAlignment { get; init; } = true;
/// <summary>
/// Whether to verify the provenance chain is complete.
/// </summary>
public bool VerifyProvenanceChain { get; init; } = true;
}
/// <summary>
/// Result of pack run integration verification.
/// </summary>
public sealed record PackRunVerificationResult
{
/// <summary>
/// Whether the pack run integration is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Export run ID.
/// </summary>
public required Guid ExportRunId { get; init; }
/// <summary>
/// Pack run ID (if found).
/// </summary>
public Guid? PackRunId { get; init; }
/// <summary>
/// Pack run attestation verification result.
/// </summary>
public PackRunAttestationResult? Attestation { get; init; }
/// <summary>
/// Subject alignment verification result.
/// </summary>
public SubjectAlignmentResult? SubjectAlignment { get; init; }
/// <summary>
/// Provenance chain verification result.
/// </summary>
public ProvenanceChainResult? ProvenanceChain { get; init; }
/// <summary>
/// Provenance links extracted from the integration.
/// </summary>
public IReadOnlyList<ProvenanceLink> ProvenanceLinks { get; init; } = [];
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<VerificationError> Errors { get; init; } = [];
/// <summary>
/// When verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Result of pack run attestation verification.
/// </summary>
public sealed record PackRunAttestationResult
{
/// <summary>
/// Whether the attestation is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Attestation ID.
/// </summary>
public string? AttestationId { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Whether the attestation signature is valid.
/// </summary>
public bool SignatureValid { get; init; }
/// <summary>
/// Key ID that signed the attestation.
/// </summary>
public string? SignerKeyId { get; init; }
/// <summary>
/// Subject artifacts in the attestation.
/// </summary>
public IReadOnlyList<AttestationSubject> Subjects { get; init; } = [];
/// <summary>
/// Builder information from provenance.
/// </summary>
public BuilderInfo? Builder { get; init; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
/// <summary>
/// Attestation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Subject artifact in an attestation.
/// </summary>
public sealed record AttestationSubject
{
/// <summary>
/// Subject name (typically artifact path).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Digest values keyed by algorithm (e.g., "sha256").
/// </summary>
public IReadOnlyDictionary<string, string> Digest { get; init; } = new Dictionary<string, string>();
}
/// <summary>
/// Builder information from provenance.
/// </summary>
public sealed record BuilderInfo
{
/// <summary>
/// Builder name/identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Builder version.
/// </summary>
public string? Version { get; init; }
/// <summary>
/// Build timestamp.
/// </summary>
public DateTimeOffset? BuildTimestamp { get; init; }
}
/// <summary>
/// Result of subject alignment verification.
/// </summary>
public sealed record SubjectAlignmentResult
{
/// <summary>
/// Whether all subjects align correctly.
/// </summary>
public bool IsAligned { get; init; }
/// <summary>
/// Total subjects in export.
/// </summary>
public int ExportSubjectCount { get; init; }
/// <summary>
/// Total subjects in pack run attestation.
/// </summary>
public int PackRunSubjectCount { get; init; }
/// <summary>
/// Number of matching subjects.
/// </summary>
public int MatchedCount { get; init; }
/// <summary>
/// Subjects only in export.
/// </summary>
public IReadOnlyList<string> ExportOnlySubjects { get; init; } = [];
/// <summary>
/// Subjects only in pack run.
/// </summary>
public IReadOnlyList<string> PackRunOnlySubjects { get; init; } = [];
/// <summary>
/// Subjects with digest mismatches.
/// </summary>
public IReadOnlyList<DigestMismatch> DigestMismatches { get; init; } = [];
}
/// <summary>
/// Digest mismatch between export and pack run subjects.
/// </summary>
public sealed record DigestMismatch
{
/// <summary>
/// Subject name.
/// </summary>
public required string SubjectName { get; init; }
/// <summary>
/// Digest in export.
/// </summary>
public string? ExportDigest { get; init; }
/// <summary>
/// Digest in pack run attestation.
/// </summary>
public string? PackRunDigest { get; init; }
/// <summary>
/// Algorithm used.
/// </summary>
public string Algorithm { get; init; } = "sha256";
}
/// <summary>
/// Result of provenance chain verification.
/// </summary>
public sealed record ProvenanceChainResult
{
/// <summary>
/// Whether the provenance chain is complete.
/// </summary>
public bool IsComplete { get; init; }
/// <summary>
/// Chain depth (number of links).
/// </summary>
public int ChainDepth { get; init; }
/// <summary>
/// Links in the chain.
/// </summary>
public IReadOnlyList<ProvenanceLink> Links { get; init; } = [];
/// <summary>
/// Missing links in the chain.
/// </summary>
public IReadOnlyList<string> MissingLinks { get; init; } = [];
/// <summary>
/// Chain errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// A link in the provenance chain.
/// </summary>
public sealed record ProvenanceLink
{
/// <summary>
/// Link type.
/// </summary>
public required ProvenanceLinkType Type { get; init; }
/// <summary>
/// Source identifier (e.g., pack run ID, attestation ID).
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Target identifier (e.g., export run ID, artifact path).
/// </summary>
public required string TargetId { get; init; }
/// <summary>
/// Digest of the linked artifact.
/// </summary>
public string? Digest { get; init; }
/// <summary>
/// Link metadata.
/// </summary>
public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>();
/// <summary>
/// When the link was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
}
/// <summary>
/// Types of provenance links.
/// </summary>
public enum ProvenanceLinkType
{
/// <summary>
/// Pack run produces artifact.
/// </summary>
PackRunToArtifact = 1,
/// <summary>
/// Artifact included in export.
/// </summary>
ArtifactToExport = 2,
/// <summary>
/// Attestation references subject.
/// </summary>
AttestationToSubject = 3,
/// <summary>
/// Export references attestation.
/// </summary>
ExportToAttestation = 4,
/// <summary>
/// Signature covers artifact.
/// </summary>
SignatureToArtifact = 5
}
/// <summary>
/// Streaming verification event.
/// </summary>
public sealed record VerificationProgressEvent
{
/// <summary>
/// Event type.
/// </summary>
public required VerificationProgressType Type { get; init; }
/// <summary>
/// Current item being verified.
/// </summary>
public string? CurrentItem { get; init; }
/// <summary>
/// Progress percentage (0-100).
/// </summary>
public int ProgressPercent { get; init; }
/// <summary>
/// Total items to verify.
/// </summary>
public int TotalItems { get; init; }
/// <summary>
/// Items verified so far.
/// </summary>
public int VerifiedItems { get; init; }
/// <summary>
/// Items that passed.
/// </summary>
public int PassedItems { get; init; }
/// <summary>
/// Items that failed.
/// </summary>
public int FailedItems { get; init; }
/// <summary>
/// Message for this event.
/// </summary>
public string? Message { get; init; }
/// <summary>
/// Timestamp.
/// </summary>
public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Types of verification progress events.
/// </summary>
public enum VerificationProgressType
{
Started = 1,
ManifestVerified = 2,
SignatureVerified = 3,
HashVerificationStarted = 4,
HashVerificationProgress = 5,
HashVerificationComplete = 6,
EncryptionVerified = 7,
AttestationVerified = 8,
Completed = 9,
Error = 10
}
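Taken together, these event and status types are enough to drive a live progress display. A minimal consumption sketch, assuming `service` is an `IExportVerificationService` and `request` an `ExportVerificationRequest` wired up as elsewhere in this commit:

await foreach (var evt in service.VerifyStreamingAsync(request, cancellationToken))
{
    switch (evt.Type)
    {
        case VerificationProgressType.HashVerificationProgress:
            // Per-file progress carries the current item and running counters.
            Console.WriteLine($"[{evt.ProgressPercent,3}%] {evt.CurrentItem}");
            break;
        case VerificationProgressType.Completed:
            Console.WriteLine($"{evt.Message} ({evt.PassedItems} passed, {evt.FailedItems} failed)");
            break;
        default:
            Console.WriteLine($"{evt.Type}: {evt.Message}");
            break;
    }
}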

View File

@@ -0,0 +1,828 @@
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Default implementation of the export verification service.
/// </summary>
public sealed class ExportVerificationService : IExportVerificationService
{
private readonly IExportArtifactStore _artifactStore;
private readonly IPackRunAttestationStore? _packRunStore;
private readonly ILogger<ExportVerificationService> _logger;
public ExportVerificationService(
IExportArtifactStore artifactStore,
ILogger<ExportVerificationService> logger)
: this(artifactStore, null, logger)
{
}
public ExportVerificationService(
IExportArtifactStore artifactStore,
IPackRunAttestationStore? packRunStore,
ILogger<ExportVerificationService> logger)
{
_artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore));
_packRunStore = packRunStore;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<ExportVerificationResult> VerifyAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Starting verification for run {RunId}",
request.RunId);
var errors = new List<VerificationError>();
var warnings = new List<string>();
// Get run metadata
var metadata = await _artifactStore.GetRunMetadataAsync(request.RunId, cancellationToken);
if (metadata is null)
{
return ExportVerificationResult.Failed(
request.RunId,
new VerificationError
{
Code = VerificationErrorCodes.ManifestNotFound,
Message = $"Run {request.RunId} not found"
});
}
// Verify tenant
if (metadata.TenantId != request.TenantId)
{
return ExportVerificationResult.Failed(
request.RunId,
new VerificationError
{
Code = VerificationErrorCodes.TenantMismatch,
Message = "Tenant ID does not match run"
});
}
ManifestVerificationResult? manifestResult = null;
SignatureVerificationResult? signatureResult = null;
EncryptionVerificationResult? encryptionResult = null;
AttestationStatus? attestationStatus = null;
var hashResults = new List<HashVerificationResult>();
// Get manifest content
var manifestContent = request.ManifestContent
?? await _artifactStore.GetManifestAsync(request.RunId, cancellationToken);
// Verify manifest
if (request.Options.VerifyManifestIntegrity && !string.IsNullOrEmpty(manifestContent))
{
manifestResult = await VerifyManifestAsync(manifestContent, cancellationToken);
if (!manifestResult.IsValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ManifestParseError,
Message = "Manifest validation failed",
Details = string.Join("; ", manifestResult.ValidationErrors)
});
}
}
else if (request.Options.VerifyManifestIntegrity)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ManifestNotFound,
Message = "No manifest available for verification"
});
}
// Verify signature
if (request.Options.VerifySignatures)
{
var signatureContent = request.SignatureContent
?? await _artifactStore.GetSignatureAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(signatureContent) && !string.IsNullOrEmpty(manifestContent))
{
var payload = Encoding.UTF8.GetBytes(manifestContent);
signatureResult = await VerifySignatureAsync(
signatureContent,
payload,
request.Options,
cancellationToken);
if (!signatureResult.IsValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.SignatureInvalid,
Message = "Signature verification failed",
Details = string.Join("; ", signatureResult.Errors)
});
}
}
else
{
warnings.Add("No signature available for verification");
}
}
// Verify hashes
if (request.Options.VerifyHashes)
{
var artifacts = await _artifactStore.GetArtifactsAsync(request.RunId, cancellationToken);
foreach (var artifact in artifacts)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
using var stream = await _artifactStore.OpenArtifactAsync(
request.RunId,
artifact.RelativePath,
cancellationToken);
if (stream is null)
{
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = false,
Error = "Artifact not found"
});
continue;
}
var algorithm = artifact.HashAlgorithm ?? "sha256";
var hash = await ComputeStreamHashAsync(stream, algorithm, cancellationToken);
var isValid = string.Equals(
hash,
artifact.ExpectedHash,
StringComparison.OrdinalIgnoreCase);
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = isValid,
ExpectedHash = artifact.ExpectedHash,
ComputedHash = hash,
Algorithm = algorithm,
SizeBytes = stream.Length
});
if (!isValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.HashMismatch,
Message = "Hash mismatch",
Path = artifact.RelativePath,
Details = $"Expected: {artifact.ExpectedHash}, Got: {hash}"
});
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to verify artifact {Path}", artifact.RelativePath);
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = false,
Error = ex.Message
});
}
}
}
// Verify encryption metadata
if (request.Options.VerifyEncryption && metadata.EncryptionMode is not null)
{
encryptionResult = VerifyEncryptionMetadata(metadata.EncryptionMode);
if (!encryptionResult.IsValid)
{
errors.AddRange(encryptionResult.Errors.Select(e => new VerificationError
{
Code = VerificationErrorCodes.EncryptionInvalid,
Message = e
}));
}
}
// Determine overall status
var status = DetermineStatus(errors, warnings);
_logger.LogInformation(
"Verification completed for run {RunId}: {Status} with {ErrorCount} errors",
request.RunId, status, errors.Count);
return new ExportVerificationResult
{
Status = status,
RunId = request.RunId,
Manifest = manifestResult,
Signature = signatureResult,
FileHashes = hashResults,
Encryption = encryptionResult,
Attestation = attestationStatus,
Errors = errors,
Warnings = warnings
};
}
/// <inheritdoc />
public async IAsyncEnumerable<VerificationProgressEvent> VerifyStreamingAsync(
ExportVerificationRequest request,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.Started,
Message = "Verification started"
};
// Get artifacts for progress tracking
var artifacts = await _artifactStore.GetArtifactsAsync(request.RunId, cancellationToken);
var totalItems = artifacts.Count + 2; // +2 for manifest and signature (counted even when those checks are skipped)
var verified = 0;
var passed = 0;
var failed = 0;
// Verify manifest
var manifestContent = request.ManifestContent
?? await _artifactStore.GetManifestAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(manifestContent) && request.Options.VerifyManifestIntegrity)
{
var manifestResult = await VerifyManifestAsync(manifestContent, cancellationToken);
verified++;
if (manifestResult.IsValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.ManifestVerified,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = manifestResult.IsValid ? "Manifest valid" : "Manifest invalid"
};
}
// Verify signature
if (request.Options.VerifySignatures)
{
var signatureContent = request.SignatureContent
?? await _artifactStore.GetSignatureAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(signatureContent) && !string.IsNullOrEmpty(manifestContent))
{
var payload = Encoding.UTF8.GetBytes(manifestContent);
var sigResult = await VerifySignatureAsync(
signatureContent,
payload,
request.Options,
cancellationToken);
verified++;
if (sigResult.IsValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.SignatureVerified,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = sigResult.IsValid ? "Signature valid" : "Signature invalid"
};
}
}
// Verify hashes
if (request.Options.VerifyHashes && artifacts.Count > 0)
{
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationStarted,
TotalItems = artifacts.Count,
Message = $"Verifying {artifacts.Count} files"
};
foreach (var artifact in artifacts)
{
cancellationToken.ThrowIfCancellationRequested();
bool isValid = false;
try
{
using var stream = await _artifactStore.OpenArtifactAsync(
request.RunId,
artifact.RelativePath,
cancellationToken);
if (stream is not null)
{
var hash = await ComputeStreamHashAsync(
stream,
artifact.HashAlgorithm ?? "sha256",
cancellationToken);
isValid = string.Equals(hash, artifact.ExpectedHash, StringComparison.OrdinalIgnoreCase);
}
}
catch (OperationCanceledException)
{
    throw;
}
catch
{
    // Ignore other failures - isValid stays false
}
verified++;
if (isValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationProgress,
CurrentItem = artifact.RelativePath,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed
};
}
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationComplete,
TotalItems = artifacts.Count,
VerifiedItems = artifacts.Count,
PassedItems = passed,
FailedItems = failed,
Message = $"Hash verification complete: {passed} passed, {failed} failed"
};
}
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.Completed,
ProgressPercent = 100,
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = failed == 0 ? "Verification successful" : $"Verification completed with {failed} failures"
};
}
/// <inheritdoc />
public Task<ManifestVerificationResult> VerifyManifestAsync(
string manifestContent,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
var entryCount = 0;
string? formatVersion = null;
string? manifestDigest = null;
try
{
// Compute manifest digest
manifestDigest = ComputeHash(Encoding.UTF8.GetBytes(manifestContent), "sha256");
// Try to parse as JSON
using var doc = JsonDocument.Parse(manifestContent);
// Check for version
if (doc.RootElement.TryGetProperty("version", out var versionElem))
{
formatVersion = versionElem.GetString();
}
// Check for entries array
if (doc.RootElement.TryGetProperty("files", out var filesElem) && filesElem.ValueKind == JsonValueKind.Array)
{
entryCount = filesElem.GetArrayLength();
}
else if (doc.RootElement.TryGetProperty("entries", out var entriesElem) && entriesElem.ValueKind == JsonValueKind.Array)
{
entryCount = entriesElem.GetArrayLength();
}
else if (doc.RootElement.ValueKind == JsonValueKind.Array)
{
// Manifest is just an array of entries
entryCount = doc.RootElement.GetArrayLength();
}
}
catch (JsonException ex)
{
// Try parsing as NDJSON
try
{
var lines = manifestContent.Split('\n', StringSplitOptions.RemoveEmptyEntries);
entryCount = 0;
foreach (var line in lines)
{
using var lineDoc = JsonDocument.Parse(line);
entryCount++;
}
}
catch
{
errors.Add($"Invalid manifest format: {ex.Message}");
}
}
return Task.FromResult(new ManifestVerificationResult
{
IsValid = errors.Count == 0,
FormatVersion = formatVersion,
EntryCount = entryCount,
ManifestDigest = manifestDigest,
DigestMatch = true, // No expected digest provided
ValidationErrors = errors
});
}
/// <inheritdoc />
public Task<SignatureVerificationResult> VerifySignatureAsync(
string signatureContent,
byte[] payload,
ExportVerificationOptions options,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
string? algorithm = null;
string? keyId = null;
string? signerIdentity = null;
DateTimeOffset? signedAt = null;
try
{
// Try to parse as DSSE envelope
using var doc = JsonDocument.Parse(signatureContent);
if (doc.RootElement.TryGetProperty("payloadType", out var payloadType))
{
// DSSE format
if (doc.RootElement.TryGetProperty("signatures", out var signatures) &&
signatures.ValueKind == JsonValueKind.Array &&
signatures.GetArrayLength() > 0)
{
var firstSig = signatures[0];
keyId = firstSig.TryGetProperty("keyid", out var kid) ? kid.GetString() : null;
// A full implementation would verify the signature bytes here;
// for now only the envelope structure is validated.
algorithm = "DSSE";
}
else
{
errors.Add("DSSE envelope has no signatures");
}
}
else
{
// Unknown signature format
errors.Add("Unknown signature format");
}
// Check if we have trusted keys and validate
if (options.TrustedKeys.Count > 0 && keyId is not null)
{
if (!options.TrustedKeys.Contains(keyId))
{
errors.Add($"Signer key {keyId} is not in trusted keys list");
}
}
}
catch (JsonException ex)
{
errors.Add($"Failed to parse signature: {ex.Message}");
}
return Task.FromResult(new SignatureVerificationResult
{
IsValid = errors.Count == 0,
Algorithm = algorithm,
KeyId = keyId,
SignerIdentity = signerIdentity,
SignedAt = signedAt,
Errors = errors
});
}
/// <inheritdoc />
public async Task<string> ComputeHashAsync(
string filePath,
string algorithm = "sha256",
CancellationToken cancellationToken = default)
{
using var stream = File.OpenRead(filePath);
return await ComputeStreamHashAsync(stream, algorithm, cancellationToken);
}
/// <inheritdoc />
public string ComputeHash(ReadOnlySpan<byte> content, string algorithm = "sha256")
{
using var hasher = CreateHashAlgorithm(algorithm);
Span<byte> hash = stackalloc byte[hasher.HashSize / 8];
hasher.TryComputeHash(content, hash, out var bytesWritten);
return Convert.ToHexString(hash[..bytesWritten]).ToLowerInvariant();
}
private async Task<string> ComputeStreamHashAsync(
Stream stream,
string algorithm,
CancellationToken cancellationToken)
{
using var hasher = CreateHashAlgorithm(algorithm);
var hash = await hasher.ComputeHashAsync(stream, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static HashAlgorithm CreateHashAlgorithm(string algorithm)
{
return algorithm.ToLowerInvariant() switch
{
"sha256" => SHA256.Create(),
"sha384" => SHA384.Create(),
"sha512" => SHA512.Create(),
_ => throw new ArgumentException($"Unsupported hash algorithm: {algorithm}", nameof(algorithm))
};
}
private static VerificationStatus DetermineStatus(List<VerificationError> errors, List<string> warnings)
{
if (errors.Count == 0)
{
return warnings.Count > 0 ? VerificationStatus.Partial : VerificationStatus.Valid;
}
return errors.Any(e => e.Code == VerificationErrorCodes.TenantMismatch ||
e.Code == VerificationErrorCodes.ManifestNotFound)
? VerificationStatus.Error
: VerificationStatus.Invalid;
}
private static EncryptionVerificationResult VerifyEncryptionMetadata(string encryptionMode)
{
var errors = new List<string>();
var validModes = new[] { "aes-gcm+age", "aes-gcm+kms", "none" };
if (!validModes.Contains(encryptionMode, StringComparer.OrdinalIgnoreCase))
{
errors.Add($"Unknown encryption mode: {encryptionMode}");
}
return new EncryptionVerificationResult
{
IsValid = errors.Count == 0,
Mode = encryptionMode,
RecipientCount = 0, // Would need to parse metadata to get this
AadFormat = "{runId}:{relativePath}",
NonceFormatValid = true,
Errors = errors
};
}
/// <inheritdoc />
public async Task<PackRunVerificationResult> VerifyPackRunIntegrationAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var errors = new List<VerificationError>();
PackRunAttestationResult? attestationResult = null;
SubjectAlignmentResult? alignmentResult = null;
ProvenanceChainResult? chainResult = null;
var provenanceLinks = new List<ProvenanceLink>();
// Get pack run attestation if store is available
if (_packRunStore is not null && request.PackRunId.HasValue)
{
var attestationData = await _packRunStore.GetAttestationAsync(
request.PackRunId.Value,
cancellationToken);
if (attestationData is not null)
{
attestationResult = new PackRunAttestationResult
{
IsValid = attestationData.Status == "Signed",
AttestationId = attestationData.AttestationId,
PredicateType = attestationData.PredicateType,
SignatureValid = attestationData.Status == "Signed",
Subjects = attestationData.Subjects,
Builder = attestationData.Builder,
CreatedAt = attestationData.CreatedAt
};
// Extract provenance links
provenanceLinks.AddRange(await ExtractProvenanceLinksAsync(
request.ExportRunId,
request.PackRunId.Value,
cancellationToken));
}
else
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.PackRunNotFound,
Message = $"Pack run {request.PackRunId} attestation not found"
});
}
}
// Verify subject alignment
if (request.VerifySubjectAlignment && attestationResult is not null)
{
// Export manifest subjects (simplified: a full implementation would parse
// them from the manifest; with an empty list, alignment passes trivially)
var exportSubjects = new List<AttestationSubject>();
alignmentResult = VerifySubjectAlignment(exportSubjects, attestationResult.Subjects);
if (!alignmentResult.IsAligned)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.SubjectDigestMismatch,
Message = "Subject digests do not align between export and pack run"
});
}
}
// Verify provenance chain
if (request.VerifyProvenanceChain)
{
chainResult = new ProvenanceChainResult
{
IsComplete = provenanceLinks.Count > 0,
ChainDepth = provenanceLinks.Count,
Links = provenanceLinks,
MissingLinks = [],
Errors = []
};
if (!chainResult.IsComplete)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ProvenanceChainBroken,
Message = "Provenance chain is incomplete or broken"
});
}
}
return new PackRunVerificationResult
{
IsValid = errors.Count == 0,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
Attestation = attestationResult,
SubjectAlignment = alignmentResult,
ProvenanceChain = chainResult,
ProvenanceLinks = provenanceLinks,
Errors = errors
};
}
/// <inheritdoc />
public SubjectAlignmentResult VerifySubjectAlignment(
IReadOnlyList<AttestationSubject> exportSubjects,
IReadOnlyList<AttestationSubject> packRunSubjects)
{
var exportMap = exportSubjects.ToDictionary(
s => s.Name,
s => s.Digest.TryGetValue("sha256", out var d) ? d : null,
StringComparer.OrdinalIgnoreCase);
var packRunMap = packRunSubjects.ToDictionary(
s => s.Name,
s => s.Digest.TryGetValue("sha256", out var d) ? d : null,
StringComparer.OrdinalIgnoreCase);
var matched = 0;
var exportOnly = new List<string>();
var packRunOnly = new List<string>();
var mismatches = new List<DigestMismatch>();
// Check all export subjects
foreach (var (name, digest) in exportMap)
{
if (packRunMap.TryGetValue(name, out var packRunDigest))
{
if (string.Equals(digest, packRunDigest, StringComparison.OrdinalIgnoreCase))
{
matched++;
}
else
{
mismatches.Add(new DigestMismatch
{
SubjectName = name,
ExportDigest = digest,
PackRunDigest = packRunDigest
});
}
}
else
{
exportOnly.Add(name);
}
}
// Check for pack run subjects not in export
foreach (var name in packRunMap.Keys)
{
if (!exportMap.ContainsKey(name))
{
packRunOnly.Add(name);
}
}
return new SubjectAlignmentResult
{
// Pack-run-only subjects are tolerated: an export may cover a subset of the attested artifacts.
IsAligned = mismatches.Count == 0 && exportOnly.Count == 0,
ExportSubjectCount = exportSubjects.Count,
PackRunSubjectCount = packRunSubjects.Count,
MatchedCount = matched,
ExportOnlySubjects = exportOnly,
PackRunOnlySubjects = packRunOnly,
DigestMismatches = mismatches
};
}
/// <inheritdoc />
public async Task<IReadOnlyList<ProvenanceLink>> ExtractProvenanceLinksAsync(
Guid exportRunId,
Guid packRunId,
CancellationToken cancellationToken = default)
{
var links = new List<ProvenanceLink>();
if (_packRunStore is null)
{
return links;
}
var attestation = await _packRunStore.GetAttestationAsync(packRunId, cancellationToken);
if (attestation is not null)
{
// Link from the export run to the attestation
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.ExportToAttestation,
SourceId = exportRunId.ToString(),
TargetId = attestation.AttestationId,
CreatedAt = attestation.CreatedAt
});
// Links from attestation to subjects
foreach (var subject in attestation.Subjects)
{
var digest = subject.Digest.TryGetValue("sha256", out var d) ? d : null;
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.AttestationToSubject,
SourceId = attestation.AttestationId,
TargetId = subject.Name,
Digest = digest
});
// Link from pack run to artifact
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.PackRunToArtifact,
SourceId = packRunId.ToString(),
TargetId = subject.Name,
Digest = digest
});
// Link from artifact to export
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.ArtifactToExport,
SourceId = subject.Name,
TargetId = exportRunId.ToString(),
Digest = digest
});
}
}
return links;
}
}
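As a quick illustration of the manifest path above: `VerifyManifestAsync` accepts a JSON object with a `files` or `entries` array, a bare JSON array, or NDJSON, and reports the entry count plus the manifest digest. A hedged sketch against an already-constructed `service` (the JSON literal is illustrative):

var manifest = """{"version":"1","files":[{"path":"sbom/app.cdx.json"}]}""";
var check = await service.VerifyManifestAsync(manifest);
// check.IsValid == true, check.EntryCount == 1, check.FormatVersion == "1",
// and check.ManifestDigest holds the lowercase sha256 hex of the manifest bytes.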

View File

@@ -0,0 +1,32 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Extension methods for registering export verification services.
/// </summary>
public static class ExportVerificationServiceCollectionExtensions
{
/// <summary>
/// Registers export verification services with in-memory artifact store.
/// </summary>
public static IServiceCollection AddExportVerification(this IServiceCollection services)
{
services.AddSingleton<IExportArtifactStore, InMemoryExportArtifactStore>();
services.AddSingleton<IExportVerificationService, ExportVerificationService>();
return services;
}
/// <summary>
/// Registers export verification services with custom artifact store.
/// </summary>
public static IServiceCollection AddExportVerification<TArtifactStore>(this IServiceCollection services)
where TArtifactStore : class, IExportArtifactStore
{
services.AddSingleton<IExportArtifactStore, TArtifactStore>();
services.AddSingleton<IExportVerificationService, ExportVerificationService>();
return services;
}
}
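Usage is a one-liner per variant; a sketch (`MyBlobArtifactStore` is a hypothetical custom store):

using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddLogging(); // ExportVerificationService requires an ILogger<>
services.AddExportVerification();                        // default in-memory artifact store
// services.AddExportVerification<MyBlobArtifactStore>(); // hypothetical custom store
var provider = services.BuildServiceProvider();
var verifier = provider.GetRequiredService<IExportVerificationService>();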

View File

@@ -0,0 +1,278 @@
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Service for verifying export bundles and artifacts.
/// </summary>
public interface IExportVerificationService
{
/// <summary>
/// Verifies an export bundle.
/// </summary>
/// <param name="request">Verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result.</returns>
Task<ExportVerificationResult> VerifyAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies an export bundle with progress streaming.
/// </summary>
/// <param name="request">Verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Async enumerable of progress events, ending with final result.</returns>
IAsyncEnumerable<VerificationProgressEvent> VerifyStreamingAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a manifest's internal consistency.
/// </summary>
/// <param name="manifestContent">Manifest JSON content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Manifest verification result.</returns>
Task<ManifestVerificationResult> VerifyManifestAsync(
string manifestContent,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE signature.
/// </summary>
/// <param name="signatureContent">Signature content (DSSE envelope).</param>
/// <param name="payload">Payload that was signed.</param>
/// <param name="options">Verification options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Signature verification result.</returns>
Task<SignatureVerificationResult> VerifySignatureAsync(
string signatureContent,
byte[] payload,
ExportVerificationOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes hash for a file.
/// </summary>
/// <param name="filePath">Path to file.</param>
/// <param name="algorithm">Hash algorithm (sha256, sha384, sha512).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Hex-encoded hash.</returns>
Task<string> ComputeHashAsync(
string filePath,
string algorithm = "sha256",
CancellationToken cancellationToken = default);
/// <summary>
/// Computes hash for content.
/// </summary>
/// <param name="content">Content to hash.</param>
/// <param name="algorithm">Hash algorithm.</param>
/// <returns>Hex-encoded hash.</returns>
string ComputeHash(ReadOnlySpan<byte> content, string algorithm = "sha256");
/// <summary>
/// Verifies pack run integration with an export.
/// </summary>
/// <param name="request">Pack run verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Pack run verification result.</returns>
Task<PackRunVerificationResult> VerifyPackRunIntegrationAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies subject digest alignment between export and pack run.
/// </summary>
/// <param name="exportSubjects">Subjects from export manifest.</param>
/// <param name="packRunSubjects">Subjects from pack run attestation.</param>
/// <returns>Subject alignment result.</returns>
SubjectAlignmentResult VerifySubjectAlignment(
IReadOnlyList<AttestationSubject> exportSubjects,
IReadOnlyList<AttestationSubject> packRunSubjects);
/// <summary>
/// Extracts provenance links from an export and its pack run.
/// </summary>
/// <param name="exportRunId">Export run ID.</param>
/// <param name="packRunId">Pack run ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Provenance links.</returns>
Task<IReadOnlyList<ProvenanceLink>> ExtractProvenanceLinksAsync(
Guid exportRunId,
Guid packRunId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for retrieving pack run attestations.
/// </summary>
public interface IPackRunAttestationStore
{
/// <summary>
/// Gets the attestation for a pack run.
/// </summary>
Task<PackRunAttestationData?> GetAttestationAsync(Guid packRunId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets attestation by ID.
/// </summary>
Task<PackRunAttestationData?> GetAttestationByIdAsync(string attestationId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run IDs linked to an export run.
/// </summary>
Task<IReadOnlyList<Guid>> GetLinkedPackRunsAsync(Guid exportRunId, CancellationToken cancellationToken = default);
}
/// <summary>
/// Pack run attestation data.
/// </summary>
public sealed record PackRunAttestationData
{
/// <summary>
/// Pack run ID.
/// </summary>
public required Guid PackRunId { get; init; }
/// <summary>
/// Attestation ID.
/// </summary>
public required string AttestationId { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// DSSE envelope content.
/// </summary>
public string? DsseEnvelope { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Subjects in the attestation.
/// </summary>
public IReadOnlyList<AttestationSubject> Subjects { get; init; } = [];
/// <summary>
/// Builder information.
/// </summary>
public BuilderInfo? Builder { get; init; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
/// <summary>
/// Attestation status.
/// </summary>
public string? Status { get; init; }
}
/// <summary>
/// Store for retrieving export artifacts for verification.
/// </summary>
public interface IExportArtifactStore
{
/// <summary>
/// Gets the manifest for a run.
/// </summary>
Task<string?> GetManifestAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the signature for a run.
/// </summary>
Task<string?> GetSignatureAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets artifact paths for a run.
/// </summary>
Task<IReadOnlyList<ArtifactInfo>> GetArtifactsAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Opens a stream to read an artifact.
/// </summary>
Task<Stream?> OpenArtifactAsync(Guid runId, string relativePath, CancellationToken cancellationToken = default);
/// <summary>
/// Gets run metadata.
/// </summary>
Task<RunMetadata?> GetRunMetadataAsync(Guid runId, CancellationToken cancellationToken = default);
}
/// <summary>
/// Information about an artifact.
/// </summary>
public sealed record ArtifactInfo
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Expected hash from manifest.
/// </summary>
public string? ExpectedHash { get; init; }
/// <summary>
/// Hash algorithm.
/// </summary>
public string? HashAlgorithm { get; init; }
/// <summary>
/// Expected size in bytes.
/// </summary>
public long? ExpectedSize { get; init; }
/// <summary>
/// Content type.
/// </summary>
public string? ContentType { get; init; }
/// <summary>
/// Whether the artifact is encrypted.
/// </summary>
public bool IsEncrypted { get; init; }
}
/// <summary>
/// Run metadata for verification.
/// </summary>
public sealed record RunMetadata
{
/// <summary>
/// Run ID.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// When the run completed.
/// </summary>
public DateTimeOffset? CompletedAt { get; init; }
/// <summary>
/// Encryption mode used.
/// </summary>
public string? EncryptionMode { get; init; }
/// <summary>
/// Manifest digest.
/// </summary>
public string? ManifestDigest { get; init; }
}
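The store contracts above are deliberately small, so a filesystem-backed implementation fits on a page. The following is a hypothetical skeleton, assuming a bundle layout of <root>/<runId>/manifest.json, manifest.json.sig, run.json (serialized RunMetadata), and an artifacts/ directory; a real implementation would populate ArtifactInfo.ExpectedHash from the manifest, which is elided here, so hash checks against this skeleton would flag mismatches until that is wired up:

using System.Linq;
using System.Text.Json;

// Hypothetical directory-backed store; the layout and file names are assumptions.
public sealed class DirectoryExportArtifactStore : IExportArtifactStore
{
    private readonly string _root;

    public DirectoryExportArtifactStore(string root) => _root = root;

    public Task<string?> GetManifestAsync(Guid runId, CancellationToken cancellationToken = default)
        => ReadIfExistsAsync(Path.Combine(_root, runId.ToString(), "manifest.json"), cancellationToken);

    public Task<string?> GetSignatureAsync(Guid runId, CancellationToken cancellationToken = default)
        => ReadIfExistsAsync(Path.Combine(_root, runId.ToString(), "manifest.json.sig"), cancellationToken);

    public Task<IReadOnlyList<ArtifactInfo>> GetArtifactsAsync(Guid runId, CancellationToken cancellationToken = default)
    {
        var dir = Path.Combine(_root, runId.ToString(), "artifacts");
        IReadOnlyList<ArtifactInfo> infos = Directory.Exists(dir)
            ? Directory.EnumerateFiles(dir, "*", SearchOption.AllDirectories)
                .Select(f => new ArtifactInfo { RelativePath = Path.GetRelativePath(dir, f) })
                .ToList()
            : Array.Empty<ArtifactInfo>();
        return Task.FromResult(infos);
    }

    public Task<Stream?> OpenArtifactAsync(Guid runId, string relativePath, CancellationToken cancellationToken = default)
    {
        var path = Path.Combine(_root, runId.ToString(), "artifacts", relativePath);
        return Task.FromResult<Stream?>(File.Exists(path) ? File.OpenRead(path) : null);
    }

    public async Task<RunMetadata?> GetRunMetadataAsync(Guid runId, CancellationToken cancellationToken = default)
    {
        var json = await ReadIfExistsAsync(Path.Combine(_root, runId.ToString(), "run.json"), cancellationToken);
        return json is null ? null : JsonSerializer.Deserialize<RunMetadata>(json);
    }

    private static async Task<string?> ReadIfExistsAsync(string path, CancellationToken cancellationToken)
        => File.Exists(path) ? await File.ReadAllTextAsync(path, cancellationToken) : null;
}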

View File

@@ -0,0 +1,136 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// In-memory implementation of the export artifact store for testing.
/// </summary>
public sealed class InMemoryExportArtifactStore : IExportArtifactStore
{
private readonly ConcurrentDictionary<Guid, RunData> _runs = new();
/// <summary>
/// Adds a run for testing.
/// </summary>
public void AddRun(RunMetadata metadata)
{
_runs[metadata.RunId] = new RunData
{
Metadata = metadata,
Artifacts = new ConcurrentDictionary<string, ArtifactData>()
};
}
/// <summary>
/// Sets the manifest for a run.
/// </summary>
public void SetManifest(Guid runId, string manifest)
{
if (_runs.TryGetValue(runId, out var run))
{
run.Manifest = manifest;
}
}
/// <summary>
/// Sets the signature for a run.
/// </summary>
public void SetSignature(Guid runId, string signature)
{
if (_runs.TryGetValue(runId, out var run))
{
run.Signature = signature;
}
}
/// <summary>
/// Adds an artifact for a run.
/// </summary>
public void AddArtifact(
Guid runId,
string relativePath,
byte[] content,
string? expectedHash = null,
string? hashAlgorithm = "sha256")
{
if (_runs.TryGetValue(runId, out var run))
{
run.Artifacts[relativePath] = new ArtifactData
{
Content = content,
Info = new ArtifactInfo
{
RelativePath = relativePath,
ExpectedHash = expectedHash,
HashAlgorithm = hashAlgorithm,
ExpectedSize = content.Length
}
};
}
}
/// <inheritdoc />
public Task<string?> GetManifestAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Manifest);
}
/// <inheritdoc />
public Task<string?> GetSignatureAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Signature);
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactInfo>> GetArtifactsAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
var infos = run.Artifacts.Values.Select(a => a.Info).ToList();
return Task.FromResult<IReadOnlyList<ArtifactInfo>>(infos);
}
return Task.FromResult<IReadOnlyList<ArtifactInfo>>([]);
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(Guid runId, string relativePath, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run) &&
run.Artifacts.TryGetValue(relativePath, out var artifact))
{
return Task.FromResult<Stream?>(new MemoryStream(artifact.Content));
}
return Task.FromResult<Stream?>(null);
}
/// <inheritdoc />
public Task<RunMetadata?> GetRunMetadataAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Metadata);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_runs.Clear();
}
private sealed class RunData
{
public required RunMetadata Metadata { get; init; }
public required ConcurrentDictionary<string, ArtifactData> Artifacts { get; init; }
public string? Manifest { get; set; }
public string? Signature { get; set; }
}
private sealed class ArtifactData
{
public required byte[] Content { get; init; }
public required ArtifactInfo Info { get; init; }
}
}
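A typical test pairs this store with the service directly. A sketch (IDs and contents are placeholders; `Encoding` and `NullLogger` come from `System.Text` and `Microsoft.Extensions.Logging.Abstractions`, and `RunId`/`TenantId` are assumed to be the required members of the request record defined earlier in this commit):

var store = new InMemoryExportArtifactStore();
var service = new ExportVerificationService(store, NullLogger<ExportVerificationService>.Instance);

var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
store.AddRun(new RunMetadata { RunId = runId, TenantId = tenantId, ProfileId = Guid.NewGuid() });
store.SetManifest(runId, """{"version":"1","files":[]}""");

var payload = Encoding.UTF8.GetBytes("hello");
store.AddArtifact(runId, "data/hello.txt", payload,
    expectedHash: service.ComputeHash(payload)); // seed a matching hash so the check passes

var result = await service.VerifyAsync(
    new ExportVerificationRequest { RunId = runId, TenantId = tenantId },
    CancellationToken.None);
// With no signature stored, expect Valid or Partial (a "no signature" warning),
// depending on the default verification options.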

View File

@@ -0,0 +1,77 @@
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// In-memory implementation of pack run attestation store for testing.
/// </summary>
public sealed class InMemoryPackRunAttestationStore : IPackRunAttestationStore
{
private readonly Dictionary<Guid, PackRunAttestationData> _attestations = new();
private readonly Dictionary<string, PackRunAttestationData> _attestationsById = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<Guid, List<Guid>> _exportToPackRunLinks = new();
/// <summary>
/// Adds an attestation to the store.
/// </summary>
public void AddAttestation(PackRunAttestationData attestation)
{
ArgumentNullException.ThrowIfNull(attestation);
_attestations[attestation.PackRunId] = attestation;
_attestationsById[attestation.AttestationId] = attestation;
}
/// <summary>
/// Links a pack run to an export run.
/// </summary>
public void LinkToExport(Guid exportRunId, Guid packRunId)
{
if (!_exportToPackRunLinks.TryGetValue(exportRunId, out var links))
{
links = [];
_exportToPackRunLinks[exportRunId] = links;
}
if (!links.Contains(packRunId))
{
links.Add(packRunId);
}
}
/// <summary>
/// Clears all data from the store.
/// </summary>
public void Clear()
{
_attestations.Clear();
_attestationsById.Clear();
_exportToPackRunLinks.Clear();
}
/// <inheritdoc />
public Task<PackRunAttestationData?> GetAttestationAsync(
Guid packRunId,
CancellationToken cancellationToken = default)
{
_attestations.TryGetValue(packRunId, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<PackRunAttestationData?> GetAttestationByIdAsync(
string attestationId,
CancellationToken cancellationToken = default)
{
_attestationsById.TryGetValue(attestationId, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<IReadOnlyList<Guid>> GetLinkedPackRunsAsync(
Guid exportRunId,
CancellationToken cancellationToken = default)
{
if (_exportToPackRunLinks.TryGetValue(exportRunId, out var links))
{
return Task.FromResult<IReadOnlyList<Guid>>(links);
}
return Task.FromResult<IReadOnlyList<Guid>>([]);
}
}
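The pack-run side mirrors it; a sketch reusing `store`, `runId` (as the export run), and `tenantId` from the previous example (the attestation ID and digest are placeholders):

var packStore = new InMemoryPackRunAttestationStore();
var packRunId = Guid.NewGuid();
packStore.AddAttestation(new PackRunAttestationData
{
    PackRunId = packRunId,
    AttestationId = "att-0001",
    TenantId = tenantId,
    Status = "Signed", // "Signed" is what marks the attestation valid
    Subjects =
    [
        new AttestationSubject
        {
            Name = "artifact.tar",
            Digest = new Dictionary<string, string> { ["sha256"] = "d2c1f0..." } // placeholder digest
        }
    ]
});
packStore.LinkToExport(runId, packRunId);

var verifier = new ExportVerificationService(store, packStore,
    NullLogger<ExportVerificationService>.Instance);
var packResult = await verifier.VerifyPackRunIntegrationAsync(
    new PackRunVerificationRequest { ExportRunId = runId, TenantId = tenantId, PackRunId = packRunId },
    CancellationToken.None);
// packResult.ProvenanceLinks now carries export→attestation, attestation→subject,
// pack-run→artifact, and artifact→export links for "artifact.tar".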

View File

@@ -0,0 +1,264 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class ExportAdapterRegistryTests
{
[Fact]
public void GetAdapter_ExistingAdapter_ReturnsAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("json:raw");
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapter_CaseInsensitive_ReturnsAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("JSON:RAW");
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapter_NonExistent_ReturnsNull()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("nonexistent:adapter");
// Assert
Assert.Null(adapter);
}
[Fact]
public void GetAdapterForFormat_JsonRaw_ReturnsJsonRawAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonRaw);
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapterForFormat_JsonPolicy_ReturnsJsonPolicyAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonPolicy);
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:policy", adapter.AdapterId);
}
[Fact]
public void GetAdapterForFormat_Ndjson_ReturnsFirstRegisteredAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.Ndjson);
// Assert
Assert.NotNull(adapter);
// Both adapters support Ndjson, first one wins
Assert.Contains(ExportFormat.Ndjson, adapter.SupportedFormats);
}
[Fact]
public void GetAdapterForFormat_Unsupported_ReturnsNull()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.Csv);
// Assert
Assert.Null(adapter);
}
[Fact]
public void GetAllAdapters_ReturnsAllRegisteredAdapters()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapters = registry.GetAllAdapters();
// Assert
Assert.Equal(2, adapters.Count);
Assert.Contains(adapters, a => a.AdapterId == "json:raw");
Assert.Contains(adapters, a => a.AdapterId == "json:policy");
}
[Fact]
public void GetAdapterIds_ReturnsAllAdapterIds()
{
// Arrange
var registry = CreateRegistry();
// Act
var ids = registry.GetAdapterIds();
// Assert
Assert.Equal(2, ids.Count);
Assert.Contains("json:raw", ids);
Assert.Contains("json:policy", ids);
}
[Fact]
public void Registry_EmptyAdapters_HandlesGracefully()
{
// Arrange
var registry = new ExportAdapterRegistry([]);
// Act & Assert
Assert.Null(registry.GetAdapter("json:raw"));
Assert.Null(registry.GetAdapterForFormat(ExportFormat.JsonRaw));
Assert.Empty(registry.GetAllAdapters());
Assert.Empty(registry.GetAdapterIds());
}
[Fact]
public void AddExportAdapters_Extension_RegistersAdapters()
{
// Arrange
var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton<ICryptoHash>(new FakeCryptoHash());
// Act
services.AddExportAdapters();
var provider = services.BuildServiceProvider();
// Assert
var registry = provider.GetRequiredService<IExportAdapterRegistry>();
Assert.NotNull(registry);
// At least 2 base adapters (JsonRaw, JsonPolicy) plus additional adapters (Mirror, TrivyDb, TrivyJavaDb)
Assert.True(registry.GetAllAdapters().Count >= 2);
Assert.Contains(registry.GetAllAdapters(), a => a.AdapterId == "json:raw");
Assert.Contains(registry.GetAllAdapters(), a => a.AdapterId == "json:policy");
}
[Fact]
public void AddExportAdapters_WithOptions_RegistersAdaptersWithOptions()
{
// Arrange
var services = new ServiceCollection();
services.AddLogging();
var normalizationOptions = new JsonNormalizationOptions { SortKeys = true };
var redactionOptions = new JsonRedactionOptions { RedactFields = ["password"] };
// Act
services.AddExportAdapters(normalizationOptions, redactionOptions);
var provider = services.BuildServiceProvider();
// Assert
var registry = provider.GetRequiredService<IExportAdapterRegistry>();
Assert.NotNull(registry);
Assert.Equal(2, registry.GetAllAdapters().Count);
}
[Fact]
public void DuplicateAdapterIds_LastOneWins()
{
// Arrange
var adapter1 = new TestAdapter("test:id", "First");
var adapter2 = new TestAdapter("test:id", "Second");
// Act
var registry = new ExportAdapterRegistry([adapter1, adapter2]);
// Assert
var adapter = registry.GetAdapter("test:id");
Assert.NotNull(adapter);
Assert.Equal("Second", adapter.DisplayName);
}
[Fact]
public void FormatMapping_FirstAdapterForFormatWins()
{
// Arrange
var adapter1 = new TestAdapter("adapter:1", "First", [ExportFormat.JsonRaw]);
var adapter2 = new TestAdapter("adapter:2", "Second", [ExportFormat.JsonRaw]);
// Act
var registry = new ExportAdapterRegistry([adapter1, adapter2]);
// Assert
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonRaw);
Assert.NotNull(adapter);
Assert.Equal("adapter:1", adapter.AdapterId);
}
private static ExportAdapterRegistry CreateRegistry()
{
var jsonRaw = new JsonRawAdapter(NullLogger<JsonRawAdapter>.Instance);
var jsonPolicy = new JsonPolicyAdapter(NullLogger<JsonPolicyAdapter>.Instance);
return new ExportAdapterRegistry([jsonRaw, jsonPolicy]);
}
private sealed class TestAdapter : IExportAdapter
{
public string AdapterId { get; }
public string DisplayName { get; }
public IReadOnlyList<ExportFormat> SupportedFormats { get; }
public bool SupportsStreaming => true;
public TestAdapter(string adapterId, string displayName, IReadOnlyList<ExportFormat>? formats = null)
{
AdapterId = adapterId;
DisplayName = displayName;
SupportedFormats = formats ?? [ExportFormat.JsonRaw];
}
public Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
=> Task.FromResult(new ExportAdapterResult { Success = true });
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
await Task.CompletedTask;
yield break;
}
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
=> Task.FromResult<IReadOnlyList<string>>([]);
}
}

View File

@@ -0,0 +1,301 @@
using System.IO.Compression;
using System.Text;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class ExportCompressorTests
{
private readonly ExportCompressor _compressor = new();
[Fact]
public void Compress_WithNone_ReturnsUnmodifiedContent()
{
// Arrange
var content = """{"name":"test","version":"1.0.0"}""";
// Act
var result = _compressor.Compress(content, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(Encoding.UTF8.GetBytes(content), result.CompressedData);
Assert.Equal(result.OriginalSizeBytes, result.CompressedSizeBytes);
Assert.Equal(1.0, result.CompressionRatio);
Assert.Equal(CompressionFormat.None, result.Format);
}
[Fact]
public void CompressBytes_WithNone_ReturnsUnmodifiedBytes()
{
// Arrange
var bytes = new byte[] { 1, 2, 3, 4, 5 };
// Act
var result = _compressor.CompressBytes(bytes, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(bytes, result.CompressedData);
}
[Fact]
public void Compress_WithGzip_CompressesContent()
{
// Arrange
var content = new string('a', 1000); // Compressible content
// Act
var result = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
Assert.True(result.CompressionRatio < 1.0);
Assert.Equal(CompressionFormat.Gzip, result.Format);
}
[Fact]
public void Compress_WithBrotli_CompressesContent()
{
// Arrange
var content = new string('a', 1000);
// Act
var result = _compressor.Compress(content, CompressionFormat.Brotli);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
Assert.Equal(CompressionFormat.Brotli, result.Format);
}
[Fact]
public void Compress_WithZstd_FallsBackToGzip()
{
// Arrange (Zstd falls back to Gzip in current implementation)
var content = new string('b', 1000);
// Act
var result = _compressor.Compress(content, CompressionFormat.Zstd);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.Equal(CompressionFormat.Zstd, result.Format);
}
[Fact]
public void Compress_CalculatesSha256Hash()
{
// Arrange
var content = """{"test":"data"}""";
// Act
var result = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Sha256);
Assert.Equal(64, result.Sha256.Length); // SHA256 hex string length
Assert.Matches("^[a-f0-9]+$", result.Sha256); // Lowercase hex
}
[Fact]
public void Compress_DeterministicHash_SameContentSameHash()
{
// Arrange
var content = """{"test":"deterministic"}""";
// Act
var result1 = _compressor.Compress(content, CompressionFormat.Gzip);
var result2 = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.Equal(result1.Sha256, result2.Sha256);
Assert.Equal(result1.CompressedData, result2.CompressedData);
}
[Fact]
public void Decompress_Gzip_RestoresOriginalContent()
{
// Arrange
var original = """{"name":"test","value":42}""";
var compressed = _compressor.Compress(original, CompressionFormat.Gzip);
Assert.True(compressed.Success);
// Act
var decompressed = _compressor.Decompress(compressed.CompressedData!, CompressionFormat.Gzip);
// Assert
Assert.True(decompressed.Success);
Assert.Equal(original, Encoding.UTF8.GetString(decompressed.DecompressedData!));
}
[Fact]
public void Decompress_Brotli_RestoresOriginalContent()
{
// Arrange
var original = """{"name":"brotli-test"}""";
var compressed = _compressor.Compress(original, CompressionFormat.Brotli);
Assert.True(compressed.Success);
// Act
var decompressed = _compressor.Decompress(compressed.CompressedData!, CompressionFormat.Brotli);
// Assert
Assert.True(decompressed.Success);
Assert.Equal(original, Encoding.UTF8.GetString(decompressed.DecompressedData!));
}
[Fact]
public void Decompress_None_ReturnsUnmodifiedData()
{
// Arrange
var data = new byte[] { 1, 2, 3, 4, 5 };
// Act
var result = _compressor.Decompress(data, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(data, result.DecompressedData);
}
[Fact]
public void Decompress_InvalidData_ReturnsFailed()
{
// Arrange
var invalidData = new byte[] { 1, 2, 3, 4, 5 }; // Not valid gzip
// Act
var result = _compressor.Decompress(invalidData, CompressionFormat.Gzip);
// Assert
Assert.False(result.Success);
Assert.NotNull(result.ErrorMessage);
}
[Fact]
public async Task CompressToStreamAsync_Gzip_WritesToStream()
{
// Arrange
var content = new string('x', 500);
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressToStreamAsync(content, outputStream, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(outputStream.Length > 0);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
// Verify by decompressing
outputStream.Position = 0;
using var decompressStream = new GZipStream(outputStream, CompressionMode.Decompress);
using var reader = new StreamReader(decompressStream);
var decompressed = await reader.ReadToEndAsync();
Assert.Equal(content, decompressed);
}
[Fact]
public async Task CompressToStreamAsync_None_WritesBytesDirectly()
{
// Arrange
var content = "test content";
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressToStreamAsync(content, outputStream, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(Encoding.UTF8.GetByteCount(content), outputStream.Length);
}
[Fact]
public async Task CompressBytesToStreamAsync_WritesCompressedData()
{
// Arrange
var data = Encoding.UTF8.GetBytes(new string('y', 500));
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressBytesToStreamAsync(data, outputStream, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(outputStream.Length > 0);
Assert.True(outputStream.Length < data.Length);
}
[Theory]
[InlineData(CompressionFormat.Gzip, ".gz")]
[InlineData(CompressionFormat.Brotli, ".br")]
[InlineData(CompressionFormat.Zstd, ".zst")]
[InlineData(CompressionFormat.None, "")]
public void GetFileExtension_ReturnsCorrectExtension(CompressionFormat format, string expected)
{
Assert.Equal(expected, ExportCompressor.GetFileExtension(format));
}
[Theory]
[InlineData(CompressionFormat.Gzip, "application/gzip")]
[InlineData(CompressionFormat.Brotli, "application/br")]
[InlineData(CompressionFormat.Zstd, "application/zstd")]
[InlineData(CompressionFormat.None, "application/octet-stream")]
public void GetContentType_ReturnsCorrectContentType(CompressionFormat format, string expected)
{
Assert.Equal(expected, ExportCompressor.GetContentType(format));
}
[Fact]
public void CompressBytes_EmptyArray_Succeeds()
{
// Arrange
var empty = Array.Empty<byte>();
// Act
var result = _compressor.CompressBytes(empty, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
}
[Fact]
public void Compress_LargeContent_CompressesEfficiently()
{
// Arrange
var largeContent = new string('a', 100_000);
// Act
var result = _compressor.Compress(largeContent, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(result.CompressionRatio < 0.1); // Highly compressible content
}
[Fact]
public void Compress_RandomContent_HandlesUncompressibleData()
{
// Arrange - random data doesn't compress well
var random = new byte[1000];
new Random(42).NextBytes(random);
var randomString = Convert.ToBase64String(random);
// Act
var result = _compressor.Compress(randomString, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
// Random data may actually be larger after compression due to gzip overhead
Assert.NotNull(result.CompressedData);
}
}
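
A note on the contract these tests pin down: None is a byte-for-byte pass-through with ratio 1.0, Gzip and Brotli shrink repetitive input, Zstd currently falls back to Gzip, and the digest is lowercase SHA-256 hex of the compressed bytes. A minimal sketch of that surface — the CompressionFormat enum is taken from the tests, and the tuple return stands in for the real CompressionResult type, which is not shown in this diff:

using System;
using System.IO;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;

public static class CompressorSketch
{
    // UTF-8 encode, route by format (Zstd deliberately falls back to Gzip, as
    // Compress_WithZstd_FallsBackToGzip expects), then hash the output bytes.
    public static (byte[] Data, string Sha256, double Ratio) Compress(string content, CompressionFormat format)
    {
        var input = Encoding.UTF8.GetBytes(content);
        var output = format switch
        {
            CompressionFormat.None => input,
            CompressionFormat.Brotli => Run(input, s => new BrotliStream(s, CompressionLevel.Optimal)),
            _ => Run(input, s => new GZipStream(s, CompressionLevel.Optimal)), // Gzip and Zstd
        };
        var sha256 = Convert.ToHexString(SHA256.HashData(output)).ToLowerInvariant(); // 64 lowercase hex chars
        var ratio = input.Length == 0 ? 1.0 : (double)output.Length / input.Length;
        return (output, sha256, ratio);
    }

    private static byte[] Run(byte[] input, Func<Stream, Stream> wrap)
    {
        using var buffer = new MemoryStream();
        using (var compressor = wrap(buffer))
        {
            compressor.Write(input, 0, input.Length);
        }
        return buffer.ToArray();
    }
}

Under this sketch, Compress(new string('a', 1000), CompressionFormat.Gzip) yields a ratio well below 1.0, matching Compress_WithGzip_CompressesContent.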

View File

@@ -15,8 +15,8 @@ public sealed class JsonNormalizerTests
Assert.True(result.Success);
Assert.StartsWith("""{"alpha":""", result.NormalizedJson);
-Assert.Contains(""""beta":""", result.NormalizedJson);
-Assert.EndsWith(""""zebra":"z"}""", result.NormalizedJson);
+Assert.Contains("\"beta\":", result.NormalizedJson);
+Assert.EndsWith("\"zebra\":\"z\"}", result.NormalizedJson);
}
[Fact]

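Both the old raw-string and the new escaped assertions describe the same behavior: normalization emits compact JSON with ordinally sorted keys. A hypothetical sketch of that recursive sort, assuming .NET 8's System.Text.Json.Nodes API; the real JsonNormalizer, its options, and its result type are not part of this hunk:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json.Nodes;

public static class SortKeysSketch
{
    // Parse, rebuild every object with ordinally sorted keys, serialize compactly.
    public static string Normalize(string json) =>
        Sort(JsonNode.Parse(json))?.ToJsonString() ?? "null";

    private static JsonNode? Sort(JsonNode? node) => node switch
    {
        JsonObject obj => new JsonObject(
            obj.OrderBy(p => p.Key, StringComparer.Ordinal)
               .Select(p => KeyValuePair.Create(p.Key, Sort(p.Value)))),
        JsonArray arr => new JsonArray(arr.Select(Sort).ToArray()),
        _ => node?.DeepClone(), // detach leaves so they can be re-parented
    };
}

Normalize("""{"zebra":"z","alpha":"a"}""") would produce {"alpha":"a","zebra":"z"}, which is what the StartsWith assertion checks.
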
View File

@@ -0,0 +1,600 @@
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class JsonPolicyAdapterTests : IDisposable
{
private readonly string _tempDir;
private readonly JsonPolicyAdapter _adapter;
private readonly InMemoryExportDataFetcher _dataFetcher;
private readonly InMemoryExportPolicyEvaluator _policyEvaluator;
public JsonPolicyAdapterTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"export-policy-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_adapter = new JsonPolicyAdapter(NullLogger<JsonPolicyAdapter>.Instance);
_dataFetcher = new InMemoryExportDataFetcher();
_policyEvaluator = new InMemoryExportPolicyEvaluator();
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void AdapterId_IsJsonPolicy()
{
Assert.Equal("json:policy", _adapter.AdapterId);
}
[Fact]
public void DisplayName_IsSet()
{
Assert.Equal("JSON with Policy", _adapter.DisplayName);
}
[Fact]
public void SupportedFormats_IncludesJsonPolicyAndNdjson()
{
Assert.Contains(ExportFormat.JsonPolicy, _adapter.SupportedFormats);
Assert.Contains(ExportFormat.Ndjson, _adapter.SupportedFormats);
}
[Fact]
public void SupportsStreaming_IsTrue()
{
Assert.True(_adapter.SupportsStreaming);
}
[Fact]
public async Task ProcessAsync_SingleItem_CreatesWrappedJsonFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test-component");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Single(result.ItemResults);
Assert.True(result.ItemResults[0].Success);
Assert.True(File.Exists(result.ItemResults[0].OutputPath));
Assert.Equal("sbom-test-component.policy.json", Path.GetFileName(result.ItemResults[0].OutputPath));
}
[Fact]
public async Task ProcessAsync_WrapsDataWithMetadata()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test", ["tag1", "tag2"]);
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
// Verify wrapper structure
Assert.True(root.TryGetProperty("metadata", out var metadata));
Assert.True(root.TryGetProperty("data", out var data));
// Verify metadata fields
Assert.Equal(itemId.ToString(), metadata.GetProperty("itemId").GetString());
Assert.Equal("sbom", metadata.GetProperty("kind").GetString());
Assert.Equal("test", metadata.GetProperty("name").GetString());
Assert.NotNull(metadata.GetProperty("sha256").GetString());
// Verify data content preserved
Assert.Equal("test", data.GetProperty("name").GetString());
}
[Fact]
public async Task ProcessAsync_WithPolicyEvaluator_IncludesPolicyMetadata()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
_policyEvaluator.AddPolicy(itemId, new PolicyMetadata
{
PolicyId = "policy-001",
PolicyName = "Security Policy",
PolicyVersion = "1.0",
Decision = "allow",
EvaluatedAt = DateTimeOffset.UtcNow,
Violations = []
});
var context = CreateContextWithPolicy([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
Assert.True(root.TryGetProperty("policy", out var policy));
Assert.Equal("policy-001", policy.GetProperty("policyId").GetString());
Assert.Equal("Security Policy", policy.GetProperty("policyName").GetString());
Assert.Equal("allow", policy.GetProperty("decision").GetString());
}
[Fact]
public async Task ProcessAsync_WithPolicyViolations_IncludesViolations()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
_policyEvaluator.AddPolicy(itemId, new PolicyMetadata
{
PolicyId = "policy-001",
Decision = "deny",
Violations =
[
new PolicyViolation
{
RuleId = "CVE-001",
Severity = "critical",
Message = "Critical vulnerability found",
Path = "$.components[0]"
}
]
});
var context = CreateContextWithPolicy([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var violations = doc.RootElement.GetProperty("policy").GetProperty("violations");
Assert.Equal(1, violations.GetArrayLength());
Assert.Equal("CVE-001", violations[0].GetProperty("ruleId").GetString());
Assert.Equal("critical", violations[0].GetProperty("severity").GetString());
}
[Fact]
public async Task ProcessAsync_WithoutPolicyEvaluator_PolicyIsNull()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
// Policy should be absent or null when no evaluator is configured
Assert.False(root.TryGetProperty("policy", out _) &&
root.GetProperty("policy").ValueKind != JsonValueKind.Null);
}
[Fact]
public async Task ProcessAsync_NdjsonFormat_CreatesWrappedNdjsonFile()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.Ndjson);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.EndsWith("-policy.ndjson", result.Artifacts[0].Path);
Assert.Equal(3, result.Artifacts[0].ItemCount);
// Verify NDJSON content - each line should be a wrapped item
var content = await File.ReadAllTextAsync(result.Artifacts[0].Path);
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(3, lines.Length);
// Each line should have metadata and data
foreach (var line in lines)
{
using var doc = JsonDocument.Parse(line);
Assert.True(doc.RootElement.TryGetProperty("metadata", out _));
Assert.True(doc.RootElement.TryGetProperty("data", out _));
}
}
[Fact]
public async Task ProcessAsync_WithGzipCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy, CompressionFormat.Gzip);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".policy.json.gz", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
// Verify decompression works
var compressedBytes = await File.ReadAllBytesAsync(result.Artifacts[0].Path);
using var ms = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(ms, CompressionMode.Decompress);
using var reader = new StreamReader(gzip);
var decompressed = await reader.ReadToEndAsync();
Assert.Contains("metadata", decompressed);
}
[Fact]
public async Task ProcessAsync_IncludesChecksums_CreatesChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy, includeChecksums: true);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.True(File.Exists(checksumPath));
}
[Fact]
public async Task ProcessAsync_ManifestCounts_TracksCorrectly()
{
// Arrange
var items = new List<ResolvedExportItem>();
// Add 2 successful sbom items
for (var i = 0; i < 2; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"sbom-{i}"));
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
}
// Add 1 successful vex item
var vexItemId = Guid.NewGuid();
items.Add(CreateItem(vexItemId, "vex", "vex-1"));
_dataFetcher.AddContent(vexItemId, """{"name":"vex"}""");
// Add 1 failing item
var failingItemId = Guid.NewGuid();
items.Add(CreateItem(failingItemId, "attestation", "fail"));
var context = CreateContext(items, ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(4, result.ManifestCounts.TotalItems);
Assert.Equal(3, result.ManifestCounts.SuccessfulItems);
Assert.Equal(1, result.ManifestCounts.FailedItems);
Assert.Equal(2, result.ManifestCounts.ByKind["sbom"]);
Assert.Equal(1, result.ManifestCounts.ByKind["vex"]);
Assert.Equal(1, result.ManifestCounts.ByKind["attestation"]);
}
[Fact]
public async Task ProcessAsync_FetchFailure_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
// Don't add content - will cause fetch failure
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success); // Overall success, individual failure
Assert.Single(result.ItemResults);
Assert.False(result.ItemResults[0].Success);
}
[Fact]
public async Task ProcessStreamAsync_YieldsResultsProgressively()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 5; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.JsonPolicy);
// Act
var results = new List<AdapterItemResult>();
await foreach (var result in _adapter.ProcessStreamAsync(context))
{
results.Add(result);
}
// Assert
Assert.Equal(5, results.Count);
Assert.All(results, r => Assert.True(r.Success));
}
[Fact]
public async Task ValidateConfigAsync_MissingOutputDirectory_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = "",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("Output directory", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_UnsupportedFormat_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Mirror }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("not supported", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_ValidConfig_ReturnsNoErrors()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.Empty(errors);
}
[Fact]
public async Task ProcessAsync_NormalizesJson_SortsKeys()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"zebra":"z","alpha":"a"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
// The data object inside should be sorted
using var doc = JsonDocument.Parse(content);
var dataJson = doc.RootElement.GetProperty("data").GetRawText();
Assert.StartsWith("""{"alpha":""", dataJson);
}
[Fact]
public async Task ProcessAsync_WithRedaction_RedactsSensitiveFields()
{
// Arrange
var adapter = new JsonPolicyAdapter(
NullLogger<JsonPolicyAdapter>.Instance,
new JsonNormalizationOptions { SortKeys = true },
new JsonRedactionOptions { RedactFields = ["secretKey"] });
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","secretKey":"hidden123"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.DoesNotContain("hidden123", content);
Assert.Contains("[REDACTED]", content);
}
[Fact]
public async Task ProcessAsync_MetadataIncludesExportTimestamp()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var fixedTime = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero);
var timeProvider = new FakeTimeProvider(fixedTime);
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy },
IncludeChecksums = false
};
var context = new ExportAdapterContext
{
Config = config,
Items = [item],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid(),
TimeProvider = timeProvider
};
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var exportedAt = doc.RootElement.GetProperty("metadata").GetProperty("exportedAt").GetString();
Assert.Contains("2025-01-15", exportedAt);
}
private ResolvedExportItem CreateItem(Guid itemId, string kind, string name, IReadOnlyList<string>? tags = null)
{
return new ResolvedExportItem
{
ItemId = itemId,
Kind = kind,
Name = name,
SourceRef = $"test://{name}",
Tags = tags ?? [],
CreatedAt = DateTimeOffset.UtcNow
};
}
private ExportAdapterContext CreateContext(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format,
CompressionFormat compression = CompressionFormat.None,
bool includeChecksums = true)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = format,
Compression = compression
},
IncludeChecksums = includeChecksums
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
}
private ExportAdapterContext CreateContextWithPolicy(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = format },
IncludeChecksums = false
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
PolicyEvaluator = _policyEvaluator,
TenantId = Guid.NewGuid()
};
}
private sealed class FakeTimeProvider : TimeProvider
{
private readonly DateTimeOffset _fixedTime;
public FakeTimeProvider(DateTimeOffset fixedTime)
{
_fixedTime = fixedTime;
}
public override DateTimeOffset GetUtcNow() => _fixedTime;
}
}
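
The assertions above fix the envelope shape: a metadata block (itemId, kind, name, sha256 of the raw payload, exportedAt), an optional policy block that stays null when no evaluator is wired in, and the original document under data. A hedged sketch of that wrapping step — the field names come from the tests; the hashing, timestamp format, and method signature are assumptions rather than the shipped adapter:

using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json.Nodes;

public static class PolicyEnvelopeSketch
{
    public static string Wrap(Guid itemId, string kind, string name, string rawJson,
        JsonObject? policy, DateTimeOffset exportedAt)
    {
        // sha256 covers the raw payload, before wrapping.
        var sha = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(rawJson)))
            .ToLowerInvariant();
        var envelope = new JsonObject
        {
            ["metadata"] = new JsonObject
            {
                ["itemId"] = itemId.ToString(),
                ["kind"] = kind,
                ["name"] = name,
                ["sha256"] = sha,
                ["exportedAt"] = exportedAt.ToString("O"),
            },
            ["policy"] = policy,          // stays null without an evaluator
            ["data"] = JsonNode.Parse(rawJson),
        };
        return envelope.ToJsonString();
    }
}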

View File

@@ -0,0 +1,598 @@
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class JsonRawAdapterTests : IDisposable
{
private readonly string _tempDir;
private readonly JsonRawAdapter _adapter;
private readonly InMemoryExportDataFetcher _dataFetcher;
public JsonRawAdapterTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"export-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_adapter = new JsonRawAdapter(NullLogger<JsonRawAdapter>.Instance);
_dataFetcher = new InMemoryExportDataFetcher();
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void AdapterId_IsJsonRaw()
{
Assert.Equal("json:raw", _adapter.AdapterId);
}
[Fact]
public void DisplayName_IsSet()
{
Assert.Equal("JSON Raw", _adapter.DisplayName);
}
[Fact]
public void SupportedFormats_IncludesJsonRawAndNdjson()
{
Assert.Contains(ExportFormat.JsonRaw, _adapter.SupportedFormats);
Assert.Contains(ExportFormat.Ndjson, _adapter.SupportedFormats);
}
[Fact]
public void SupportsStreaming_IsTrue()
{
Assert.True(_adapter.SupportsStreaming);
}
[Fact]
public async Task ProcessAsync_SingleItem_CreatesJsonFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test-component");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Single(result.ItemResults);
Assert.True(result.ItemResults[0].Success);
Assert.True(File.Exists(result.ItemResults[0].OutputPath));
Assert.Equal("sbom-test-component.json", Path.GetFileName(result.ItemResults[0].OutputPath));
}
[Fact]
public async Task ProcessAsync_MultipleItems_CreatesMultipleFiles()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "name": "component-{{i}}", "version": "1.0.0" }""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(3, result.Artifacts.Count);
Assert.Equal(3, result.ItemResults.Count);
Assert.All(result.ItemResults, r => Assert.True(r.Success));
Assert.All(result.Artifacts, a => Assert.True(File.Exists(a.Path)));
}
[Fact]
public async Task ProcessAsync_NdjsonFormat_CreatesSingleFile()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "name": "component-{{i}}" }""");
}
var context = CreateContext(items, ExportFormat.Ndjson);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Equal(3, result.ItemResults.Count);
Assert.EndsWith(".ndjson", result.Artifacts[0].Path);
Assert.Equal(3, result.Artifacts[0].ItemCount);
// Verify NDJSON format
var content = await File.ReadAllTextAsync(result.Artifacts[0].Path);
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(3, lines.Length);
}
[Fact]
public async Task ProcessAsync_WithGzipCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, CompressionFormat.Gzip);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".json.gz", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
Assert.Equal(CompressionFormat.Gzip, result.Artifacts[0].Compression);
// Verify it's actually gzip compressed
var compressedBytes = await File.ReadAllBytesAsync(result.Artifacts[0].Path);
using var ms = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(ms, CompressionMode.Decompress);
using var reader = new StreamReader(gzip);
var decompressed = await reader.ReadToEndAsync();
Assert.Contains("test", decompressed);
}
[Fact]
public async Task ProcessAsync_WithBrotliCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, CompressionFormat.Brotli);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".json.br", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
Assert.Equal(CompressionFormat.Brotli, result.Artifacts[0].Compression);
}
[Fact]
public async Task ProcessAsync_IncludesChecksums_CreatesChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, includeChecksums: true);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.True(File.Exists(checksumPath));
var checksumContent = await File.ReadAllTextAsync(checksumPath);
Assert.Contains("sbom-test.json", checksumContent);
Assert.Equal(64 + 2 + "sbom-test.json".Length + 1, checksumContent.Length); // 64-char hash + two spaces + filename + newline
}
[Fact]
public async Task ProcessAsync_DisabledChecksums_NoChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, includeChecksums: false);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.False(File.Exists(checksumPath));
}
[Fact]
public async Task ProcessAsync_NormalizesJson_SortsKeys()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"zebra":"z","alpha":"a"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
// Keys should be sorted alphabetically
Assert.StartsWith("""{"alpha":""", content);
}
[Fact]
public async Task ProcessAsync_ManifestCounts_TracksCorrectly()
{
// Arrange
var items = new List<ResolvedExportItem>();
// Add 2 successful items
for (var i = 0; i < 2; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"success-{i}"));
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
}
// Add 1 item that will fail (no content)
var failingItemId = Guid.NewGuid();
items.Add(CreateItem(failingItemId, "vex", "fail"));
// Don't add content - will cause fetch failure
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(3, result.ManifestCounts.TotalItems);
Assert.Equal(2, result.ManifestCounts.SuccessfulItems);
Assert.Equal(1, result.ManifestCounts.FailedItems);
Assert.Equal(2, result.ManifestCounts.ByKind["sbom"]);
Assert.Equal(1, result.ManifestCounts.ByKind["vex"]);
}
[Fact]
public async Task ProcessAsync_FetchFailure_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
// Don't add content - will cause fetch failure
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success); // Overall success, individual failure
Assert.Single(result.ItemResults);
Assert.False(result.ItemResults[0].Success);
Assert.Contains("not found", result.ItemResults[0].ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task ProcessAsync_EmptyContent_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, "");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.False(result.ItemResults[0].Success);
Assert.Contains("empty", result.ItemResults[0].ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task ProcessAsync_InvalidJson_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, "{invalid json}");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.False(result.ItemResults[0].Success);
}
[Fact]
public async Task ProcessStreamAsync_YieldsResultsProgressively()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 5; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var results = new List<AdapterItemResult>();
await foreach (var result in _adapter.ProcessStreamAsync(context))
{
results.Add(result);
}
// Assert
Assert.Equal(5, results.Count);
Assert.All(results, r => Assert.True(r.Success));
}
[Fact]
public async Task ProcessStreamAsync_CancellationStopsProcessing()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 10; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, """{"test":true}""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
using var cts = new CancellationTokenSource();
// Act
var count = 0;
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
{
await foreach (var result in _adapter.ProcessStreamAsync(context, cts.Token))
{
count++;
if (count >= 3)
{
cts.Cancel();
}
}
});
// Assert
Assert.True(count >= 3);
Assert.True(count < 10);
}
[Fact]
public async Task ValidateConfigAsync_MissingOutputDirectory_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = "",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("Output directory", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_UnsupportedFormat_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Csv }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("not supported", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_ValidConfig_ReturnsNoErrors()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.Empty(errors);
}
[Fact]
public async Task ProcessAsync_PrettyPrint_FormatsOutput()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = ExportFormat.JsonRaw,
PrettyPrint = true
},
IncludeChecksums = false
};
var context = new ExportAdapterContext
{
Config = config,
Items = [item],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.Contains("\n", content); // Pretty printed has newlines
}
[Fact]
public async Task ProcessAsync_WithRedaction_RedactsSensitiveFields()
{
// Arrange
var adapter = new JsonRawAdapter(
NullLogger<JsonRawAdapter>.Instance,
new JsonNormalizationOptions { SortKeys = true },
new JsonRedactionOptions { RedactFields = ["apiKey"] });
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","apiKey":"secret123"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.DoesNotContain("secret123", content);
Assert.Contains("[REDACTED]", content);
}
[Fact]
public async Task ProcessAsync_DeterministicOutput_SameInputSameHash()
{
// Arrange
var itemId1 = Guid.NewGuid();
var item1 = CreateItem(itemId1, "sbom", "test");
_dataFetcher.AddContent(itemId1, """{"z":"2","a":"1"}""");
var context1 = CreateContext([item1], ExportFormat.JsonRaw);
var result1 = await _adapter.ProcessAsync(context1);
// Reset for second run
var dir2 = Path.Combine(Path.GetTempPath(), $"export-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(dir2);
try
{
var itemId2 = Guid.NewGuid();
var item2 = CreateItem(itemId2, "sbom", "test");
_dataFetcher.AddContent(itemId2, """{"a":"1","z":"2"}"""); // Same data, different order
var config2 = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = dir2,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
var context2 = new ExportAdapterContext
{
Config = config2,
Items = [item2],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
var result2 = await _adapter.ProcessAsync(context2);
// Assert - both should have same content hash after normalization
var content1 = await File.ReadAllTextAsync(result1.ItemResults[0].OutputPath!);
var content2 = await File.ReadAllTextAsync(result2.ItemResults[0].OutputPath!);
Assert.Equal(content1, content2);
}
finally
{
Directory.Delete(dir2, recursive: true);
}
}
private ResolvedExportItem CreateItem(Guid itemId, string kind, string name)
{
return new ResolvedExportItem
{
ItemId = itemId,
Kind = kind,
Name = name,
SourceRef = $"test://{name}",
CreatedAt = DateTimeOffset.UtcNow
};
}
private ExportAdapterContext CreateContext(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format,
CompressionFormat compression = CompressionFormat.None,
bool includeChecksums = true)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = format,
Compression = compression
},
IncludeChecksums = includeChecksums
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
}
}
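
The length arithmetic in ProcessAsync_IncludesChecksums_CreatesChecksumFiles (64 + 2 + filename + 1) implies the .sha256 sidecar follows the coreutils sha256sum layout. A sketch under that assumption:

using System;
using System.Security.Cryptography;

public static class ChecksumSketch
{
    // coreutils "sha256sum" layout: 64 hex chars, two spaces, file name, newline.
    public static string SidecarLine(byte[] fileBytes, string fileName) =>
        Convert.ToHexString(SHA256.HashData(fileBytes)).ToLowerInvariant()
        + "  " + fileName + "\n";
}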

View File

@@ -0,0 +1,394 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyDbAdapterTests
{
private readonly TrivyAdapterOptions _defaultOptions;
private readonly TrivyDbAdapter _adapter;
public TrivyDbAdapterTests()
{
_defaultOptions = new TrivyAdapterOptions();
var options = Options.Create(_defaultOptions);
_adapter = new TrivyDbAdapter(options, NullLogger<TrivyDbAdapter>.Instance);
}
[Fact]
public void Name_ReturnsTrivyDb()
{
Assert.Equal("trivy:db", _adapter.Name);
}
[Fact]
public void AdapterId_ReturnsExpected()
{
Assert.Equal("adapter:trivy:db", _adapter.AdapterId);
}
[Fact]
public void SchemaVersion_ReturnsV2()
{
Assert.Equal(TrivySchemaVersion.V2, _adapter.SchemaVersion);
}
[Fact]
public void ValidateConfiguration_WithV2_Succeeds()
{
// Should not throw
_adapter.ValidateConfiguration();
}
[Fact]
public void ValidateConfiguration_WithV3_Throws()
{
var options = new TrivyAdapterOptions { SchemaVersion = 3 };
var adapter = new TrivyDbAdapter(Options.Create(options), NullLogger<TrivyDbAdapter>.Instance);
var exception = Assert.Throws<TrivyAdapterException>(() => adapter.ValidateConfiguration());
Assert.Equal(TrivyAdapterErrors.UnsupportedSchemaVersion, exception.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithValidAdvisory_ReturnsValid()
{
var advisory = CreateValidAdvisory();
var result = _adapter.ValidateAdvisory(advisory);
Assert.True(result.IsValid);
}
[Fact]
public void ValidateAdvisory_WithNoIdentifiers_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers()
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.InvalidAdvisory, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithNoVendor_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.InvalidAdvisory, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithUnsupportedNamespace_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "UnsupportedVendor" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.UnsupportedNamespace, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithMissingSeverity_ReturnsValidWithWarning()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
// No severity or CVSS
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.True(result.IsValid);
Assert.NotNull(result.Warnings);
Assert.Contains(result.Warnings, w => w.Contains("UNKNOWN severity"));
}
[Fact]
public void TransformAdvisory_WithValidAdvisory_ReturnsRecords()
{
var advisory = CreateValidAdvisory();
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
var record = records[0];
Assert.Equal("ubuntu:22.04", record.Namespace);
Assert.Equal("openssl", record.Package.Name);
Assert.Equal("CVE-2024-12345", record.Vulnerability.Id);
Assert.Equal("HIGH", record.Vulnerability.Severity);
}
[Fact]
public void TransformAdvisory_WithUnsupportedNamespace_ReturnsEmptyList()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "UnsupportedVendor" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage { Name = "some-package" }
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.Empty(records);
}
[Fact]
public void TransformAdvisory_MapsSeverityCorrectly()
{
var testCases = new (string input, string expected)[]
{
("critical", "CRITICAL"),
("high", "HIGH"),
("medium", "MEDIUM"),
("low", "LOW"),
("none", "UNKNOWN"),
("info", "UNKNOWN")
};
foreach (var (input, expected) in testCases)
{
var advisory = CreateValidAdvisory();
advisory = advisory with
{
Severity = new TrivyAdapterSeverity { Normalized = input }
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal(expected, records[0].Vulnerability.Severity);
}
}
[Fact]
public void TransformAdvisory_TruncatesTitleToMaxLength()
{
var longTitle = new string('A', 300);
var advisory = CreateValidAdvisory();
advisory = advisory with { Summary = longTitle };
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.NotNull(records[0].Vulnerability.Title);
Assert.Equal(256, records[0].Vulnerability.Title!.Length);
Assert.NotNull(records[0].Vulnerability.Description);
Assert.Contains("A", records[0].Vulnerability.Description!);
}
[Fact]
public void TransformAdvisory_WithCvssButNoSeverity_DerivesSeverityFromScore()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Severity = null,
Cvss =
[
new TrivyAdapterCvss { Vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", Score = 9.8 }
],
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage { Name = "openssl" }
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal("CRITICAL", records[0].Vulnerability.Severity);
}
[Fact]
public async Task TransformAsync_WithMultipleAdvisories_ProducesUniqueRecords()
{
var advisories = AsyncEnumerable([
CreateValidAdvisory(),
CreateValidAdvisory(), // Duplicate
CreateValidAdvisory() with
{
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-67890"] }
}
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Records.Count); // Only 2 unique records
Assert.Equal(1, result.DuplicatesRemoved);
Assert.Equal(3, result.TotalInputRecords);
}
[Fact]
public async Task TransformAsync_WithEmptyInput_ThrowsWhenAllowEmptyIsFalse()
{
var advisories = AsyncEnumerable(Array.Empty<TrivyAdapterInputAdvisory>());
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
await Assert.ThrowsAsync<TrivyAdapterException>(
() => _adapter.TransformAsync(advisories, context));
}
[Fact]
public async Task TransformAsync_WithEmptyInput_SucceedsWhenAllowEmptyIsTrue()
{
var options = new TrivyAdapterOptions { AllowEmpty = true };
var adapter = new TrivyDbAdapter(Options.Create(options), NullLogger<TrivyDbAdapter>.Instance);
var advisories = AsyncEnumerable(Array.Empty<TrivyAdapterInputAdvisory>());
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await adapter.TransformAsync(advisories, context);
Assert.Empty(result.Records);
}
[Fact]
public async Task TransformAsync_ProducesCorrectMetadata()
{
var advisories = AsyncEnumerable([CreateValidAdvisory()]);
var generatedAt = new DateTimeOffset(2025, 12, 11, 12, 0, 0, TimeSpan.Zero);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant",
PolicySnapshotId = "policy-snap-42",
GeneratedAt = generatedAt
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Metadata.SchemaVersion);
Assert.Equal(generatedAt, result.Metadata.UpdatedAt);
Assert.NotNull(result.Metadata.Stella);
Assert.Equal("test-run-1", result.Metadata.Stella.RunId);
Assert.Equal("test-profile-1", result.Metadata.Stella.ProfileId);
Assert.Equal("test-tenant", result.Metadata.Stella.Tenant);
Assert.Equal("policy-snap-42", result.Metadata.Stella.PolicySnapshotId);
}
[Fact]
public void IsNamespaceSupported_WithKnownNamespaces_ReturnsTrue()
{
var supported = new[] { "Ubuntu", "Debian", "Alpine", "Red Hat" };
foreach (var vendor in supported)
{
Assert.True(_adapter.IsNamespaceSupported(vendor, null), $"{vendor} should be supported");
}
}
[Fact]
public void IsNamespaceSupported_WithUnknownNamespace_ReturnsFalse()
{
Assert.False(_adapter.IsNamespaceSupported("UnknownVendor", null));
}
[Fact]
public void IsEcosystemSupported_WithKnownEcosystems_ReturnsTrue()
{
var supported = new[] { "npm", "pip", "nuget", "go", "cargo" };
foreach (var ecosystem in supported)
{
Assert.True(_adapter.IsEcosystemSupported(ecosystem), $"{ecosystem} should be supported");
}
}
[Fact]
public void IsEcosystemSupported_WithJavaEcosystem_ReturnsTrue()
{
// Java ecosystems are supported for routing but handled by the Java DB adapter
Assert.True(_adapter.IsEcosystemSupported("maven"));
}
private static TrivyAdapterInputAdvisory CreateValidAdvisory()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers
{
Cve = ["CVE-2024-12345"]
},
Summary = "Test vulnerability",
Description = "A test vulnerability description.",
Severity = new TrivyAdapterSeverity { Normalized = "high" },
Published = DateTimeOffset.UtcNow.AddDays(-30),
Modified = DateTimeOffset.UtcNow.AddDays(-1),
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "openssl",
Ecosystem = "ubuntu",
Nevra = "1.1.1f-1ubuntu2.12"
},
VulnerableRange = "< 1.1.1f-1ubuntu2.13",
Remediations =
[
new TrivyAdapterRemediation { FixedVersion = "1.1.1f-1ubuntu2.13" }
]
}
]
};
}
private static async IAsyncEnumerable<T> AsyncEnumerable<T>(T[] items)
{
foreach (var item in items)
{
yield return item;
}
await Task.CompletedTask;
}
}
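
Read together, the severity tests imply a two-step rule: a normalized label maps straight across, with "none" and "info" collapsing to UNKNOWN, and when only a CVSS score is present the standard v3 bands derive the label (so 9.8 lands in CRITICAL). A sketch of that assumed mapping, not the adapter's actual code:

public static class TrivySeveritySketch
{
    public static string Map(string? normalized, double? cvssScore)
    {
        if (!string.IsNullOrEmpty(normalized))
        {
            return normalized.ToLowerInvariant() switch
            {
                "critical" => "CRITICAL",
                "high" => "HIGH",
                "medium" => "MEDIUM",
                "low" => "LOW",
                _ => "UNKNOWN", // "none", "info", and anything unrecognized
            };
        }
        // Standard CVSS v3 bands; null score falls through to UNKNOWN.
        return cvssScore switch
        {
            >= 9.0 => "CRITICAL",
            >= 7.0 => "HIGH",
            >= 4.0 => "MEDIUM",
            > 0.0 => "LOW",
            _ => "UNKNOWN",
        };
    }
}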

View File

@@ -0,0 +1,453 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyJavaDbAdapterTests
{
private readonly TrivyAdapterOptions _defaultOptions;
private readonly TrivyJavaDbAdapter _adapter;
public TrivyJavaDbAdapterTests()
{
_defaultOptions = new TrivyAdapterOptions { IncludeJavaDb = true };
var options = Options.Create(_defaultOptions);
_adapter = new TrivyJavaDbAdapter(options, NullLogger<TrivyJavaDbAdapter>.Instance);
}
[Fact]
public void Name_ReturnsTrivyJavaDb()
{
Assert.Equal("trivy:java-db", _adapter.Name);
}
[Fact]
public void AdapterId_ReturnsExpected()
{
Assert.Equal("adapter:trivy:java-db", _adapter.AdapterId);
}
[Fact]
public void SupportedEcosystems_ContainsMavenGradleSbt()
{
Assert.Contains("maven", _adapter.SupportedEcosystems);
Assert.Contains("gradle", _adapter.SupportedEcosystems);
Assert.Contains("sbt", _adapter.SupportedEcosystems);
}
[Fact]
public void ValidateConfiguration_WithV2_Succeeds()
{
_adapter.ValidateConfiguration();
}
[Fact]
public void ValidateConfiguration_WithV3_Throws()
{
var options = new TrivyAdapterOptions { SchemaVersion = 3 };
var adapter = new TrivyJavaDbAdapter(Options.Create(options), NullLogger<TrivyJavaDbAdapter>.Instance);
var exception = Assert.Throws<TrivyAdapterException>(() => adapter.ValidateConfiguration());
Assert.Equal(TrivyAdapterErrors.UnsupportedSchemaVersion, exception.ErrorCode);
}
[Fact]
public void HasJavaPackages_WithMavenPackage_ReturnsTrue()
{
var advisory = CreateMavenAdvisory();
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithGradlePackage_ReturnsTrue()
{
var advisory = CreateAdvisoryWithEcosystem("gradle");
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithSbtPackage_ReturnsTrue()
{
var advisory = CreateAdvisoryWithEcosystem("sbt");
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithNpmPackage_ReturnsFalse()
{
var advisory = CreateAdvisoryWithEcosystem("npm");
Assert.False(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithNoAffects_ReturnsFalse()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
Assert.False(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void TransformAdvisory_WithMavenPackage_ReturnsRecords()
{
var advisory = CreateMavenAdvisory();
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
var record = records[0];
Assert.Equal("maven", record.Namespace);
Assert.Equal("org.apache.logging.log4j", record.Package.GroupId);
Assert.Equal("log4j-core", record.Package.ArtifactId);
Assert.Equal("org.apache.logging.log4j:log4j-core", record.Package.Name);
Assert.Equal("CVE-2021-44228", record.Vulnerability.Id);
}
[Fact]
public void TransformAdvisory_WithNonJavaPackage_ReturnsEmptyList()
{
var advisory = CreateAdvisoryWithEcosystem("npm");
var records = _adapter.TransformAdvisory(advisory);
Assert.Empty(records);
}
[Fact]
public void ParseMavenCoordinates_WithPurl_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
null,
"pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1");
Assert.NotNull(coords);
Assert.Equal("org.apache.logging.log4j", coords.GroupId);
Assert.Equal("log4j-core", coords.ArtifactId);
Assert.Equal("2.14.1", coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithPurlNoVersion_ReturnsCoordinatesWithoutVersion()
{
var coords = _adapter.ParseMavenCoordinates(
null,
"pkg:maven/com.example/my-artifact");
Assert.NotNull(coords);
Assert.Equal("com.example", coords.GroupId);
Assert.Equal("my-artifact", coords.ArtifactId);
Assert.Null(coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithColonFormat_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"org.springframework:spring-core:5.3.0",
null);
Assert.NotNull(coords);
Assert.Equal("org.springframework", coords.GroupId);
Assert.Equal("spring-core", coords.ArtifactId);
Assert.Equal("5.3.0", coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithColonFormatNoVersion_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"com.google.guava:guava",
null);
Assert.NotNull(coords);
Assert.Equal("com.google.guava", coords.GroupId);
Assert.Equal("guava", coords.ArtifactId);
Assert.Null(coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithSlashFormat_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"org.example/artifact-name",
null);
Assert.NotNull(coords);
Assert.Equal("org.example", coords.GroupId);
Assert.Equal("artifact-name", coords.ArtifactId);
}
[Fact]
public void ParseMavenCoordinates_WithInvalidFormat_ReturnsNull()
{
var coords = _adapter.ParseMavenCoordinates(
"single-name-no-separator",
null);
Assert.Null(coords);
}
[Theory]
[InlineData("< 2.15.0", "(,2.15.0)")]
[InlineData("<= 2.15.0", "(,2.15.0]")]
[InlineData("> 1.0.0", "(1.0.0,)")]
[InlineData(">= 1.0.0", "[1.0.0,)")]
[InlineData("= 2.0.0", "[2.0.0]")]
[InlineData("[1.0.0,2.0.0)", "[1.0.0,2.0.0)")]
public void TransformAdvisory_ConvertsVersionRangeToMavenFormat(string input, string expected)
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.example:test-artifact",
Ecosystem = "maven"
},
VulnerableRange = input
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.NotNull(records[0].Package.VulnerableVersions);
Assert.Contains(expected, records[0].Package.VulnerableVersions!);
}
[Fact]
public async Task TransformAsync_WithMultipleAdvisories_DeduplicatesRecords()
{
var advisories = AsyncEnumerable([
CreateMavenAdvisory(),
CreateMavenAdvisory(), // Duplicate
CreateAdvisoryWithDifferentCve()
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Records.Count);
Assert.Equal(1, result.DuplicatesRemoved);
}
[Fact]
public async Task TransformAsync_WithMixedEcosystems_FiltersToJavaOnly()
{
var advisories = AsyncEnumerable([
CreateMavenAdvisory(),
CreateAdvisoryWithEcosystem("npm"), // Should be skipped
CreateAdvisoryWithEcosystem("pip") // Should be skipped
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Single(result.Records);
Assert.Equal(2, result.SkippedNonJavaEcosystem);
}
[Fact]
public async Task TransformAsync_ProducesCorrectMetadata()
{
var advisories = AsyncEnumerable([CreateMavenAdvisory()]);
var generatedAt = new DateTimeOffset(2025, 12, 11, 12, 0, 0, TimeSpan.Zero);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant",
GeneratedAt = generatedAt
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Metadata.SchemaVersion);
Assert.Contains("maven", result.Metadata.Ecosystems);
Assert.Contains("gradle", result.Metadata.Ecosystems);
Assert.Contains("sbt", result.Metadata.Ecosystems);
Assert.Equal(generatedAt, result.Metadata.UpdatedAt);
Assert.NotNull(result.Metadata.Stella);
}
[Fact]
public async Task TransformAsync_RecordsAreSortedDeterministically()
{
var advisories = AsyncEnumerable([
CreateAdvisoryWithGroupArtifact("z.group", "z-artifact", "CVE-2024-00003"),
CreateAdvisoryWithGroupArtifact("a.group", "a-artifact", "CVE-2024-00001"),
CreateAdvisoryWithGroupArtifact("a.group", "b-artifact", "CVE-2024-00002")
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(3, result.Records.Count);
Assert.Equal("a.group", result.Records[0].Package.GroupId);
Assert.Equal("a-artifact", result.Records[0].Package.ArtifactId);
Assert.Equal("a.group", result.Records[1].Package.GroupId);
Assert.Equal("b-artifact", result.Records[1].Package.ArtifactId);
Assert.Equal("z.group", result.Records[2].Package.GroupId);
}
[Fact]
public void TransformAdvisory_WithGroupAndArtifactInPackage_UsesDirectCoordinates()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "some-name",
Ecosystem = "maven",
Group = "direct.group",
Artifact = "direct-artifact"
}
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal("direct.group", records[0].Package.GroupId);
Assert.Equal("direct-artifact", records[0].Package.ArtifactId);
}
private static TrivyAdapterInputAdvisory CreateMavenAdvisory()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers
{
Cve = ["CVE-2021-44228"]
},
Summary = "Log4j RCE vulnerability",
Severity = new TrivyAdapterSeverity { Normalized = "critical" },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.apache.logging.log4j:log4j-core",
Ecosystem = "maven",
Purl = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1"
},
VulnerableRange = "< 2.15.0",
Remediations =
[
new TrivyAdapterRemediation { FixedVersion = "2.15.0" }
]
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithEcosystem(string ecosystem)
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = ecosystem == "maven" ? "org.example:test" : "test-package",
Ecosystem = ecosystem
}
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithDifferentCve()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2021-45046"] },
Summary = "Log4j second vulnerability",
Severity = new TrivyAdapterSeverity { Normalized = "critical" },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.apache.logging.log4j:log4j-core",
Ecosystem = "maven"
},
VulnerableRange = "< 2.16.0"
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithGroupArtifact(
string groupId, string artifactId, string cve)
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = [cve] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = $"{groupId}:{artifactId}",
Ecosystem = "maven"
}
}
]
};
}
// Wraps an array as IAsyncEnumerable<T> so the tests can feed TransformAsync.
private static async IAsyncEnumerable<T> AsyncEnumerable<T>(T[] items)
{
foreach (var item in items)
{
yield return item;
}
// Keeps the async iterator's required await without changing behaviour.
await Task.CompletedTask;
}
}

View File

@@ -0,0 +1,172 @@
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyNamespaceMapperTests
{
private readonly TrivyNamespaceMapper _mapper;
public TrivyNamespaceMapperTests()
{
_mapper = new TrivyNamespaceMapper(new TrivyAdapterOptions());
}
[Theory]
[InlineData("Ubuntu", "22.04", "ubuntu:22.04")]
[InlineData("Ubuntu", "20.04", "ubuntu:20.04")]
[InlineData("Ubuntu", "18.04", "ubuntu:18.04")]
[InlineData("Ubuntu", "24.04", "ubuntu:24.04")]
[InlineData("Debian", "11", "debian:11")]
[InlineData("Debian", "12", "debian:12")]
[InlineData("Alpine", "3.18", "alpine:3.18")]
[InlineData("Alpine", "3.19", "alpine:3.19")]
public void MapNamespace_WithKnownDistribution_ReturnsCorrectMapping(
string vendor, string product, string expected)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
Assert.Equal(NamespaceKind.Distribution, result.Kind);
}
[Theory]
[InlineData("Red Hat Enterprise Linux 8", null, "redhat:8")]
[InlineData("RHEL 9", null, "redhat:9")]
[InlineData("Amazon Linux 2", null, "amazon:2")]
[InlineData("AL2023", null, "amazon:2023")]
[InlineData("Rocky Linux 9", null, "rocky:9")]
[InlineData("Oracle Linux 8", null, "oracle:8")]
public void MapNamespace_WithCodeNames_ReturnsCorrectMapping(
string vendor, string? product, string expected)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
[Fact]
public void MapNamespace_WithDebianCodenames_ReturnsCorrectMapping()
{
var testCases = new (string vendor, string? product, string expected)[]
{
("Debian Bookworm", null, "debian:12"),
("Debian Bullseye", null, "debian:11"),
("Debian Buster", null, "debian:10")
};
foreach (var (vendor, product, expected) in testCases)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}
[Fact]
public void MapNamespace_WithUbuntuCodenames_ReturnsCorrectMapping()
{
var testCases = new (string vendor, string? product, string expected)[]
{
("Ubuntu Jammy", null, "ubuntu:22.04"),
("Ubuntu Focal", null, "ubuntu:20.04"),
("Ubuntu Bionic", null, "ubuntu:18.04")
};
foreach (var (vendor, product, expected) in testCases)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}
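// Both codename tests imply static codename-to-version tables inside the
// mapper. A plausible shape (assumed, not taken from the implementation):
//
//   private static readonly Dictionary<string, string> UbuntuCodenames =
//       new(StringComparer.OrdinalIgnoreCase)
//       {
//           ["jammy"] = "22.04",
//           ["focal"] = "20.04",
//           ["bionic"] = "18.04",
//       };
//
// with an equivalent table for Debian (bookworm -> 12, bullseye -> 11,
// buster -> 10) consulted after the vendor string is tokenized.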
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void MapNamespace_WithNullOrEmptyVendor_ReturnsNull(string? vendor)
{
var result = _mapper.MapNamespace(vendor, null);
Assert.Null(result);
}
[Fact]
public void MapNamespace_WithUnsupportedVendor_ReturnsNull()
{
var result = _mapper.MapNamespace("UnsupportedVendor", null);
Assert.Null(result);
}
[Theory]
[InlineData("npm", "npm", NamespaceKind.OssEcosystem)]
[InlineData("pip", "pip", NamespaceKind.OssEcosystem)]
[InlineData("nuget", "nuget", NamespaceKind.OssEcosystem)]
[InlineData("go", "go", NamespaceKind.OssEcosystem)]
[InlineData("cargo", "cargo", NamespaceKind.OssEcosystem)]
[InlineData("composer", "composer", NamespaceKind.OssEcosystem)]
[InlineData("gem", "gem", NamespaceKind.OssEcosystem)]
public void MapEcosystem_WithOssEcosystems_ReturnsCorrectMapping(
string ecosystem, string expectedName, NamespaceKind expectedKind)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.NotNull(result);
Assert.Equal(expectedName, result.Name);
Assert.Equal(expectedKind, result.Kind);
}
[Theory]
[InlineData("maven")]
[InlineData("gradle")]
[InlineData("sbt")]
public void MapEcosystem_WithJavaEcosystems_ReturnsJavaEcosystemKind(string ecosystem)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.NotNull(result);
Assert.Equal(ecosystem, result.Name);
Assert.Equal(NamespaceKind.JavaEcosystem, result.Kind);
}
[Theory]
[InlineData("pypi", "pip")]
[InlineData("rubygems", "gem")]
public void MapEcosystem_WithAliases_NormalizesToCanonical(string input, string expected)
{
var result = _mapper.MapEcosystem(input);
Assert.NotNull(result);
Assert.Equal(expected, result.Name);
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void MapEcosystem_WithNullOrEmpty_ReturnsNull(string? ecosystem)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.Null(result);
}
[Fact]
public void MapEcosystem_WithUnsupportedEcosystem_ReturnsNull()
{
var result = _mapper.MapEcosystem("unsupported-ecosystem");
Assert.Null(result);
}
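// Taken together, the ecosystem tests describe a three-step MapEcosystem:
// normalize aliases (pypi -> pip, rubygems -> gem), classify maven/gradle/sbt
// as NamespaceKind.JavaEcosystem, and return null for anything unknown. A
// sketch of the alias step (hypothetical field name):
//
//   private static readonly Dictionary<string, string> EcosystemAliases =
//       new(StringComparer.OrdinalIgnoreCase)
//       {
//           ["pypi"] = "pip",
//           ["rubygems"] = "gem",
//       };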
[Theory]
[InlineData("ubuntu", "22.04", "ubuntu:22.04")]
[InlineData("debian", null, "debian")]
[InlineData("npm", null, "npm")]
public void FormatNamespace_FormatsCorrectly(string name, string? version, string expected)
{
var result = new TrivyNamespaceResult(name, version, NamespaceKind.Distribution);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}
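The final theory pins down the FormatNamespace contract: the namespace name, followed by a colon-separated version only when one is present. A minimal implementation consistent with those three assertions (a sketch; the Version property name is assumed from the constructor argument order):

public static string FormatNamespace(TrivyNamespaceResult result) =>
    string.IsNullOrEmpty(result.Version)
        ? result.Name
        : $"{result.Name}:{result.Version}";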

Some files were not shown because too many files have changed in this diff.