up the blocking tasks
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Risk Bundle CI / risk-bundle-build (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Risk Bundle CI / risk-bundle-offline-kit (push) Has been cancelled
Risk Bundle CI / publish-checksums (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled

StellaOps Bot
2025-12-11 02:32:18 +02:00
parent 92bc4d3a07
commit 49922dff5a
474 changed files with 76071 additions and 12411 deletions

View File

@@ -1,5 +1,9 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Adapters.Trivy;
using StellaOps.ExportCenter.Core.Encryption;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
@@ -40,7 +44,12 @@ public sealed class ExportAdapterRegistry : IExportAdapterRegistry
public ExportAdapterRegistry(IEnumerable<IExportAdapter> adapters)
{
- _adapters = adapters.ToDictionary(a => a.AdapterId, StringComparer.OrdinalIgnoreCase);
+ // Last adapter wins for duplicate adapter IDs
+ _adapters = new Dictionary<string, IExportAdapter>(StringComparer.OrdinalIgnoreCase);
+ foreach (var adapter in adapters)
+ {
+ _adapters[adapter.AdapterId] = adapter;
+ }
// Build format to adapter map (first adapter wins for each format)
_formatMap = new Dictionary<ExportFormat, IExportAdapter>();
@@ -85,6 +94,47 @@ public static class ExportAdapterServiceExtensions
// Register individual adapters
services.AddSingleton<IExportAdapter, JsonRawAdapter>();
services.AddSingleton<IExportAdapter, JsonPolicyAdapter>();
services.AddSingleton<IExportAdapter>(sp =>
new MirrorAdapter(
sp.GetRequiredService<ILogger<MirrorAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register Trivy DB adapter
services.AddSingleton<IExportAdapter>(sp =>
new TrivyDbAdapter(
sp.GetRequiredService<ILogger<TrivyDbAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register Trivy Java DB adapter
services.AddSingleton<IExportAdapter>(sp =>
new TrivyJavaDbAdapter(
sp.GetRequiredService<ILogger<TrivyJavaDbAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register mirror delta infrastructure
services.AddSingleton<IMirrorBaseManifestStore, InMemoryMirrorBaseManifestStore>();
services.AddSingleton<IMirrorContentStore>(sp =>
new InMemoryMirrorContentStore(sp.GetRequiredService<ICryptoHash>()));
services.AddSingleton<IMirrorDeltaService, MirrorDeltaService>();
// Register Mirror Delta adapter
services.AddSingleton<IExportAdapter>(sp =>
new MirrorDeltaAdapter(
sp.GetRequiredService<ILogger<MirrorDeltaAdapter>>(),
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<IMirrorDeltaService>(),
sp.GetRequiredService<IMirrorBaseManifestStore>(),
sp.GetService<IMirrorContentStore>()));
// Register encryption services
services.AddSingleton<IAgeKeyWrapper, StubAgeKeyWrapper>();
// Note: IKmsKeyWrapper should be registered by specific KMS implementations (AWS, Azure, etc.)
services.AddSingleton<IBundleEncryptionService>(sp =>
new BundleEncryptionService(
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<ILogger<BundleEncryptionService>>(),
sp.GetService<IAgeKeyWrapper>(),
sp.GetService<IKmsKeyWrapper>()));
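// Hypothetical sketch (not part of this commit): per the note above, a KMS-specific
// package would plug in the missing wrapper at host startup, e.g. an AWS package:
//
//     services.AddSingleton<IKmsKeyWrapper, AwsKmsKeyWrapper>(); // AwsKmsKeyWrapper is illustrative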
// Register the registry
services.AddSingleton<IExportAdapterRegistry>(sp =>

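// For context on the registry change above: the indexer assignment makes the last
// registered adapter win on duplicate IDs, whereas the removed ToDictionary call
// threw ArgumentException on the first duplicate key. Minimal sketch:
//
//     var map = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
//     map["mirror:standard"] = 1;
//     map["MIRROR:STANDARD"] = 2;  // overwrites: map["mirror:standard"] == 2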
View File

@@ -0,0 +1,414 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
/// <summary>
/// Export adapter that produces mirror bundles with filesystem layout, indexes, and manifests.
/// </summary>
public sealed class MirrorAdapter : IExportAdapter
{
private const string DefaultBundleFileName = "export-mirror-bundle-v1.tgz";
private readonly ILogger<MirrorAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
public MirrorAdapter(ILogger<MirrorAdapter> logger, ICryptoHash cryptoHash)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
}
/// <inheritdoc />
public string AdapterId => "mirror:standard";
/// <inheritdoc />
public string DisplayName => "Mirror Bundle";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.Mirror];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting mirror bundle export for {ItemCount} items",
context.Items.Count);
// Create temp directory for staging files
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Collect and write items to temp files
var itemResults = new List<AdapterItemResult>();
var dataSources = await CollectDataSourcesAsync(
context,
tempDir,
itemResults,
cancellationToken);
if (dataSources.Count == 0)
{
_logger.LogWarning("No data sources collected for mirror bundle");
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
ManifestCounts = BuildManifestCounts(itemResults),
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
// Extract selectors from items
var selectors = ExtractSelectors(context.Items);
// Build the mirror bundle
var request = new MirrorBundleBuildRequest(
Guid.TryParse(context.CorrelationId, out var runId) ? runId : Guid.NewGuid(),
context.TenantId,
MirrorBundleVariant.Full,
selectors,
dataSources);
var builder = new MirrorBundleBuilder(_cryptoHash, context.TimeProvider);
var buildResult = builder.Build(request, cancellationToken);
// Write the bundle to output directory
var outputPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-mirror-bundle-v1.tgz");
await using (var outputStream = new FileStream(
outputPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 128 * 1024,
useAsync: true))
{
buildResult.BundleStream.Position = 0;
await buildResult.BundleStream.CopyToAsync(outputStream, cancellationToken);
}
// Write checksum file if requested
var checksumPath = outputPath + ".sha256";
if (context.Config.IncludeChecksums)
{
var checksumContent = $"{buildResult.RootHash} {Path.GetFileName(outputPath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = outputPath,
SizeBytes = new FileInfo(outputPath).Length,
Sha256 = buildResult.RootHash,
ContentType = "application/gzip",
ItemCount = dataSources.Count,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Mirror bundle created: {Path} ({Bytes} bytes, {ItemCount} items, hash: {Hash})",
outputPath,
artifact.SizeBytes,
dataSources.Count,
buildResult.RootHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build mirror bundle");
return ExportAdapterResult.Failed($"Mirror bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Mirror adapter doesn't support streaming - all items must be processed together
// to build a single bundle
_logger.LogWarning("Mirror adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task<List<MirrorBundleDataSource>> CollectDataSourcesAsync(
ExportAdapterContext context,
string tempDir,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
var dataSources = new List<MirrorBundleDataSource>();
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Determine category from item kind
var category = MapKindToCategory(item.Kind);
if (category is null)
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
$"Unknown item kind: {item.Kind}"));
continue;
}
// Create temp file for this item
var fileName = SanitizeFileName($"{item.Kind}-{item.Name ?? item.ItemId.ToString("N")}.json");
var categoryDir = Path.Combine(tempDir, category.Value.ToString().ToLowerInvariant());
Directory.CreateDirectory(categoryDir);
var tempFilePath = Path.Combine(categoryDir, fileName);
// Apply normalization if configured
var jsonContent = content.JsonContent!;
if (context.Config.FormatOptions.SortKeys || context.Config.FormatOptions.NormalizeTimestamps)
{
var normalizer = new JsonNormalizer(new JsonNormalizationOptions
{
SortKeys = context.Config.FormatOptions.SortKeys,
NormalizeTimestamps = context.Config.FormatOptions.NormalizeTimestamps
});
var normalized = normalizer.Normalize(jsonContent);
if (normalized.Success && normalized.NormalizedJson is not null)
{
jsonContent = normalized.NormalizedJson;
}
}
await File.WriteAllTextAsync(tempFilePath, jsonContent, cancellationToken);
dataSources.Add(new MirrorBundleDataSource(
category.Value,
tempFilePath,
context.Config.FormatOptions.SortKeys,
item.SourceRef));
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
OutputPath = tempFilePath,
OutputSizeBytes = new FileInfo(tempFilePath).Length,
ContentHash = content.OriginalHash,
ProcessedAt = DateTimeOffset.UtcNow
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
return dataSources;
}
private static MirrorBundleDataCategory? MapKindToCategory(string kind)
{
return kind.ToLowerInvariant() switch
{
"advisory" => MirrorBundleDataCategory.Advisories,
"advisories" => MirrorBundleDataCategory.Advisories,
"vex" => MirrorBundleDataCategory.Vex,
"sbom" => MirrorBundleDataCategory.Sbom,
"policy-snapshot" => MirrorBundleDataCategory.PolicySnapshot,
"policy-evaluations" => MirrorBundleDataCategory.PolicyEvaluations,
"policy-result" => MirrorBundleDataCategory.PolicyEvaluations,
"vex-consensus" => MirrorBundleDataCategory.VexConsensus,
"findings" => MirrorBundleDataCategory.Findings,
"scan-report" => MirrorBundleDataCategory.Findings,
_ => null
};
}
private static MirrorBundleSelectors ExtractSelectors(IReadOnlyList<ResolvedExportItem> items)
{
// Extract unique source refs as products
var products = items
.Select(i => i.SourceRef)
.Where(s => !string.IsNullOrWhiteSpace(s))
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
// Extract time window from item timestamps
var minCreated = items.Where(i => i.CreatedAt != default).Min(i => i.CreatedAt);
var maxCreated = items.Where(i => i.CreatedAt != default).Max(i => i.CreatedAt);
// Extract ecosystems from metadata if available
var ecosystems = items
.Where(i => i.Metadata.TryGetValue("ecosystem", out _))
.Select(i => i.Metadata["ecosystem"])
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
return new MirrorBundleSelectors(
products.Count > 0 ? products : ["*"],
minCreated != default ? minCreated : null,
maxCreated != default ? maxCreated : null,
ecosystems.Count > 0 ? ecosystems : null);
}
private static string SanitizeFileName(string name)
{
if (string.IsNullOrWhiteSpace(name))
{
return "item.json";
}
var result = name.Trim().ToLowerInvariant();
foreach (var invalid in Path.GetInvalidFileNameChars())
{
result = result.Replace(invalid, '_');
}
result = result.Replace('/', '_').Replace('\\', '_');
// Limit length
if (result.Length > 64)
{
var ext = Path.GetExtension(result);
result = result[..(60 - ext.Length)] + ext;
}
return string.IsNullOrWhiteSpace(result) ? "item.json" : result;
}
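// Illustrative inputs/outputs (hypothetical values):
//   SanitizeFileName("Advisory/CVE-2024-0001.JSON") => "advisory_cve-2024-0001.json"
//   SanitizeFileName(new string('x', 80) + ".json") => first 55 'x' chars + ".json" (capped at 60 chars)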
private static ExportManifestCounts BuildManifestCounts(IReadOnlyList<AdapterItemResult> itemResults)
{
return new ExportManifestCounts
{
TotalItems = itemResults.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 0,
TotalSizeBytes = 0,
ByKind = new Dictionary<string, int>(),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
}
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
}

View File

@@ -0,0 +1,658 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters;
/// <summary>
/// Export adapter that produces delta mirror bundles with content-addressed reuse.
/// Only includes items that have changed since the base export.
/// </summary>
public sealed class MirrorDeltaAdapter : IExportAdapter
{
private readonly ILogger<MirrorDeltaAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly IMirrorDeltaService _deltaService;
private readonly IMirrorContentStore? _contentStore;
private readonly IMirrorBaseManifestStore _manifestStore;
public MirrorDeltaAdapter(
ILogger<MirrorDeltaAdapter> logger,
ICryptoHash cryptoHash,
IMirrorDeltaService deltaService,
IMirrorBaseManifestStore manifestStore,
IMirrorContentStore? contentStore = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_deltaService = deltaService ?? throw new ArgumentNullException(nameof(deltaService));
_manifestStore = manifestStore ?? throw new ArgumentNullException(nameof(manifestStore));
_contentStore = contentStore;
}
/// <inheritdoc />
public string AdapterId => "mirror:delta";
/// <inheritdoc />
public string DisplayName => "Mirror Delta Bundle";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.Mirror];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
// Extract delta options from metadata
var deltaOptions = ExtractDeltaOptions(context);
if (deltaOptions is null)
{
return ExportAdapterResult.Failed(
"Delta options required: provide 'baseExportId' and 'baseManifestDigest' in context metadata");
}
_logger.LogInformation(
"Starting mirror delta export against base {BaseExportId} for {ItemCount} items",
deltaOptions.BaseExportId, context.Items.Count);
// Create temp directory for staging files
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-delta-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Collect and hash all current items
var itemResults = new List<AdapterItemResult>();
var currentItems = await CollectCurrentItemsAsync(
context,
tempDir,
itemResults,
cancellationToken);
if (currentItems.Count == 0)
{
_logger.LogWarning("No items collected for delta comparison");
return CreateEmptyResult(context, startTime);
}
// Compute delta against base
var deltaRequest = new MirrorDeltaComputeRequest
{
BaseRunId = Guid.Parse(deltaOptions.BaseExportId),
BaseManifestDigest = deltaOptions.BaseManifestDigest,
TenantId = context.TenantId,
CurrentItems = currentItems,
ResetBaseline = deltaOptions.ResetBaseline
};
var deltaResult = await _deltaService.ComputeDeltaAsync(deltaRequest, cancellationToken);
if (!deltaResult.Success)
{
return ExportAdapterResult.Failed(deltaResult.ErrorMessage ?? "Delta computation failed");
}
// If no changes, return early with empty delta
if (deltaResult.AddedItems.Count == 0 &&
deltaResult.ChangedItems.Count == 0 &&
deltaResult.RemovedItems.Count == 0)
{
_logger.LogInformation("No changes detected since base export {BaseExportId}", deltaOptions.BaseExportId);
return CreateNoChangesResult(context, deltaResult, startTime);
}
// Build data sources only for changed items (reuse unchanged from content store)
var dataSources = await BuildDeltaDataSourcesAsync(
deltaResult,
tempDir,
cancellationToken);
// Build selectors from changed items
var selectors = ExtractSelectors(context.Items);
// Create the delta bundle request
var bundleRequest = new MirrorBundleBuildRequest(
Guid.TryParse(context.CorrelationId, out var runId) ? runId : Guid.NewGuid(),
context.TenantId,
MirrorBundleVariant.Delta,
selectors,
dataSources,
DeltaOptions: new MirrorBundleDeltaOptions(
deltaOptions.BaseExportId,
deltaResult.BaseManifestDigest ?? deltaOptions.BaseManifestDigest,
deltaOptions.ResetBaseline));
var builder = new MirrorBundleBuilder(_cryptoHash, context.TimeProvider);
var buildResult = builder.Build(bundleRequest, cancellationToken);
// Write the bundle to output directory
var outputPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-mirror-delta-v1.tgz");
await using (var outputStream = new FileStream(
outputPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 128 * 1024,
useAsync: true))
{
buildResult.BundleStream.Position = 0;
await buildResult.BundleStream.CopyToAsync(outputStream, cancellationToken);
}
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumContent = $"{buildResult.RootHash} {Path.GetFileName(outputPath)}\n";
await File.WriteAllTextAsync(outputPath + ".sha256", checksumContent, cancellationToken);
}
// Write removed items manifest
if (deltaResult.RemovedItems.Count > 0)
{
var removedPath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-delta-removed.jsonl");
await WriteRemovedManifestAsync(deltaResult.RemovedItems, removedPath, cancellationToken);
}
// Save manifest entries for future delta comparisons
var manifestEntries = currentItems
.Select(i => new MirrorBaseManifestEntry
{
ItemId = i.ItemId,
Category = i.Category,
BundlePath = i.BundlePath,
ContentHash = i.ContentHash,
SizeBytes = i.SizeBytes
})
.ToList();
await _manifestStore.SaveManifestEntriesAsync(
bundleRequest.RunId,
context.TenantId,
buildResult.Manifest.Delta?.BaseManifestDigest ?? buildResult.RootHash,
manifestEntries,
cancellationToken);
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = outputPath,
SizeBytes = new FileInfo(outputPath).Length,
Sha256 = buildResult.RootHash,
ContentType = "application/gzip",
ItemCount = dataSources.Count,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = BuildManifestCounts(context.Items, itemResults, deltaResult, artifact.SizeBytes);
_logger.LogInformation(
"Mirror delta bundle created: {Path} ({Bytes} bytes, {Added} added, {Changed} changed, {Removed} removed)",
outputPath,
artifact.SizeBytes,
deltaResult.AddedItems.Count,
deltaResult.ChangedItems.Count,
deltaResult.RemovedItems.Count);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build mirror delta bundle");
return ExportAdapterResult.Failed($"Mirror delta bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
_logger.LogWarning("Mirror delta adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private static MirrorBundleDeltaOptions? ExtractDeltaOptions(ExportAdapterContext context)
{
// Check if we have a metadata dict with delta options in the context
// This would typically come from ExportPlan or ExportProfile configuration
var correlationParts = context.CorrelationId?.Split('|');
if (correlationParts?.Length >= 3)
{
return new MirrorBundleDeltaOptions(
correlationParts[1],
correlationParts[2],
correlationParts.Length > 3 && bool.TryParse(correlationParts[3], out var reset) && reset);
}
return null;
}
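// Illustrative correlation ID under the pipe-delimited convention this parser assumes
// (all values hypothetical; parts[0] is not read by this method):
//   "7c9e6679-7425-40de-944b-e07fc1f90ae7|3f2504e0-4f89-11d3-9a0c-0305e82c3301|sha256:ab12cd…|true"
//   parts[1] => BaseExportId, parts[2] => BaseManifestDigest, parts[3] => ResetBaseline (optional)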
private async Task<List<MirrorDeltaItem>> CollectCurrentItemsAsync(
ExportAdapterContext context,
string tempDir,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
var items = new List<MirrorDeltaItem>();
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Determine category and bundle path
var category = MapKindToCategory(item.Kind);
if (category is null)
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
$"Unknown item kind: {item.Kind}"));
continue;
}
// Normalize content if configured
var jsonContent = content.JsonContent!;
if (context.Config.FormatOptions.SortKeys || context.Config.FormatOptions.NormalizeTimestamps)
{
var normalizer = new JsonNormalizer(new JsonNormalizationOptions
{
SortKeys = context.Config.FormatOptions.SortKeys,
NormalizeTimestamps = context.Config.FormatOptions.NormalizeTimestamps
});
var normalized = normalizer.Normalize(jsonContent);
if (normalized.Success && normalized.NormalizedJson is not null)
{
jsonContent = normalized.NormalizedJson;
}
}
// Compute content hash
var contentBytes = System.Text.Encoding.UTF8.GetBytes(jsonContent);
var contentHash = _cryptoHash.ComputeHashHexForPurpose(contentBytes, HashPurpose.Content);
// Write to temp file
var fileName = SanitizeFileName($"{item.Kind}-{item.Name ?? item.ItemId.ToString("N")}.json");
var categoryDir = Path.Combine(tempDir, category.Value.ToString().ToLowerInvariant());
Directory.CreateDirectory(categoryDir);
var tempFilePath = Path.Combine(categoryDir, fileName);
await File.WriteAllTextAsync(tempFilePath, jsonContent, cancellationToken);
// Compute bundle path
var bundlePath = ComputeBundlePath(category.Value, fileName, context.Config.FormatOptions.SortKeys, item.SourceRef);
items.Add(new MirrorDeltaItem
{
ItemId = item.ItemId.ToString("D"),
Category = category.Value,
ContentHash = contentHash,
BundlePath = bundlePath,
SizeBytes = contentBytes.LongLength,
ModifiedAt = item.CreatedAt != default ? item.CreatedAt : context.TimeProvider.GetUtcNow(),
SourcePath = tempFilePath
});
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
OutputPath = tempFilePath,
OutputSizeBytes = contentBytes.LongLength,
ContentHash = contentHash,
ProcessedAt = context.TimeProvider.GetUtcNow()
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
return items;
}
private async Task<List<MirrorBundleDataSource>> BuildDeltaDataSourcesAsync(
MirrorDeltaComputeResult deltaResult,
string tempDir,
CancellationToken cancellationToken)
{
var dataSources = new List<MirrorBundleDataSource>();
// Add all added items
foreach (var item in deltaResult.AddedItems)
{
if (string.IsNullOrEmpty(item.SourcePath))
continue;
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
// Add all changed items
foreach (var change in deltaResult.ChangedItems)
{
var item = change.Current;
if (string.IsNullOrEmpty(item.SourcePath))
continue;
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
// For unchanged items, try to reuse from content store if available
if (_contentStore is not null)
{
foreach (var item in deltaResult.UnchangedItems)
{
var localPath = _contentStore.GetLocalPath(item.ContentHash);
if (!string.IsNullOrEmpty(localPath) && File.Exists(localPath))
{
dataSources.Add(new MirrorBundleDataSource(
item.Category,
localPath,
true,
item.ItemId));
}
else if (!string.IsNullOrEmpty(item.SourcePath))
{
// Fall back to source path if content store doesn't have it
dataSources.Add(new MirrorBundleDataSource(
item.Category,
item.SourcePath,
true,
item.ItemId));
}
}
}
await Task.CompletedTask; // Placeholder for potential async content store operations
return dataSources;
}
private static async Task WriteRemovedManifestAsync(
IReadOnlyList<MirrorDeltaRemovedItem> removedItems,
string outputPath,
CancellationToken cancellationToken)
{
await using var writer = new StreamWriter(outputPath, append: false, System.Text.Encoding.UTF8);
foreach (var item in removedItems.OrderBy(i => i.BundlePath, StringComparer.Ordinal))
{
var json = System.Text.Json.JsonSerializer.Serialize(new
{
itemId = item.ItemId,
category = item.Category.ToString().ToLowerInvariant(),
bundlePath = item.BundlePath,
contentHash = item.ContentHash
});
await writer.WriteLineAsync(json);
}
}
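// Illustrative JSONL line in the removed-items manifest (values hypothetical):
//   {"itemId":"9f8e…","category":"advisories","bundlePath":"data/normalized/advisories/advisory-cve-2024-0001.json","contentHash":"sha256:ab12…"}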
private static MirrorBundleDataCategory? MapKindToCategory(string kind)
{
return kind.ToLowerInvariant() switch
{
"advisory" => MirrorBundleDataCategory.Advisories,
"advisories" => MirrorBundleDataCategory.Advisories,
"vex" => MirrorBundleDataCategory.Vex,
"sbom" => MirrorBundleDataCategory.Sbom,
"policy-snapshot" => MirrorBundleDataCategory.PolicySnapshot,
"policy-evaluations" => MirrorBundleDataCategory.PolicyEvaluations,
"policy-result" => MirrorBundleDataCategory.PolicyEvaluations,
"vex-consensus" => MirrorBundleDataCategory.VexConsensus,
"findings" => MirrorBundleDataCategory.Findings,
"scan-report" => MirrorBundleDataCategory.Findings,
_ => null
};
}
private static string ComputeBundlePath(MirrorBundleDataCategory category, string fileName, bool isNormalized, string? subjectId)
{
var prefix = isNormalized ? "data/normalized" : "data/raw";
return category switch
{
MirrorBundleDataCategory.Advisories => $"{prefix}/advisories/{fileName}",
MirrorBundleDataCategory.Vex => $"{prefix}/vex/{fileName}",
MirrorBundleDataCategory.Sbom when !string.IsNullOrEmpty(subjectId) =>
$"data/raw/sboms/{SanitizeSegment(subjectId)}/{fileName}",
MirrorBundleDataCategory.Sbom => $"data/raw/sboms/{fileName}",
MirrorBundleDataCategory.PolicySnapshot => "data/policy/snapshot.json",
MirrorBundleDataCategory.PolicyEvaluations => $"data/policy/{fileName}",
MirrorBundleDataCategory.VexConsensus => $"data/consensus/{fileName}",
MirrorBundleDataCategory.Findings => $"data/findings/{fileName}",
_ => $"data/other/{fileName}"
};
}
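// Illustrative mappings (hypothetical values):
//   ComputeBundlePath(Advisories, "advisory-x.json", isNormalized: true, subjectId: null)
//     => "data/normalized/advisories/advisory-x.json"
//   ComputeBundlePath(Sbom, "app.json", isNormalized: true, subjectId: "registry.example/app")
//     => "data/raw/sboms/registry.example-app/app.json" (SBOMs always land under data/raw)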
private static string SanitizeSegment(string value)
{
if (string.IsNullOrWhiteSpace(value))
return "subject";
var builder = new System.Text.StringBuilder(value.Length);
foreach (var ch in value.Trim())
{
if (char.IsLetterOrDigit(ch))
builder.Append(char.ToLowerInvariant(ch));
else if (ch is '-' or '_' or '.')
builder.Append(ch);
else
builder.Append('-');
}
return builder.Length == 0 ? "subject" : builder.ToString();
}
private static string SanitizeFileName(string name)
{
if (string.IsNullOrWhiteSpace(name))
return "item.json";
var result = name.Trim().ToLowerInvariant();
foreach (var invalid in Path.GetInvalidFileNameChars())
{
result = result.Replace(invalid, '_');
}
result = result.Replace('/', '_').Replace('\\', '_');
if (result.Length > 64)
{
var ext = Path.GetExtension(result);
result = result[..(60 - ext.Length)] + ext;
}
return string.IsNullOrWhiteSpace(result) ? "item.json" : result;
}
private static MirrorBundleSelectors ExtractSelectors(IReadOnlyList<ResolvedExportItem> items)
{
var products = items
.Select(i => i.SourceRef)
.Where(s => !string.IsNullOrWhiteSpace(s))
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
var timestamps = items.Where(i => i.CreatedAt != default).Select(i => i.CreatedAt).ToList();
var minCreated = timestamps.Count > 0 ? timestamps.Min() : default;
var maxCreated = timestamps.Count > 0 ? timestamps.Max() : default;
var ecosystems = items
.Where(i => i.Metadata.TryGetValue("ecosystem", out _))
.Select(i => i.Metadata["ecosystem"])
.Distinct()
.OrderBy(s => s, StringComparer.Ordinal)
.ToList();
return new MirrorBundleSelectors(
products.Count > 0 ? products : ["*"],
minCreated != default ? minCreated : null,
maxCreated != default ? maxCreated : null,
ecosystems.Count > 0 ? ecosystems : null);
}
private ExportAdapterResult CreateEmptyResult(ExportAdapterContext context, DateTimeOffset startTime)
{
return new ExportAdapterResult
{
Success = true,
ItemResults = [],
Artifacts = [],
ManifestCounts = new ExportManifestCounts(),
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
private ExportAdapterResult CreateNoChangesResult(
ExportAdapterContext context,
MirrorDeltaComputeResult deltaResult,
DateTimeOffset startTime)
{
_logger.LogInformation(
"Delta export completed with no changes. Base: {BaseExportId}",
deltaResult.BaseExportId);
return new ExportAdapterResult
{
Success = true,
ItemResults = [],
Artifacts = [],
ManifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = context.Items.Count,
SuccessfulItems = context.Items.Count,
SkippedItems = context.Items.Count, // All items skipped due to no changes
ByStatus = new Dictionary<string, int>
{
["unchanged"] = deltaResult.UnchangedItems.Count
}
},
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
private static ExportManifestCounts BuildManifestCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results,
MirrorDeltaComputeResult deltaResult,
long totalSizeBytes)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return new ExportManifestCounts
{
TotalItems = items.Count,
ProcessedItems = results.Count,
SuccessfulItems = results.Count(r => r.Success),
FailedItems = results.Count(r => !r.Success),
SkippedItems = deltaResult.UnchangedItems.Count,
ArtifactCount = 1,
TotalSizeBytes = totalSizeBytes,
ByKind = items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count()),
ByStatus = new Dictionary<string, int>
{
["added"] = deltaResult.AddedItems.Count,
["changed"] = deltaResult.ChangedItems.Count,
["removed"] = deltaResult.RemovedItems.Count,
["unchanged"] = deltaResult.UnchangedItems.Count
}
};
}
}

View File

@@ -0,0 +1,529 @@
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Export adapter that produces Trivy-compatible vulnerability database bundles.
/// Schema v2 compatible with Trivy 0.46.x - 0.50.x.
/// </summary>
public sealed class TrivyDbAdapter : IExportAdapter
{
private const int SupportedSchemaVersion = 2;
private const string BundleFileName = "trivy-db.tar.gz";
private const string MetadataFileName = "metadata.json";
private const string DbFileName = "trivy.db";
private readonly ILogger<TrivyDbAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly TrivyDbAdapterOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = null // Preserve exact property names
};
public TrivyDbAdapter(
ILogger<TrivyDbAdapter> logger,
ICryptoHash cryptoHash,
TrivyDbAdapterOptions? options = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = options ?? new TrivyDbAdapterOptions();
}
/// <inheritdoc />
public string AdapterId => "trivy:db";
/// <inheritdoc />
public string DisplayName => "Trivy Vulnerability Database";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.TrivyDb];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting Trivy DB export for {ItemCount} items (schema v{SchemaVersion})",
context.Items.Count,
_options.SchemaVersion);
// Validate schema version
if (_options.SchemaVersion != SupportedSchemaVersion)
{
return ExportAdapterResult.Failed(
$"Unsupported Trivy DB schema version {_options.SchemaVersion}. Only v{SupportedSchemaVersion} is supported.");
}
// Create temp directory for staging
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-db-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Process items and collect vulnerabilities
var itemResults = new List<AdapterItemResult>();
var namespaces = new Dictionary<string, TrivyNamespaceEntry>(StringComparer.OrdinalIgnoreCase);
var mapper = new TrivySchemaMapper(
_logger.CreateLogger<TrivySchemaMapper>(),
_options);
await CollectVulnerabilitiesAsync(
context,
mapper,
namespaces,
itemResults,
cancellationToken);
var totalVulnCount = namespaces.Values.Sum(ns => ns.Vulnerabilities.Count);
// Check for empty output
if (totalVulnCount == 0 && !_options.AllowEmpty)
{
return ExportAdapterResult.Failed(
"No vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
}
_logger.LogInformation(
"Collected {VulnCount} vulnerabilities across {NamespaceCount} namespaces",
totalVulnCount,
namespaces.Count);
// Build the database file (JSON-based for simplicity)
var dbPath = Path.Combine(tempDir, DbFileName);
await BuildDatabaseFileAsync(namespaces, dbPath, cancellationToken);
// Generate metadata
var metadata = GenerateMetadata(
context,
namespaces.Count,
totalVulnCount);
var metadataPath = Path.Combine(tempDir, MetadataFileName);
var metadataJson = JsonSerializer.Serialize(metadata, JsonOptions);
await File.WriteAllTextAsync(metadataPath, metadataJson, cancellationToken);
// Create the bundle tarball
var bundlePath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-{BundleFileName}");
await CreateBundleAsync(tempDir, bundlePath, cancellationToken);
// Calculate bundle hash
var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken);
var bundleHash = _cryptoHash.ComputeHashHex(bundleBytes, "sha256");
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumPath = bundlePath + ".sha256";
var checksumContent = $"{bundleHash} {Path.GetFileName(bundlePath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = bundlePath,
SizeBytes = bundleBytes.Length,
Sha256 = bundleHash,
ContentType = "application/gzip",
ItemCount = totalVulnCount,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Trivy DB bundle created: {Path} ({Bytes} bytes, {VulnCount} vulnerabilities, {NamespaceCount} namespaces, hash: {Hash})",
bundlePath,
artifact.SizeBytes,
totalVulnCount,
namespaces.Count,
bundleHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build Trivy DB bundle");
return ExportAdapterResult.Failed($"Trivy DB bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Trivy DB adapter doesn't support streaming - all items must be processed together
_logger.LogWarning("Trivy DB adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
// Validate output directory
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
// Validate format
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
// Validate schema version
if (_options.SchemaVersion != SupportedSchemaVersion)
{
errors.Add($"Schema version {_options.SchemaVersion} is not supported. Only v{SupportedSchemaVersion} is supported.");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task CollectVulnerabilitiesAsync(
ExportAdapterContext context,
TrivySchemaMapper mapper,
Dictionary<string, TrivyNamespaceEntry> namespaces,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
// Only process advisory-type items
if (!IsAdvisoryItem(item.Kind))
{
_logger.LogDebug("Skipping non-advisory item {ItemId} of kind {Kind}", item.ItemId, item.Kind);
continue;
}
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Map to Trivy vulnerabilities
var vulns = mapper.MapAdvisory(content.JsonContent, item.SourceRef);
if (vulns.Count == 0)
{
_logger.LogDebug("No vulnerabilities mapped from item {ItemId}", item.ItemId);
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ProcessedAt = DateTimeOffset.UtcNow
});
continue;
}
// Group by namespace
foreach (var vuln in vulns)
{
var ns = vuln.DataSource?.Id ?? "unknown";
if (!namespaces.TryGetValue(ns, out var entry))
{
entry = new TrivyNamespaceEntry { Namespace = ns };
namespaces[ns] = entry;
}
// Deduplicate by (vulnId, packageName, version)
var key = $"{vuln.VulnerabilityId}|{vuln.PackageName}|{vuln.InstalledVersion}";
if (!entry.Vulnerabilities.Any(v =>
$"{v.VulnerabilityId}|{v.PackageName}|{v.InstalledVersion}" == key))
{
entry.Vulnerabilities.Add(vuln);
}
}
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ContentHash = content.OriginalHash,
ProcessedAt = DateTimeOffset.UtcNow
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
}
private static bool IsAdvisoryItem(string kind)
{
return kind.Equals("advisory", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("advisories", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("vulnerability", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("cve", StringComparison.OrdinalIgnoreCase);
}
private async Task BuildDatabaseFileAsync(
Dictionary<string, TrivyNamespaceEntry> namespaces,
string dbPath,
CancellationToken cancellationToken)
{
// For simplicity, we use a JSON-based format that Trivy can import
// In production, this would be a BoltDB file
var dbContent = new Dictionary<string, object>
{
["version"] = SupportedSchemaVersion,
["namespaces"] = namespaces.Values
.OrderBy(ns => ns.Namespace, StringComparer.Ordinal)
.Select(ns => new
{
ns.Namespace,
Vulnerabilities = ns.Vulnerabilities
.OrderBy(v => v.VulnerabilityId, StringComparer.Ordinal)
.ThenBy(v => v.PackageName, StringComparer.Ordinal)
.ToList()
})
.ToList()
};
var json = JsonSerializer.Serialize(dbContent, new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = null
});
await File.WriteAllTextAsync(dbPath, json, Encoding.UTF8, cancellationToken);
}
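// The resulting trivy.db is a single JSON document of roughly this shape
// (illustrative; a production build would emit a BoltDB file instead):
//   {
//     "version": 2,
//     "namespaces": [
//       { "Namespace": "alpine", "Vulnerabilities": [ { "VulnerabilityID": "CVE-2024-0001", … } ] }
//     ]
//   }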
private TrivyDbMetadata GenerateMetadata(
ExportAdapterContext context,
int namespaceCount,
int vulnerabilityCount)
{
var now = context.TimeProvider.GetUtcNow();
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
return new TrivyDbMetadata
{
Version = SupportedSchemaVersion,
Type = 0, // Full DB
UpdatedAt = now,
DownloadedAt = now,
NextUpdate = now.AddDays(1), // Default to next day
Stella = new TrivyDbStellaExtension
{
Version = "1.0.0",
RunId = runId,
TenantId = context.TenantId,
SchemaVersion = SupportedSchemaVersion,
GeneratedAt = now,
SourceCount = namespaceCount,
VulnerabilityCount = vulnerabilityCount
}
};
}
private static async Task CreateBundleAsync(
string sourceDir,
string outputPath,
CancellationToken cancellationToken)
{
// Create a memory stream for the tar, then gzip it
using var tarStream = new MemoryStream();
// Simple tar creation (header + content for each file)
foreach (var file in Directory.GetFiles(sourceDir))
{
cancellationToken.ThrowIfCancellationRequested();
var fileName = Path.GetFileName(file);
var content = await File.ReadAllBytesAsync(file, cancellationToken);
// Write tar header (simplified USTAR format)
WriteTarHeader(tarStream, fileName, content.Length);
tarStream.Write(content);
// Pad to 512-byte boundary
var padding = (512 - (content.Length % 512)) % 512;
if (padding > 0)
{
tarStream.Write(new byte[padding]);
}
}
// Write two empty 512-byte blocks to end the archive
tarStream.Write(new byte[1024]);
// Gzip the tar stream
tarStream.Position = 0;
await using var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write);
await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);
await tarStream.CopyToAsync(gzipStream, cancellationToken);
}
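// Illustrative test-side helper (not part of this commit): sanity-check the hand-rolled
// USTAR output by listing entries with System.Formats.Tar (available since .NET 7).
private static async Task ListBundleEntriesAsync(string bundlePath, CancellationToken cancellationToken)
{
    await using var fileStream = File.OpenRead(bundlePath);
    await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
    await using var reader = new System.Formats.Tar.TarReader(gzipStream);
    while (await reader.GetNextEntryAsync(copyData: false, cancellationToken) is { } entry)
    {
        Console.WriteLine($"{entry.Name} ({entry.Length} bytes)"); // expect metadata.json and trivy.db
    }
}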
private static void WriteTarHeader(Stream stream, string fileName, long fileSize)
{
var header = new byte[512];
// File name (100 bytes)
var nameBytes = Encoding.ASCII.GetBytes(fileName);
Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100));
// File mode (8 bytes) - 0644
Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
// UID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
// GID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
// Size (12 bytes) - octal
var sizeOctal = Convert.ToString(fileSize, 8).PadLeft(11, '0') + "\0";
Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);
// Mtime (12 bytes) - fixed for determinism (2024-01-01 00:00:00 UTC)
Encoding.ASCII.GetBytes("17042672000\0").CopyTo(header, 136);
// Checksum placeholder (8 spaces)
Encoding.ASCII.GetBytes(" ").CopyTo(header, 148);
// Type flag - '0' for regular file
header[156] = (byte)'0';
// Magic (6 bytes) - "ustar\0"
Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
// Version (2 bytes) - "00"
Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
// Calculate checksum
var checksum = 0;
for (var i = 0; i < 512; i++)
{
checksum += header[i];
}
var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);
stream.Write(header);
}
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
}
internal static class LoggerExtensions
{
public static ILogger<T> CreateLogger<T>(this ILogger logger)
{
// Wrap the provided logger so it can be consumed as ILogger<T>
return new LoggerWrapper<T>(logger);
}
private sealed class LoggerWrapper<T> : ILogger<T>
{
private readonly ILogger _inner;
public LoggerWrapper(ILogger inner) => _inner = inner;
public IDisposable? BeginScope<TState>(TState state) where TState : notnull
=> _inner.BeginScope(state);
public bool IsEnabled(LogLevel logLevel) => _inner.IsEnabled(logLevel);
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
=> _inner.Log(logLevel, eventId, state, exception, formatter);
}
}

View File

@@ -0,0 +1,374 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Configuration options for Trivy DB adapter.
/// </summary>
public sealed record TrivyDbAdapterOptions
{
/// <summary>
/// Trivy DB schema version to generate. Only v2 is currently supported.
/// </summary>
public int SchemaVersion { get; init; } = 2;
/// <summary>
/// Whether to include Java DB bundle (Maven/Gradle/SBT supplement).
/// </summary>
public bool IncludeJavaDb { get; init; }
/// <summary>
/// Whether to allow empty output (fail if no records match when false).
/// </summary>
public bool AllowEmpty { get; init; }
/// <summary>
/// Maximum CVSS vectors to include per vulnerability entry.
/// </summary>
public int MaxCvssVectorsPerEntry { get; init; } = 5;
/// <summary>
/// Namespaces to include (empty = all).
/// </summary>
public IReadOnlyList<string> IncludeNamespaces { get; init; } = [];
/// <summary>
/// Namespaces to exclude.
/// </summary>
public IReadOnlyList<string> ExcludeNamespaces { get; init; } = [];
}
/// <summary>
/// Trivy DB metadata.json structure.
/// </summary>
public sealed record TrivyDbMetadata
{
[JsonPropertyName("version")]
public int Version { get; init; } = 2;
[JsonPropertyName("type")]
public int Type { get; init; } = 0; // 0 = full DB, 1 = light
[JsonPropertyName("nextUpdate")]
public DateTimeOffset NextUpdate { get; init; }
[JsonPropertyName("updatedAt")]
public DateTimeOffset UpdatedAt { get; init; }
[JsonPropertyName("downloadedAt")]
public DateTimeOffset DownloadedAt { get; init; }
/// <summary>
/// StellaOps extension block for provenance tracking.
/// </summary>
[JsonPropertyName("stella")]
public TrivyDbStellaExtension? Stella { get; init; }
}
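// Illustrative metadata.json (values hypothetical):
//   {
//     "version": 2, "type": 0,
//     "nextUpdate": "2025-12-12T00:00:00+00:00",
//     "updatedAt": "2025-12-11T00:00:00+00:00",
//     "downloadedAt": "2025-12-11T00:00:00+00:00",
//     "stella": { "version": "1.0.0", "runId": "7c9e6679-…", "schemaVersion": 2, … }
//   }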
/// <summary>
/// StellaOps extension block in Trivy metadata.
/// </summary>
public sealed record TrivyDbStellaExtension
{
[JsonPropertyName("version")]
public string Version { get; init; } = "1.0.0";
[JsonPropertyName("runId")]
public Guid RunId { get; init; }
[JsonPropertyName("profileId")]
public Guid? ProfileId { get; init; }
[JsonPropertyName("tenantId")]
public Guid TenantId { get; init; }
[JsonPropertyName("policySnapshotId")]
public Guid? PolicySnapshotId { get; init; }
[JsonPropertyName("schemaVersion")]
public int SchemaVersion { get; init; }
[JsonPropertyName("generatedAt")]
public DateTimeOffset GeneratedAt { get; init; }
[JsonPropertyName("sourceCount")]
public int SourceCount { get; init; }
[JsonPropertyName("vulnerabilityCount")]
public int VulnerabilityCount { get; init; }
}
/// <summary>
/// Trivy vulnerability entry (simplified schema v2 compatible).
/// </summary>
public sealed record TrivyVulnerability
{
[JsonPropertyName("VulnerabilityID")]
public required string VulnerabilityId { get; init; }
[JsonPropertyName("PkgName")]
public required string PackageName { get; init; }
[JsonPropertyName("InstalledVersion")]
public string? InstalledVersion { get; init; }
[JsonPropertyName("FixedVersion")]
public string? FixedVersion { get; init; }
[JsonPropertyName("Severity")]
public required string Severity { get; init; }
[JsonPropertyName("SeveritySource")]
public string? SeveritySource { get; init; }
[JsonPropertyName("Title")]
public string? Title { get; init; }
[JsonPropertyName("Description")]
public string? Description { get; init; }
[JsonPropertyName("References")]
public IReadOnlyList<string> References { get; init; } = [];
[JsonPropertyName("CVSS")]
public IReadOnlyDictionary<string, TrivyCvss>? Cvss { get; init; }
[JsonPropertyName("CweIDs")]
public IReadOnlyList<string> CweIds { get; init; } = [];
[JsonPropertyName("PublishedDate")]
public DateTimeOffset? PublishedDate { get; init; }
[JsonPropertyName("LastModifiedDate")]
public DateTimeOffset? LastModifiedDate { get; init; }
[JsonPropertyName("DataSource")]
public TrivyDataSource? DataSource { get; init; }
}
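// Illustrative serialized entry, showing the Trivy-style PascalCase names (values hypothetical):
//   { "VulnerabilityID": "CVE-2024-0001", "PkgName": "openssl", "InstalledVersion": "3.0.1",
//     "FixedVersion": "3.0.2", "Severity": "HIGH", "SeveritySource": "nvd",
//     "References": ["https://nvd.nist.gov/vuln/detail/CVE-2024-0001"],
//     "DataSource": { "ID": "nvd", "Name": "National Vulnerability Database" } }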
/// <summary>
/// CVSS score entry for Trivy format.
/// </summary>
public sealed record TrivyCvss
{
[JsonPropertyName("V2Vector")]
public string? V2Vector { get; init; }
[JsonPropertyName("V3Vector")]
public string? V3Vector { get; init; }
[JsonPropertyName("V2Score")]
public double? V2Score { get; init; }
[JsonPropertyName("V3Score")]
public double? V3Score { get; init; }
}
/// <summary>
/// Data source reference for Trivy vulnerability.
/// </summary>
public sealed record TrivyDataSource
{
[JsonPropertyName("ID")]
public required string Id { get; init; }
[JsonPropertyName("Name")]
public required string Name { get; init; }
[JsonPropertyName("URL")]
public string? Url { get; init; }
}
/// <summary>
/// Trivy namespace (vendor/ecosystem) entry.
/// </summary>
public sealed record TrivyNamespaceEntry
{
/// <summary>
/// Namespace identifier (e.g., "ubuntu", "alpine", "npm").
/// </summary>
public required string Namespace { get; init; }
/// <summary>
/// Vulnerabilities in this namespace.
/// </summary>
public List<TrivyVulnerability> Vulnerabilities { get; init; } = [];
}
/// <summary>
/// Result of Trivy DB bundle build.
/// </summary>
public sealed record TrivyDbBuildResult
{
/// <summary>
/// Whether the build succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Bundle stream (tar.gz).
/// </summary>
public MemoryStream? BundleStream { get; init; }
/// <summary>
/// SHA-256 hash of the bundle.
/// </summary>
public string? BundleHash { get; init; }
/// <summary>
/// Generated metadata.
/// </summary>
public TrivyDbMetadata? Metadata { get; init; }
/// <summary>
/// Number of namespaces in the bundle.
/// </summary>
public int NamespaceCount { get; init; }
/// <summary>
/// Total number of vulnerability entries.
/// </summary>
public int VulnerabilityCount { get; init; }
/// <summary>
/// Error message if build failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static TrivyDbBuildResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Severity mapping between StellaOps and Trivy.
/// </summary>
public static class TrivySeverityMapper
{
private static readonly Dictionary<string, string> SeverityMap = new(StringComparer.OrdinalIgnoreCase)
{
["critical"] = "CRITICAL",
["high"] = "HIGH",
["medium"] = "MEDIUM",
["moderate"] = "MEDIUM",
["low"] = "LOW",
["none"] = "UNKNOWN",
["info"] = "UNKNOWN",
["informational"] = "UNKNOWN",
["unknown"] = "UNKNOWN"
};
/// <summary>
/// Maps a StellaOps severity to Trivy severity.
/// </summary>
public static string MapSeverity(string? severity)
{
if (string.IsNullOrWhiteSpace(severity))
return "UNKNOWN";
return SeverityMap.TryGetValue(severity.Trim(), out var mapped)
? mapped
: "UNKNOWN";
}
}
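// Illustrative mappings:
//   TrivySeverityMapper.MapSeverity("Moderate") => "MEDIUM"
//   TrivySeverityMapper.MapSeverity(" HIGH ")   => "HIGH"    (input trimmed, lookup case-insensitive)
//   TrivySeverityMapper.MapSeverity(null)       => "UNKNOWN"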
/// <summary>
/// Namespace mapper for vendor/ecosystem to Trivy namespace.
/// </summary>
public static class TrivyNamespaceMapper
{
private static readonly Dictionary<string, string> VendorToNamespace = new(StringComparer.OrdinalIgnoreCase)
{
// OS distributions
["Ubuntu"] = "ubuntu",
["Debian"] = "debian",
["Alpine"] = "alpine",
["Red Hat"] = "redhat",
["RedHat"] = "redhat",
["RHEL"] = "redhat",
["CentOS"] = "centos",
["Oracle Linux"] = "oracle",
["Amazon Linux"] = "amazon",
["SUSE"] = "suse",
["openSUSE"] = "opensuse",
["Photon OS"] = "photon",
["Arch Linux"] = "arch",
["Fedora"] = "fedora",
["Rocky Linux"] = "rocky",
["AlmaLinux"] = "alma",
["Wolfi"] = "wolfi",
["Chainguard"] = "chainguard",
["Mariner"] = "mariner",
["CBL-Mariner"] = "mariner",
// Language ecosystems
["npm"] = "npm",
["Node.js"] = "npm",
["PyPI"] = "pypi",
["Python"] = "pypi",
["RubyGems"] = "rubygems",
["Ruby"] = "rubygems",
["Maven"] = "maven",
["Java"] = "maven",
["NuGet"] = "nuget",
[".NET"] = "nuget",
["Go"] = "go",
["Golang"] = "go",
["Cargo"] = "cargo",
["Rust"] = "cargo",
["Packagist"] = "packagist",
["PHP"] = "packagist",
["Hex"] = "hex",
["Erlang"] = "hex",
["Elixir"] = "hex",
["Pub"] = "pub",
["Dart"] = "pub",
["Swift"] = "swift",
["CocoaPods"] = "cocoapods",
// Generic sources
["NVD"] = "nvd",
["GHSA"] = "ghsa",
["GitHub"] = "ghsa",
["OSV"] = "osv",
["CISA KEV"] = "kev"
};
/// <summary>
/// Maps a vendor/ecosystem to Trivy namespace.
/// </summary>
public static string MapToNamespace(string? vendor, string? ecosystem = null)
{
// Try vendor first
if (!string.IsNullOrWhiteSpace(vendor) &&
VendorToNamespace.TryGetValue(vendor.Trim(), out var ns))
{
return ns;
}
// Try ecosystem
if (!string.IsNullOrWhiteSpace(ecosystem) &&
VendorToNamespace.TryGetValue(ecosystem.Trim(), out ns))
{
return ns;
}
// Fallback to lowercase vendor
return string.IsNullOrWhiteSpace(vendor)
? "unknown"
: vendor.Trim().ToLowerInvariant().Replace(" ", "-");
}
/// <summary>
/// Checks if a namespace is a language ecosystem (vs OS distribution).
/// </summary>
public static bool IsLanguageEcosystem(string ns)
{
return ns switch
{
"npm" or "pypi" or "rubygems" or "maven" or "nuget" or
"go" or "cargo" or "packagist" or "hex" or "pub" or
"swift" or "cocoapods" => true,
_ => false
};
}
}
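// Mapping sketch (illustrative inputs): MapToNamespace("Red Hat") => "redhat";
// MapToNamespace(null, "PyPI") => "pypi" (the ecosystem is consulted when the vendor
// misses); MapToNamespace("Acme Corp") => "acme-corp" via the lowercase,
// space-to-hyphen fallback for vendors absent from the table.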


@@ -0,0 +1,622 @@
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Planner;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Export adapter that produces Trivy Java DB bundles (Maven/Gradle/SBT supplement).
/// Schema v1 compatible with Trivy 0.46.x - 0.50.x Java scanning.
/// </summary>
public sealed class TrivyJavaDbAdapter : IExportAdapter
{
private const int SupportedSchemaVersion = 1;
private const string BundleFileName = "trivy-java-db.tar.gz";
private const string MetadataFileName = "metadata.json";
private const string IndexFileName = "indexes.json";
// Java ecosystem namespaces
private static readonly HashSet<string> JavaNamespaces = new(StringComparer.OrdinalIgnoreCase)
{
"maven",
"gradle",
"sbt",
"java",
"ghsa-maven"
};
private readonly ILogger<TrivyJavaDbAdapter> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly TrivyDbAdapterOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = null // Preserve exact property names
};
public TrivyJavaDbAdapter(
ILogger<TrivyJavaDbAdapter> logger,
ICryptoHash cryptoHash,
TrivyDbAdapterOptions? options = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = options ?? new TrivyDbAdapterOptions { IncludeJavaDb = true };
}
/// <inheritdoc />
public string AdapterId => "trivy:java-db";
/// <inheritdoc />
public string DisplayName => "Trivy Java Vulnerability Database";
/// <inheritdoc />
public IReadOnlyList<ExportFormat> SupportedFormats { get; } = [ExportFormat.TrivyJavaDb];
/// <inheritdoc />
public bool SupportsStreaming => false;
/// <inheritdoc />
public async Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
var startTime = context.TimeProvider.GetUtcNow();
try
{
_logger.LogInformation(
"Starting Trivy Java DB export for {ItemCount} items",
context.Items.Count);
// Create temp directory for staging
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-java-db-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Process items and collect Java-specific vulnerabilities
var itemResults = new List<AdapterItemResult>();
var javaIndexes = new Dictionary<string, JavaDbIndex>(StringComparer.OrdinalIgnoreCase);
var javaOptions = new TrivyDbAdapterOptions
{
SchemaVersion = SupportedSchemaVersion,
IncludeJavaDb = true,
IncludeNamespaces = JavaNamespaces.ToList(),
AllowEmpty = _options.AllowEmpty,
MaxCvssVectorsPerEntry = _options.MaxCvssVectorsPerEntry
};
// ILogger<TrivyJavaDbAdapter> implements ILogger, so the adapter's own logger is
// reused for the mapper (ILogger exposes no CreateLogger method).
var mapper = new TrivySchemaMapper(_logger, javaOptions);
await CollectJavaVulnerabilitiesAsync(
context,
mapper,
javaIndexes,
itemResults,
cancellationToken);
var totalVulnCount = javaIndexes.Values.Sum(idx => idx.Entries.Count);
// Check for empty output
if (totalVulnCount == 0 && !_options.AllowEmpty)
{
return ExportAdapterResult.Failed(
"No Java vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
}
_logger.LogInformation(
"Collected {VulnCount} Java vulnerabilities across {IndexCount} GAV indexes",
totalVulnCount,
javaIndexes.Count);
// Build the indexes file (GAV-based lookup)
var indexesPath = Path.Combine(tempDir, IndexFileName);
await BuildIndexesFileAsync(javaIndexes, indexesPath, cancellationToken);
// Generate metadata
var metadata = GenerateMetadata(context, javaIndexes.Count, totalVulnCount);
var metadataPath = Path.Combine(tempDir, MetadataFileName);
var metadataJson = JsonSerializer.Serialize(metadata, JsonOptions);
await File.WriteAllTextAsync(metadataPath, metadataJson, cancellationToken);
// Create the bundle tarball
var bundlePath = Path.Combine(
context.Config.OutputDirectory,
$"{context.Config.BaseName}-{BundleFileName}");
await CreateBundleAsync(tempDir, bundlePath, cancellationToken);
// Calculate bundle hash
var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken);
var bundleHash = _cryptoHash.ComputeHashHex(bundleBytes, "sha256");
// Write checksum file if requested
if (context.Config.IncludeChecksums)
{
var checksumPath = bundlePath + ".sha256";
var checksumContent = $"{bundleHash} {Path.GetFileName(bundlePath)}\n";
await File.WriteAllTextAsync(checksumPath, checksumContent, cancellationToken);
}
// Create artifact entry
var artifact = new ExportOutputArtifact
{
Path = bundlePath,
SizeBytes = bundleBytes.Length,
Sha256 = bundleHash,
ContentType = "application/gzip",
ItemCount = totalVulnCount,
IsCompressed = true,
Compression = CompressionFormat.Gzip
};
var manifestCounts = new ExportManifestCounts
{
TotalItems = context.Items.Count,
ProcessedItems = itemResults.Count,
SuccessfulItems = itemResults.Count(r => r.Success),
FailedItems = itemResults.Count(r => !r.Success),
ArtifactCount = 1,
TotalSizeBytes = artifact.SizeBytes,
ByKind = BuildKindCounts(context.Items, itemResults),
ByStatus = new Dictionary<string, int>
{
["success"] = itemResults.Count(r => r.Success),
["failed"] = itemResults.Count(r => !r.Success)
}
};
_logger.LogInformation(
"Trivy Java DB bundle created: {Path} ({Bytes} bytes, {VulnCount} vulnerabilities, {IndexCount} GAV indexes, hash: {Hash})",
bundlePath,
artifact.SizeBytes,
totalVulnCount,
javaIndexes.Count,
bundleHash);
return new ExportAdapterResult
{
Success = true,
ItemResults = itemResults,
Artifacts = [artifact],
ManifestCounts = manifestCounts,
ProcessingTime = context.TimeProvider.GetUtcNow() - startTime,
CompletedAt = context.TimeProvider.GetUtcNow()
};
}
finally
{
// Clean up temp directory
try
{
Directory.Delete(tempDir, recursive: true);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clean up temp directory: {Path}", tempDir);
}
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to build Trivy Java DB bundle");
return ExportAdapterResult.Failed($"Trivy Java DB bundle build failed: {ex.Message}");
}
}
/// <inheritdoc />
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Java DB adapter doesn't support streaming - all items must be processed together
_logger.LogWarning("Trivy Java DB adapter does not support streaming. Use ProcessAsync instead.");
var result = await ProcessAsync(context, cancellationToken);
foreach (var itemResult in result.ItemResults)
{
yield return itemResult;
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
// Validate output directory
if (string.IsNullOrWhiteSpace(config.OutputDirectory))
{
errors.Add("Output directory must be specified.");
}
else if (!Directory.Exists(config.OutputDirectory))
{
try
{
Directory.CreateDirectory(config.OutputDirectory);
}
catch (Exception ex)
{
errors.Add($"Cannot create output directory: {ex.Message}");
}
}
// Validate format
if (!SupportedFormats.Contains(config.FormatOptions.Format))
{
errors.Add($"Format '{config.FormatOptions.Format}' is not supported by this adapter. Supported: {string.Join(", ", SupportedFormats)}");
}
return Task.FromResult<IReadOnlyList<string>>(errors);
}
private async Task CollectJavaVulnerabilitiesAsync(
ExportAdapterContext context,
TrivySchemaMapper mapper,
Dictionary<string, JavaDbIndex> javaIndexes,
List<AdapterItemResult> itemResults,
CancellationToken cancellationToken)
{
foreach (var item in context.Items)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
// Only process advisory-type items
if (!IsAdvisoryItem(item.Kind))
{
_logger.LogDebug("Skipping non-advisory item {ItemId} of kind {Kind}", item.ItemId, item.Kind);
continue;
}
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
if (!content.Success || string.IsNullOrWhiteSpace(content.JsonContent))
{
itemResults.Add(AdapterItemResult.Failed(
item.ItemId,
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
continue;
}
// Map to Trivy vulnerabilities (filtered to Java namespaces)
var vulns = mapper.MapAdvisory(content.JsonContent, item.SourceRef);
if (vulns.Count == 0)
{
_logger.LogDebug("No Java vulnerabilities mapped from item {ItemId}", item.ItemId);
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ProcessedAt = context.TimeProvider.GetUtcNow() // injected clock, for determinism
});
continue;
}
// Build GAV-based indexes (GroupId:ArtifactId:Version)
foreach (var vuln in vulns)
{
// Only include Java ecosystem vulnerabilities
var ns = vuln.DataSource?.Id ?? "unknown";
if (!IsJavaNamespace(ns))
{
continue;
}
// Parse package name as GAV coordinate
var gav = ParseGavCoordinate(vuln.PackageName);
if (gav is null)
{
// Fall back to using package name as artifact ID
gav = new GavCoordinate("unknown", vuln.PackageName, vuln.InstalledVersion);
}
var indexKey = $"{gav.GroupId}:{gav.ArtifactId}";
if (!javaIndexes.TryGetValue(indexKey, out var index))
{
index = new JavaDbIndex
{
GroupId = gav.GroupId,
ArtifactId = gav.ArtifactId
};
javaIndexes[indexKey] = index;
}
// Add vulnerability entry
var entry = new JavaDbEntry
{
VulnerabilityId = vuln.VulnerabilityId,
AffectedVersions = vuln.InstalledVersion ?? "*",
FixedVersions = vuln.FixedVersion,
Severity = vuln.Severity,
Title = vuln.Title,
Description = vuln.Description?.Length > 500
? vuln.Description[..500] + "..."
: vuln.Description
};
// Deduplicate
if (!index.Entries.Any(e => e.VulnerabilityId == entry.VulnerabilityId &&
e.AffectedVersions == entry.AffectedVersions))
{
index.Entries.Add(entry);
}
}
itemResults.Add(new AdapterItemResult
{
ItemId = item.ItemId,
Success = true,
ContentHash = content.OriginalHash,
ProcessedAt = context.TimeProvider.GetUtcNow() // injected clock, for determinism
});
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
}
}
}
private static bool IsAdvisoryItem(string kind)
{
return kind.Equals("advisory", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("advisories", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("vulnerability", StringComparison.OrdinalIgnoreCase) ||
kind.Equals("cve", StringComparison.OrdinalIgnoreCase);
}
private static bool IsJavaNamespace(string ns)
{
return JavaNamespaces.Contains(ns) ||
ns.StartsWith("maven", StringComparison.OrdinalIgnoreCase) ||
ns.StartsWith("gradle", StringComparison.OrdinalIgnoreCase) ||
ns.Contains("java", StringComparison.OrdinalIgnoreCase);
}
private static GavCoordinate? ParseGavCoordinate(string packageName)
{
if (string.IsNullOrWhiteSpace(packageName))
return null;
// Try GroupId:ArtifactId:Version format
var parts = packageName.Split(':');
if (parts.Length >= 2)
{
return new GavCoordinate(
parts[0],
parts[1],
parts.Length >= 3 ? parts[2] : null);
}
// Try GroupId/ArtifactId format (PURL style)
var slashIndex = packageName.LastIndexOf('/');
if (slashIndex > 0)
{
return new GavCoordinate(
packageName[..slashIndex].Replace('/', '.'),
packageName[(slashIndex + 1)..],
null);
}
return null;
}
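// Parsing sketch (hypothetical coordinates):
//   "com.fasterxml.jackson.core:jackson-databind:2.15.0"
//     => ("com.fasterxml.jackson.core", "jackson-databind", "2.15.0")
//   "org.apache.commons/commons-text" (PURL-style)
//     => ("org.apache.commons", "commons-text", null)
//   "log4j-core" => null, so the caller substitutes GroupId "unknown".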
private static async Task BuildIndexesFileAsync(
Dictionary<string, JavaDbIndex> indexes,
string indexesPath,
CancellationToken cancellationToken)
{
var sortedIndexes = indexes.Values
.OrderBy(idx => idx.GroupId, StringComparer.Ordinal)
.ThenBy(idx => idx.ArtifactId, StringComparer.Ordinal)
.Select(idx => new
{
idx.GroupId,
idx.ArtifactId,
Vulnerabilities = idx.Entries
.OrderBy(e => e.VulnerabilityId, StringComparer.Ordinal)
.ToList()
})
.ToList();
var content = new Dictionary<string, object>
{
["schemaVersion"] = SupportedSchemaVersion,
["type"] = "java",
["indexes"] = sortedIndexes
};
var json = JsonSerializer.Serialize(content, new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = null
});
await File.WriteAllTextAsync(indexesPath, json, Encoding.UTF8, cancellationToken);
}
private TrivyJavaDbMetadata GenerateMetadata(
ExportAdapterContext context,
int indexCount,
int vulnerabilityCount)
{
var now = context.TimeProvider.GetUtcNow();
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
return new TrivyJavaDbMetadata
{
Version = SupportedSchemaVersion,
Type = "java",
UpdatedAt = now,
DownloadedAt = now,
NextUpdate = now.AddDays(1),
Stella = new TrivyDbStellaExtension
{
Version = "1.0.0",
RunId = runId,
TenantId = context.TenantId,
SchemaVersion = SupportedSchemaVersion,
GeneratedAt = now,
SourceCount = indexCount,
VulnerabilityCount = vulnerabilityCount
}
};
}
private static async Task CreateBundleAsync(
string sourceDir,
string outputPath,
CancellationToken cancellationToken)
{
// Create a memory stream for the tar, then gzip it
using var tarStream = new MemoryStream();
// Simple tar creation (header + content for each file)
foreach (var file in Directory.GetFiles(sourceDir))
{
cancellationToken.ThrowIfCancellationRequested();
var fileName = Path.GetFileName(file);
var content = await File.ReadAllBytesAsync(file, cancellationToken);
// Write tar header (simplified USTAR format)
WriteTarHeader(tarStream, fileName, content.Length);
tarStream.Write(content);
// Pad to 512-byte boundary
var padding = (512 - (content.Length % 512)) % 512;
if (padding > 0)
{
tarStream.Write(new byte[padding]);
}
}
// Write two empty 512-byte blocks to end the archive
tarStream.Write(new byte[1024]);
// Gzip the tar stream
tarStream.Position = 0;
await using var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write);
await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);
await tarStream.CopyToAsync(gzipStream, cancellationToken);
}
private static void WriteTarHeader(Stream stream, string fileName, long fileSize)
{
var header = new byte[512];
// File name (100 bytes)
var nameBytes = Encoding.ASCII.GetBytes(fileName);
Array.Copy(nameBytes, 0, header, 0, Math.Min(nameBytes.Length, 100));
// File mode (8 bytes) - 0644
Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
// UID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
// GID (8 bytes) - 0
Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
// Size (12 bytes) - octal
var sizeOctal = Convert.ToString(fileSize, 8).PadLeft(11, '0') + "\0";
Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);
// Mtime (12 bytes) - fixed for determinism: 2024-01-01 00:00:00 UTC
// (1704067200 seconds = octal 14544440200)
Encoding.ASCII.GetBytes("14544440200\0").CopyTo(header, 136);
// Checksum placeholder (8 ASCII spaces; the field counts as spaces while summing)
Encoding.ASCII.GetBytes("        ").CopyTo(header, 148);
// Type flag - '0' for regular file
header[156] = (byte)'0';
// Magic (6 bytes) - "ustar\0"
Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
// Version (2 bytes) - "00"
Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
// Calculate checksum
var checksum = 0;
for (var i = 0; i < 512; i++)
{
checksum += header[i];
}
var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);
stream.Write(header);
}
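// Note: the 11-digit octal size field caps a single entry at 8 GiB
// (077777777777 octal = 8,589,934,591 bytes). This simplified writer deliberately
// omits the GNU/PAX extensions that would be required for larger members.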
private static IReadOnlyDictionary<string, int> BuildKindCounts(
IReadOnlyList<ResolvedExportItem> items,
IReadOnlyList<AdapterItemResult> results)
{
var successIds = results.Where(r => r.Success).Select(r => r.ItemId).ToHashSet();
return items
.Where(i => successIds.Contains(i.ItemId))
.GroupBy(i => i.Kind)
.ToDictionary(g => g.Key, g => g.Count());
}
// Internal types for Java DB
private sealed record GavCoordinate(string GroupId, string ArtifactId, string? Version);
private sealed class JavaDbIndex
{
public required string GroupId { get; init; }
public required string ArtifactId { get; init; }
public List<JavaDbEntry> Entries { get; init; } = [];
}
private sealed class JavaDbEntry
{
public required string VulnerabilityId { get; init; }
public required string AffectedVersions { get; init; }
public string? FixedVersions { get; init; }
public required string Severity { get; init; }
public string? Title { get; init; }
public string? Description { get; init; }
}
}
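// Shape of the emitted indexes.json (illustrative values; anonymous-object members
// serialize PascalCase because PropertyNamingPolicy is null):
// {"schemaVersion":1,"type":"java","indexes":[{"GroupId":"org.example",
//   "ArtifactId":"demo-lib","Vulnerabilities":[{"VulnerabilityId":"CVE-2024-0001",
//   "AffectedVersions":"<1.2.3","FixedVersions":"1.2.3","Severity":"HIGH",...}]}]}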
/// <summary>
/// Trivy Java DB metadata.json structure.
/// </summary>
public sealed record TrivyJavaDbMetadata
{
[System.Text.Json.Serialization.JsonPropertyName("version")]
public int Version { get; init; } = 1;
[System.Text.Json.Serialization.JsonPropertyName("type")]
public string Type { get; init; } = "java";
[System.Text.Json.Serialization.JsonPropertyName("nextUpdate")]
public DateTimeOffset NextUpdate { get; init; }
[System.Text.Json.Serialization.JsonPropertyName("updatedAt")]
public DateTimeOffset UpdatedAt { get; init; }
[System.Text.Json.Serialization.JsonPropertyName("downloadedAt")]
public DateTimeOffset DownloadedAt { get; init; }
/// <summary>
/// StellaOps extension block for provenance tracking.
/// </summary>
[System.Text.Json.Serialization.JsonPropertyName("stella")]
public TrivyDbStellaExtension? Stella { get; init; }
}


@@ -0,0 +1,463 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Adapters.Trivy;
/// <summary>
/// Maps StellaOps advisory/vulnerability data to Trivy DB schema.
/// </summary>
public sealed class TrivySchemaMapper
{
private readonly ILogger _logger;
private readonly TrivyDbAdapterOptions _options;
// Accepts any ILogger so a caller holding an ILogger<T> for another category can share it.
public TrivySchemaMapper(ILogger logger, TrivyDbAdapterOptions options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? throw new ArgumentNullException(nameof(options));
}
/// <summary>
/// Maps a StellaOps advisory JSON to Trivy vulnerability entries.
/// </summary>
public IReadOnlyList<TrivyVulnerability> MapAdvisory(string jsonContent, string? sourceRef = null)
{
var results = new List<TrivyVulnerability>();
try
{
using var doc = JsonDocument.Parse(jsonContent);
var root = doc.RootElement;
// Extract CVE identifiers
var cveIds = ExtractCveIds(root);
if (cveIds.Count == 0)
{
_logger.LogDebug("No CVE identifiers found in advisory");
return results;
}
// Extract vendor/ecosystem for namespace
var vendor = GetStringProperty(root, "source", "vendor") ??
GetStringProperty(root, "vendor") ??
GetStringProperty(root, "namespace");
var ecosystem = GetStringProperty(root, "ecosystem") ??
GetStringProperty(root, "type");
var ns = TrivyNamespaceMapper.MapToNamespace(vendor, ecosystem);
// Check namespace filters
if (_options.IncludeNamespaces.Count > 0 &&
!_options.IncludeNamespaces.Contains(ns, StringComparer.OrdinalIgnoreCase))
{
return results;
}
if (_options.ExcludeNamespaces.Contains(ns, StringComparer.OrdinalIgnoreCase))
{
return results;
}
// Extract common fields
var severity = ExtractSeverity(root);
var title = GetStringProperty(root, "title") ??
GetStringProperty(root, "vulnerabilityName") ??
GetStringProperty(root, "name");
var description = GetStringProperty(root, "description") ??
GetStringProperty(root, "shortDescription") ??
GetStringProperty(root, "summary");
var references = ExtractReferences(root);
var cvss = ExtractCvss(root);
var cweIds = ExtractCweIds(root);
var publishedDate = ExtractDate(root, "publishedDate", "dateAdded", "published");
var modifiedDate = ExtractDate(root, "lastModifiedDate", "dateUpdated", "modified");
// Extract affected packages
var packages = ExtractAffectedPackages(root);
if (packages.Count == 0)
{
// Create one entry per CVE without package info
foreach (var cveId in cveIds)
{
results.Add(new TrivyVulnerability
{
VulnerabilityId = cveId,
PackageName = "*", // Wildcard for unspecified
Severity = severity,
Title = title,
Description = description,
References = references,
Cvss = cvss,
CweIds = cweIds,
PublishedDate = publishedDate,
LastModifiedDate = modifiedDate,
DataSource = new TrivyDataSource
{
Id = ns,
Name = vendor ?? ns,
Url = sourceRef
}
});
}
}
else
{
// Create entries for each CVE + package combination
foreach (var cveId in cveIds)
{
foreach (var pkg in packages)
{
results.Add(new TrivyVulnerability
{
VulnerabilityId = cveId,
PackageName = pkg.Name,
InstalledVersion = pkg.VulnerableRange,
FixedVersion = pkg.FixedVersion,
Severity = severity,
Title = title,
Description = description,
References = references,
Cvss = cvss,
CweIds = cweIds,
PublishedDate = publishedDate,
LastModifiedDate = modifiedDate,
DataSource = new TrivyDataSource
{
Id = ns,
Name = vendor ?? ns,
Url = sourceRef
}
});
}
}
}
}
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to parse advisory JSON");
}
return results;
}
private List<string> ExtractCveIds(JsonElement root)
{
var cveIds = new List<string>();
// Try various paths for CVE identifiers
if (TryGetArray(root, "identifiers", "cve", out var cveArray) ||
TryGetArray(root, "cveIDs", out cveArray) ||
TryGetArray(root, "CVEIDs", out cveArray) ||
TryGetArray(root, "aliases", out cveArray))
{
foreach (var item in cveArray)
{
if (item.ValueKind == JsonValueKind.String)
{
var id = item.GetString();
if (!string.IsNullOrWhiteSpace(id) && id.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
{
cveIds.Add(id.ToUpperInvariant());
}
}
}
}
// Try single cveID field
var singleCve = GetStringProperty(root, "cveID") ??
GetStringProperty(root, "cve") ??
GetStringProperty(root, "id");
if (!string.IsNullOrWhiteSpace(singleCve) &&
singleCve.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase) &&
!cveIds.Contains(singleCve, StringComparer.OrdinalIgnoreCase))
{
cveIds.Add(singleCve.ToUpperInvariant());
}
return cveIds;
}
private string ExtractSeverity(JsonElement root)
{
var severity = GetStringProperty(root, "severity", "normalized") ??
GetStringProperty(root, "severity") ??
GetStringProperty(root, "severityLevel") ??
GetStringProperty(root, "cvss", "severity");
return TrivySeverityMapper.MapSeverity(severity);
}
private List<string> ExtractReferences(JsonElement root)
{
var refs = new List<string>();
if (TryGetArray(root, "references", out var refArray))
{
foreach (var item in refArray)
{
string? url = null;
if (item.ValueKind == JsonValueKind.String)
{
url = item.GetString();
}
else if (item.ValueKind == JsonValueKind.Object)
{
url = GetStringProperty(item, "url") ?? GetStringProperty(item, "href");
}
if (!string.IsNullOrWhiteSpace(url) && Uri.TryCreate(url, UriKind.Absolute, out _))
{
refs.Add(url);
}
}
}
return refs;
}
private IReadOnlyDictionary<string, TrivyCvss>? ExtractCvss(JsonElement root)
{
var cvssDict = new Dictionary<string, TrivyCvss>();
var count = 0;
// Try array of CVSS entries
if (TryGetArray(root, "cvss", out var cvssArray))
{
foreach (var item in cvssArray)
{
if (count >= _options.MaxCvssVectorsPerEntry)
break;
var source = GetStringProperty(item, "source") ?? "nvd";
var entry = ParseCvssEntry(item);
if (entry is not null && !cvssDict.ContainsKey(source))
{
cvssDict[source] = entry;
count++;
}
}
}
// Try single CVSS object
if (cvssDict.Count == 0 && root.TryGetProperty("cvss", out var cvssObj) &&
cvssObj.ValueKind == JsonValueKind.Object)
{
var entry = ParseCvssEntry(cvssObj);
if (entry is not null)
{
cvssDict["nvd"] = entry;
}
}
// Try metrics.cvssMetricV3* paths (NVD format)
if (cvssDict.Count == 0 && root.TryGetProperty("metrics", out var metrics))
{
if ((metrics.TryGetProperty("cvssMetricV31", out var v31Array) ||
     metrics.TryGetProperty("cvssMetricV30", out v31Array)) &&
    v31Array.ValueKind == JsonValueKind.Array)
{
foreach (var metric in v31Array.EnumerateArray().Take(1))
{
if (metric.ValueKind == JsonValueKind.Object && metric.TryGetProperty("cvssData", out var cvssData))
{
var entry = ParseCvssEntry(cvssData);
if (entry is not null)
{
cvssDict["nvd"] = entry;
break;
}
}
}
}
}
return cvssDict.Count > 0 ? cvssDict : null;
}
private TrivyCvss? ParseCvssEntry(JsonElement element)
{
var v2Vector = GetStringProperty(element, "vectorString") ??
GetStringProperty(element, "vector") ??
GetStringProperty(element, "v2Vector");
var v3Vector = GetStringProperty(element, "vectorString") ??
GetStringProperty(element, "vector") ??
GetStringProperty(element, "v3Vector");
// Determine version from vector string
if (v2Vector?.StartsWith("AV:", StringComparison.OrdinalIgnoreCase) == true ||
v2Vector?.StartsWith("(AV:", StringComparison.OrdinalIgnoreCase) == true)
{
v3Vector = null;
}
else if (v3Vector?.StartsWith("CVSS:3", StringComparison.OrdinalIgnoreCase) == true)
{
v2Vector = null;
}
double? v2Score = null, v3Score = null;
if (element.TryGetProperty("score", out var scoreProp) ||
    element.TryGetProperty("baseScore", out scoreProp))
{
    // TryGetDouble throws on non-numeric JSON values, so check ValueKind first.
    if (scoreProp.ValueKind == JsonValueKind.Number && scoreProp.TryGetDouble(out var score))
{
if (v2Vector is not null)
v2Score = score;
else
v3Score = score;
}
}
if (v2Vector is null && v3Vector is null && v2Score is null && v3Score is null)
return null;
return new TrivyCvss
{
V2Vector = v2Vector,
V3Vector = v3Vector,
V2Score = v2Score,
V3Score = v3Score
};
}
private List<string> ExtractCweIds(JsonElement root)
{
var cweIds = new List<string>();
if (TryGetArray(root, "cweIDs", out var cweArray) ||
TryGetArray(root, "cwes", out cweArray) ||
TryGetArray(root, "weaknesses", out cweArray))
{
foreach (var item in cweArray)
{
string? cweId = null;
if (item.ValueKind == JsonValueKind.String)
{
cweId = item.GetString();
}
else if (item.ValueKind == JsonValueKind.Object)
{
cweId = GetStringProperty(item, "cweId") ?? GetStringProperty(item, "id");
}
if (!string.IsNullOrWhiteSpace(cweId))
{
// Normalize to CWE-### format
if (!cweId.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
{
cweId = $"CWE-{cweId}";
}
cweIds.Add(cweId.ToUpperInvariant());
}
}
}
return cweIds;
}
private DateTimeOffset? ExtractDate(JsonElement root, params string[] paths)
{
foreach (var path in paths)
{
var value = GetStringProperty(root, path);
if (!string.IsNullOrWhiteSpace(value) &&
DateTimeOffset.TryParse(value, out var date))
{
return date;
}
}
return null;
}
private List<AffectedPackage> ExtractAffectedPackages(JsonElement root)
{
var packages = new List<AffectedPackage>();
// Try various paths for affected packages
JsonElement.ArrayEnumerator? affectedArray = null;
if (root.TryGetProperty("affects", out var affects) &&
affects.ValueKind == JsonValueKind.Array)
{
affectedArray = affects.EnumerateArray();
}
else if (root.TryGetProperty("affected", out var affected) &&
affected.ValueKind == JsonValueKind.Array)
{
affectedArray = affected.EnumerateArray();
}
else if (root.TryGetProperty("vulnerabilities", out var vulns) &&
vulns.ValueKind == JsonValueKind.Array)
{
// CISA KEV style
foreach (var vuln in vulns.EnumerateArray())
{
var product = GetStringProperty(vuln, "product");
if (!string.IsNullOrWhiteSpace(product))
{
packages.Add(new AffectedPackage(product, null, null));
}
}
return packages;
}
if (affectedArray.HasValue)
{
foreach (var item in affectedArray.Value)
{
var name = GetStringProperty(item, "package", "name") ??
GetStringProperty(item, "name") ??
GetStringProperty(item, "packageName");
var range = GetStringProperty(item, "vulnerableRange") ??
GetStringProperty(item, "versionRange") ??
GetStringProperty(item, "version");
var fixedVer = GetStringProperty(item, "fixedVersion") ??
GetStringProperty(item, "patchedVersions") ??
GetStringProperty(item, "remediation", "fixedVersion");
if (!string.IsNullOrWhiteSpace(name))
{
packages.Add(new AffectedPackage(name, range, fixedVer));
}
}
}
return packages;
}
private static string? GetStringProperty(JsonElement element, params string[] paths)
{
    var current = element;
    foreach (var path in paths)
    {
        // TryGetProperty throws unless the current element is a JSON object, so guard first
        // (e.g. "severity" may be a plain string while a nested path is probed).
        if (current.ValueKind != JsonValueKind.Object ||
            !current.TryGetProperty(path, out var next))
            return null;
        current = next;
    }
    return current.ValueKind == JsonValueKind.String ? current.GetString() : null;
}
private static bool TryGetArray(JsonElement element, string property, out JsonElement.ArrayEnumerator result)
{
    result = default;
    if (element.ValueKind == JsonValueKind.Object &&
        element.TryGetProperty(property, out var prop) &&
        prop.ValueKind == JsonValueKind.Array)
    {
        result = prop.EnumerateArray();
        return true;
    }
    return false;
}
private static bool TryGetArray(JsonElement element, string prop1, string prop2, out JsonElement.ArrayEnumerator result)
{
    result = default;
    if (element.ValueKind == JsonValueKind.Object &&
        element.TryGetProperty(prop1, out var nested) &&
        nested.ValueKind == JsonValueKind.Object &&
        nested.TryGetProperty(prop2, out var array) &&
        array.ValueKind == JsonValueKind.Array)
    {
        result = array.EnumerateArray();
        return true;
    }
    return false;
}
private sealed record AffectedPackage(string Name, string? VulnerableRange, string? FixedVersion);
}
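// Mapping sketch (hypothetical advisory shape, not a fixed input contract):
//   { "cveIDs": ["CVE-2024-0001"], "vendor": "Ubuntu", "severity": "high",
//     "affected": [{ "name": "openssl", "version": "<3.0.13", "fixedVersion": "3.0.13" }] }
// yields one TrivyVulnerability with VulnerabilityId "CVE-2024-0001",
// PackageName "openssl", InstalledVersion "<3.0.13", FixedVersion "3.0.13",
// Severity "HIGH", and DataSource.Id "ubuntu".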


@@ -0,0 +1,110 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.Core.Crypto;
/// <summary>
/// Extension methods for registering export crypto services.
/// </summary>
public static class CryptoServiceCollectionExtensions
{
/// <summary>
/// Adds export crypto services with default configuration.
/// Routes hashing, signing, and encryption through ICryptoProviderRegistry and ICryptoHash.
/// </summary>
public static IServiceCollection AddExportCryptoServices(this IServiceCollection services)
{
return services.AddExportCryptoServices(_ => { });
}
/// <summary>
/// Adds export crypto services with custom configuration.
/// </summary>
public static IServiceCollection AddExportCryptoServices(
this IServiceCollection services,
Action<ExportCryptoOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(configureOptions);
// Register crypto service
services.TryAddSingleton<IExportCryptoService, ExportCryptoService>();
// Register factory for creating services with custom options
services.TryAddSingleton<IExportCryptoServiceFactory, ExportCryptoServiceFactory>();
return services;
}
/// <summary>
/// Adds export crypto services with provider selection.
/// </summary>
public static IServiceCollection AddExportCryptoServicesWithProvider(
this IServiceCollection services,
string preferredProvider,
Action<ExportCryptoOptions>? additionalConfig = null)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentException.ThrowIfNullOrWhiteSpace(preferredProvider);
return services.AddExportCryptoServices(options =>
{
options.PreferredProvider = preferredProvider;
additionalConfig?.Invoke(options);
});
}
/// <summary>
/// Adds export crypto services for FIPS compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForFips(
this IServiceCollection services,
string? keyId = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "SHA-256";
options.SigningAlgorithm = "ES256";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
});
}
/// <summary>
/// Adds export crypto services for GOST compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForGost(
this IServiceCollection services,
string? keyId = null,
string? preferredProvider = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "GOST-R-34.11-2012-256";
options.SigningAlgorithm = "GOST-R-34.10-2012-256";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
options.PreferredProvider = preferredProvider;
});
}
/// <summary>
/// Adds export crypto services for SM (Chinese cryptography) compliance mode.
/// </summary>
public static IServiceCollection AddExportCryptoServicesForSm(
this IServiceCollection services,
string? keyId = null,
string? preferredProvider = null)
{
return services.AddExportCryptoServices(options =>
{
options.HashAlgorithm = "SM3";
options.SigningAlgorithm = "SM2";
options.UseComplianceProfile = true;
options.DefaultKeyId = keyId;
options.PreferredProvider = preferredProvider;
});
}
}
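// Usage sketch ("export-signing-key" and "pkcs11" are hypothetical identifiers):
//   services.AddExportCryptoServicesForFips(keyId: "export-signing-key");
//   services.AddExportCryptoServicesWithProvider("pkcs11", o => o.DefaultKeyId = "hsm-key-1");
// The profile helpers only populate ExportCryptoOptions; whether an algorithm is
// actually available still depends on the providers registered with ICryptoProviderRegistry.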


@@ -0,0 +1,396 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// AES-256-GCM bundle encryptor implementation.
/// Follows the KMS envelope pattern with DEK per run and per-file nonces.
/// </summary>
public sealed class AesGcmBundleEncryptor : IBundleEncryptor
{
private readonly ILogger<AesGcmBundleEncryptor> _logger;
private readonly IBundleKeyWrapperFactory _keyWrapperFactory;
private readonly BundleEncryptionOptions _options;
private const int TagSizeBytes = 16; // 128-bit authentication tag
public AesGcmBundleEncryptor(
ILogger<AesGcmBundleEncryptor> logger,
IBundleKeyWrapperFactory keyWrapperFactory,
IOptions<BundleEncryptionOptions> options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_keyWrapperFactory = keyWrapperFactory ?? throw new ArgumentNullException(nameof(keyWrapperFactory));
_options = options?.Value ?? new BundleEncryptionOptions();
}
/// <inheritdoc />
public async Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.Files.Count == 0)
{
return BundleEncryptResult.Failed("No files to encrypt");
}
var hasAgeRecipients = request.AgeRecipients.Count > 0;
var hasKmsKey = !string.IsNullOrEmpty(request.KmsKeyId);
if (!hasAgeRecipients && !hasKmsKey)
{
return BundleEncryptResult.Failed("At least one age recipient or KMS key ID is required");
}
try
{
// Generate DEK for this run
var dek = GenerateDek();
try
{
// Wrap DEK for all recipients
var wrappedKeys = await WrapDekForRecipientsAsync(
dek,
request.AgeRecipients,
request.KmsKeyId,
request.TenantId,
request.RunId,
cancellationToken);
// Encrypt all files
var encryptedFiles = new Dictionary<string, byte[]>();
var fileMetadata = new List<EncryptedFileMetadata>();
foreach (var (relativePath, content) in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var (ciphertext, metadata) = EncryptFile(
dek,
relativePath,
content,
request.RunId);
encryptedFiles[relativePath] = ciphertext;
fileMetadata.Add(metadata);
}
// Determine mode string
var modeString = hasKmsKey ? "aes-gcm+kms" : "age";
var encryptionMetadata = new BundleEncryptionMetadata
{
Mode = modeString,
AadFormat = "{runId}:{relativePath}",
NonceFormat = "random-12",
Recipients = wrappedKeys.OrderBy(r => r.Type)
.ThenBy(r => r.Recipient ?? r.KmsKeyId)
.ToList(),
Files = fileMetadata.OrderBy(f => f.Path).ToList()
};
_logger.LogInformation(
"Encrypted {FileCount} files for run {RunId} with {RecipientCount} recipients",
encryptedFiles.Count,
request.RunId,
wrappedKeys.Count);
return new BundleEncryptResult
{
Success = true,
EncryptedFiles = encryptedFiles,
Metadata = encryptionMetadata
};
}
finally
{
// Zeroize DEK
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to encrypt bundle for run {RunId}", request.RunId);
return BundleEncryptResult.Failed($"Encryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(request.Metadata);
if (request.EncryptedFiles.Count == 0)
{
return BundleDecryptResult.Failed("No files to decrypt");
}
try
{
// Find a recipient we can unwrap
var dek = await UnwrapDekAsync(request.Metadata.Recipients, request.AgePrivateKey, cancellationToken);
if (dek is null)
{
return BundleDecryptResult.Failed("No available key to unwrap DEK");
}
try
{
var decryptedFiles = new Dictionary<string, byte[]>();
var verificationFailures = new List<string>();
// Build file metadata lookup
var metadataLookup = request.Metadata.Files.ToDictionary(f => f.Path);
foreach (var (relativePath, ciphertext) in request.EncryptedFiles)
{
cancellationToken.ThrowIfCancellationRequested();
if (!metadataLookup.TryGetValue(relativePath, out var fileMetadata))
{
_logger.LogWarning("No metadata found for encrypted file {Path}", relativePath);
verificationFailures.Add(relativePath);
continue;
}
try
{
var plaintext = DecryptFile(
dek,
relativePath,
ciphertext,
fileMetadata,
request.RunId);
// Verify hash if available
if (_options.IncludeFileHashes && !string.IsNullOrEmpty(fileMetadata.OriginalHash))
{
var actualHash = ComputeHash(plaintext);
if (!string.Equals(actualHash, fileMetadata.OriginalHash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Hash mismatch for {Path}: expected {Expected}, got {Actual}",
relativePath,
fileMetadata.OriginalHash,
actualHash);
verificationFailures.Add(relativePath);
}
}
decryptedFiles[relativePath] = plaintext;
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to decrypt file {Path}", relativePath);
verificationFailures.Add(relativePath);
}
}
_logger.LogInformation(
"Decrypted {FileCount} files for run {RunId}, {FailureCount} failures",
decryptedFiles.Count,
request.RunId,
verificationFailures.Count);
return new BundleDecryptResult
{
Success = verificationFailures.Count == 0,
DecryptedFiles = decryptedFiles,
VerificationFailures = verificationFailures
};
}
finally
{
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to decrypt bundle for run {RunId}", request.RunId);
return BundleDecryptResult.Failed($"Decryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public Task<IReadOnlyList<string>> VerifyDecryptedContentAsync(
BundleDecryptResult decryptResult,
BundleEncryptionMetadata metadata,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(decryptResult);
ArgumentNullException.ThrowIfNull(metadata);
var failures = new List<string>();
var metadataLookup = metadata.Files.ToDictionary(f => f.Path);
foreach (var (path, content) in decryptResult.DecryptedFiles)
{
if (!metadataLookup.TryGetValue(path, out var fileMetadata))
{
failures.Add(path);
continue;
}
if (!string.IsNullOrEmpty(fileMetadata.OriginalHash))
{
var actualHash = ComputeHash(content);
if (!string.Equals(actualHash, fileMetadata.OriginalHash, StringComparison.OrdinalIgnoreCase))
{
failures.Add(path);
}
}
}
return Task.FromResult<IReadOnlyList<string>>(failures);
}
private byte[] GenerateDek()
{
var dek = new byte[_options.DekSizeBytes];
RandomNumberGenerator.Fill(dek);
return dek;
}
private byte[] GenerateNonce()
{
var nonce = new byte[_options.NonceSizeBytes];
RandomNumberGenerator.Fill(nonce);
return nonce;
}
private async Task<List<WrappedKeyRecipient>> WrapDekForRecipientsAsync(
byte[] dek,
IReadOnlyList<string> ageRecipients,
string? kmsKeyId,
Guid tenantId,
Guid runId,
CancellationToken cancellationToken)
{
var wrappedKeys = new List<WrappedKeyRecipient>();
// Wrap for age recipients
if (ageRecipients.Count > 0)
{
var ageWrapper = _keyWrapperFactory.GetWrapper(KeyWrapperType.Age);
foreach (var recipient in ageRecipients.OrderBy(r => r))
{
var wrapped = await ageWrapper.WrapKeyAsync(dek, recipient, tenantId, runId, cancellationToken);
wrappedKeys.Add(wrapped);
}
}
// Wrap for KMS
if (!string.IsNullOrEmpty(kmsKeyId))
{
var kmsWrapper = _keyWrapperFactory.GetWrapper(KeyWrapperType.Kms);
var wrapped = await kmsWrapper.WrapKeyAsync(dek, kmsKeyId, tenantId, runId, cancellationToken);
wrappedKeys.Add(wrapped);
}
return wrappedKeys;
}
private async Task<byte[]?> UnwrapDekAsync(
IReadOnlyList<WrappedKeyRecipient> recipients,
string? agePrivateKey,
CancellationToken cancellationToken)
{
var wrappers = _keyWrapperFactory.GetAllWrappers();
foreach (var recipient in recipients)
{
foreach (var wrapper in wrappers)
{
if (!wrapper.CanUnwrap(recipient))
continue;
var result = await wrapper.UnwrapKeyAsync(recipient, agePrivateKey, cancellationToken);
if (result.Success && result.Key is not null)
{
return result.Key;
}
}
}
return null;
}
private (byte[] Ciphertext, EncryptedFileMetadata Metadata) EncryptFile(
byte[] dek,
string relativePath,
byte[] content,
Guid runId)
{
var nonce = GenerateNonce();
var aad = DeriveAad(runId, relativePath);
// Ciphertext will be: ciphertext || tag
var ciphertext = new byte[content.Length];
var tag = new byte[TagSizeBytes];
using var aesGcm = new AesGcm(dek, TagSizeBytes);
aesGcm.Encrypt(nonce, content, ciphertext, tag, aad);
// Combine ciphertext and tag
var combined = new byte[ciphertext.Length + tag.Length];
Buffer.BlockCopy(ciphertext, 0, combined, 0, ciphertext.Length);
Buffer.BlockCopy(tag, 0, combined, ciphertext.Length, tag.Length);
var metadata = new EncryptedFileMetadata
{
Path = relativePath,
Nonce = Convert.ToBase64String(nonce),
OriginalSize = content.Length,
EncryptedSize = combined.Length,
OriginalHash = _options.IncludeFileHashes ? ComputeHash(content) : null
};
return (combined, metadata);
}
private byte[] DecryptFile(
byte[] dek,
string relativePath,
byte[] combined,
EncryptedFileMetadata metadata,
Guid runId)
{
var nonce = Convert.FromBase64String(metadata.Nonce);
var aad = DeriveAad(runId, relativePath);
// Split combined into ciphertext and tag
var ciphertext = new byte[combined.Length - TagSizeBytes];
var tag = new byte[TagSizeBytes];
Buffer.BlockCopy(combined, 0, ciphertext, 0, ciphertext.Length);
Buffer.BlockCopy(combined, ciphertext.Length, tag, 0, TagSizeBytes);
var plaintext = new byte[ciphertext.Length];
using var aesGcm = new AesGcm(dek, TagSizeBytes);
aesGcm.Decrypt(nonce, ciphertext, tag, plaintext, aad);
return plaintext;
}
private static byte[] DeriveAad(Guid runId, string relativePath)
{
// AAD format: {runId}:{relativePath}
var aadString = $"{runId:N}:{relativePath}";
return System.Text.Encoding.UTF8.GetBytes(aadString);
}
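// AAD sketch: for run ID 3f2a... (formatted "N", no dashes) and file "data/export.json"
// the AAD is "3f2a...:data/export.json". Folding the run ID and relative path into the
// GCM tag means a ciphertext that is renamed, relocated, or copied into another run's
// bundle fails authentication instead of decrypting silently.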
private static string ComputeHash(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
}


@@ -0,0 +1,409 @@
using System.Diagnostics;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// age X25519 key wrapper implementation.
/// Supports wrapping DEKs for offline/air-gapped environments.
/// </summary>
public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
{
private readonly ILogger<AgeBundleKeyWrapper> _logger;
private readonly BundleEncryptionOptions _options;
public AgeBundleKeyWrapper(
ILogger<AgeBundleKeyWrapper> logger,
IOptions<BundleEncryptionOptions> options)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new BundleEncryptionOptions();
}
/// <inheritdoc />
public KeyWrapperType Type => KeyWrapperType.Age;
/// <inheritdoc />
public async Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(dek);
ArgumentException.ThrowIfNullOrWhiteSpace(recipient);
// Validate recipient format (age public key)
if (!IsValidAgeRecipient(recipient))
{
throw new ArgumentException($"Invalid age recipient format: {recipient}", nameof(recipient));
}
byte[] wrappedKey;
if (_options.UseNativeAge && IsNativeAgeAvailable())
{
wrappedKey = await WrapWithNativeAgeAsync(dek, recipient, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, cliPath!, cancellationToken);
}
else
{
// No age tooling found: WrapWithX25519 throws rather than emit an undecryptable bundle.
wrappedKey = WrapWithX25519(dek, recipient);
}
_logger.LogDebug(
"Wrapped DEK for age recipient {Recipient}",
MaskRecipient(recipient));
return new WrappedKeyRecipient
{
Type = "age",
Recipient = recipient,
WrappedKey = Convert.ToBase64String(wrappedKey),
KeyId = ComputeKeyId(recipient)
};
}
/// <inheritdoc />
public async Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(wrappedKey);
if (string.IsNullOrEmpty(privateKey))
{
return KeyUnwrapResult.Failed("age private key is required for unwrapping");
}
if (!IsValidAgePrivateKey(privateKey))
{
return KeyUnwrapResult.Failed("Invalid age private key format");
}
try
{
var wrappedBytes = Convert.FromBase64String(wrappedKey.WrappedKey);
byte[] dek;
if (_options.UseNativeAge && IsNativeAgeAvailable())
{
dek = await UnwrapWithNativeAgeAsync(wrappedBytes, privateKey, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, cliPath!, cancellationToken);
}
else
{
// No age tooling found: UnwrapWithX25519 throws (see note on that method).
dek = UnwrapWithX25519(wrappedBytes, privateKey);
}
_logger.LogDebug("Unwrapped DEK from age recipient");
return new KeyUnwrapResult
{
Success = true,
Key = dek
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to unwrap DEK with age");
return KeyUnwrapResult.Failed($"age unwrap failed: {ex.Message}");
}
}
/// <inheritdoc />
public bool CanUnwrap(WrappedKeyRecipient wrappedKey)
{
return string.Equals(wrappedKey.Type, "age", StringComparison.OrdinalIgnoreCase) &&
!string.IsNullOrEmpty(wrappedKey.Recipient);
}
private static bool IsValidAgeRecipient(string recipient)
{
// age public keys start with "age1" and are Bech32 encoded
return recipient.StartsWith("age1", StringComparison.OrdinalIgnoreCase) &&
recipient.Length >= 59;
}
private static bool IsValidAgePrivateKey(string privateKey)
{
// age private keys start with "AGE-SECRET-KEY-1"
return privateKey.StartsWith("AGE-SECRET-KEY-1", StringComparison.OrdinalIgnoreCase);
}
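// Format sketch: an age recipient is a Bech32 string shaped like "age1..."
// (typically 62 characters for X25519 keys), an identity like "AGE-SECRET-KEY-1...".
// Only the prefix and a minimum length are checked here; full Bech32 checksum
// validation is delegated to the age tooling itself.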
private static bool IsNativeAgeAvailable()
{
// Check if native age library is available
// For now, return false - native implementation would require additional NuGet package
return false;
}
private static bool TryFindAgeCli(out string? path)
{
path = null;
// Try common locations
var candidates = new[]
{
"age",
"/usr/bin/age",
"/usr/local/bin/age",
@"C:\Program Files\age\age.exe"
};
foreach (var candidate in candidates)
{
if (File.Exists(candidate))
{
path = candidate;
return true;
}
}
// Try PATH
try
{
var startInfo = new ProcessStartInfo
{
FileName = "age",
Arguments = "--version",
RedirectStandardOutput = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo);
if (process is not null)
{
// WaitForExit(int) returns false on timeout; only inspect ExitCode after a clean exit.
if (process.WaitForExit(1000) && process.ExitCode == 0)
{
path = "age";
return true;
}
}
}
catch
{
// age CLI not found in PATH
}
return false;
}
private static async Task<byte[]> WrapWithNativeAgeAsync(
byte[] dek,
string recipient,
CancellationToken cancellationToken)
{
// Native age implementation would go here. Currently unreachable:
// IsNativeAgeAvailable() always returns false, so this path is never taken.
await Task.CompletedTask;
return WrapWithX25519(dek, recipient);
}
private static async Task<byte[]> UnwrapWithNativeAgeAsync(
byte[] wrapped,
string privateKey,
CancellationToken cancellationToken)
{
await Task.CompletedTask;
return UnwrapWithX25519(wrapped, privateKey);
}
private static async Task<byte[]> WrapWithAgeCliAsync(
byte[] dek,
string recipient,
string agePath,
CancellationToken cancellationToken)
{
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--encrypt --recipient {recipient}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");
await process.StandardInput.BaseStream.WriteAsync(dek, cancellationToken);
process.StandardInput.Close();
var output = await ReadStreamToEndAsync(process.StandardOutput.BaseStream, cancellationToken);
await process.WaitForExitAsync(cancellationToken);
if (process.ExitCode != 0)
{
var error = await process.StandardError.ReadToEndAsync(cancellationToken);
throw new InvalidOperationException($"age encrypt failed: {error}");
}
return output;
}
private static async Task<byte[]> UnwrapWithAgeCliAsync(
byte[] wrapped,
string privateKey,
string agePath,
CancellationToken cancellationToken)
{
// Write identity to temp file
var identityPath = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(identityPath, privateKey, cancellationToken);
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--decrypt --identity {identityPath}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");
await process.StandardInput.BaseStream.WriteAsync(wrapped, cancellationToken);
process.StandardInput.Close();
var output = await ReadStreamToEndAsync(process.StandardOutput.BaseStream, cancellationToken);
await process.WaitForExitAsync(cancellationToken);
if (process.ExitCode != 0)
{
var error = await process.StandardError.ReadToEndAsync(cancellationToken);
throw new InvalidOperationException($"age decrypt failed: {error}");
}
return output;
}
finally
{
File.Delete(identityPath);
}
}
private static byte[] WrapWithX25519(byte[] dek, string recipient)
{
    // No managed age-compatible X25519 implementation is available here. The earlier
    // placeholder encrypted the DEK under a freshly generated AES key that was never
    // stored or derivable from the recipient, so the resulting bundle could never be
    // decrypted. Failing loudly is safer than silently producing unrecoverable output:
    // callers must supply the age CLI (or a future native age library) for age recipients.
    throw new NotSupportedException(
        "age recipient wrapping requires the age CLI or a native age implementation; " +
        $"none was found for recipient {MaskRecipient(recipient)}.");
}
private static byte[] UnwrapWithX25519(byte[] wrapped, string privateKey)
{
    // Mirror of WrapWithX25519: without a real age implementation the DEK cannot be
    // recovered (the earlier placeholder decrypted with an all-zero key), so fail loudly.
    throw new NotSupportedException(
        "age identity unwrapping requires the age CLI or a native age implementation.");
}
private static string MaskRecipient(string recipient)
{
if (recipient.Length <= 12)
return "***";
return $"{recipient[..8]}...{recipient[^4..]}";
}
private static string ComputeKeyId(string recipient)
{
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(recipient));
return Convert.ToHexString(hash[..8]).ToLowerInvariant();
}
private static async Task<byte[]> ReadStreamToEndAsync(Stream stream, CancellationToken cancellationToken)
{
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
return ms.ToArray();
}
}


@@ -0,0 +1,302 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Encryption mode for export bundles.
/// </summary>
public enum BundleEncryptionMode
{
/// <summary>
/// No encryption.
/// </summary>
None = 0,
/// <summary>
/// age encryption (X25519) - offline-friendly.
/// </summary>
Age = 1,
/// <summary>
/// AES-GCM with KMS key wrapping.
/// </summary>
AesGcmKms = 2
}
/// <summary>
/// Type of key wrapping recipient.
/// </summary>
public enum KeyWrapperType
{
/// <summary>
/// age X25519 recipient.
/// </summary>
Age = 1,
/// <summary>
/// KMS key wrapper.
/// </summary>
Kms = 2
}
/// <summary>
/// Encrypted file metadata stored alongside ciphertext.
/// </summary>
public sealed record EncryptedFileMetadata
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// 12-byte nonce (base64 encoded).
/// </summary>
[JsonPropertyName("nonce")]
public required string Nonce { get; init; }
/// <summary>
/// Original file size in bytes.
/// </summary>
[JsonPropertyName("originalSize")]
public long OriginalSize { get; init; }
/// <summary>
/// Encrypted size in bytes.
/// </summary>
[JsonPropertyName("encryptedSize")]
public long EncryptedSize { get; init; }
/// <summary>
/// SHA-256 hash of original content (prefixed with sha256:).
/// </summary>
[JsonPropertyName("originalHash")]
public string? OriginalHash { get; init; }
}
/// <summary>
/// Wrapped key recipient entry for provenance.
/// </summary>
public sealed record WrappedKeyRecipient
{
/// <summary>
/// Type of wrapper (age or kms).
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// age recipient public key (when type=age).
/// </summary>
[JsonPropertyName("recipient")]
public string? Recipient { get; init; }
/// <summary>
/// KMS key ID (when type=kms).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Wrapped DEK (base64 encoded).
/// </summary>
[JsonPropertyName("wrappedKey")]
public required string WrappedKey { get; init; }
/// <summary>
/// Optional key ID for identification.
/// </summary>
[JsonPropertyName("keyId")]
public string? KeyId { get; init; }
/// <summary>
/// KMS algorithm used (when type=kms).
/// </summary>
[JsonPropertyName("algorithm")]
public string? Algorithm { get; init; }
}
/// <summary>
/// Encryption metadata for provenance.json.
/// </summary>
public sealed record BundleEncryptionMetadata
{
/// <summary>
/// Encryption mode (age or aes-gcm+kms).
/// </summary>
[JsonPropertyName("mode")]
public required string Mode { get; init; }
/// <summary>
/// AAD format template (e.g., {runId}:{relativePath}).
/// </summary>
[JsonPropertyName("aadFormat")]
public string AadFormat { get; init; } = "{runId}:{relativePath}";
/// <summary>
/// Nonce format (e.g., random-12).
/// </summary>
[JsonPropertyName("nonceFormat")]
public string NonceFormat { get; init; } = "random-12";
/// <summary>
/// List of wrapped key recipients (ordered deterministically).
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<WrappedKeyRecipient> Recipients { get; init; } = [];
/// <summary>
/// List of encrypted files metadata.
/// </summary>
[JsonPropertyName("files")]
public IReadOnlyList<EncryptedFileMetadata> Files { get; init; } = [];
}
/// <summary>
/// Request to encrypt bundle content.
/// </summary>
public sealed record BundleEncryptRequest
{
/// <summary>
/// Run ID for AAD derivation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Files to encrypt (relative path to content).
/// </summary>
public required IReadOnlyDictionary<string, byte[]> Files { get; init; }
/// <summary>
/// age recipients (public keys).
/// </summary>
public IReadOnlyList<string> AgeRecipients { get; init; } = [];
/// <summary>
/// KMS key ID for key wrapping.
/// </summary>
public string? KmsKeyId { get; init; }
}
/// <summary>
/// Result of bundle encryption.
/// </summary>
public sealed record BundleEncryptResult
{
/// <summary>
/// Whether encryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Encrypted files (relative path to ciphertext).
/// </summary>
public IReadOnlyDictionary<string, byte[]> EncryptedFiles { get; init; } = new Dictionary<string, byte[]>();
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public BundleEncryptionMetadata? Metadata { get; init; }
/// <summary>
/// Error message if encryption failed.
/// </summary>
public string? Error { get; init; }
public static BundleEncryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Request to decrypt bundle content.
/// </summary>
public sealed record BundleDecryptRequest
{
/// <summary>
/// Run ID for AAD derivation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Encryption metadata from provenance.
/// </summary>
public required BundleEncryptionMetadata Metadata { get; init; }
/// <summary>
/// Encrypted files (relative path to ciphertext).
/// </summary>
public required IReadOnlyDictionary<string, byte[]> EncryptedFiles { get; init; }
/// <summary>
/// age private key for decryption (when using age).
/// </summary>
public string? AgePrivateKey { get; init; }
}
/// <summary>
/// Result of bundle decryption.
/// </summary>
public sealed record BundleDecryptResult
{
/// <summary>
/// Whether decryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Decrypted files (relative path to plaintext).
/// </summary>
public IReadOnlyDictionary<string, byte[]> DecryptedFiles { get; init; } = new Dictionary<string, byte[]>();
/// <summary>
/// Error message if decryption failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Files that failed verification (hash mismatch).
/// </summary>
public IReadOnlyList<string> VerificationFailures { get; init; } = [];
public static BundleDecryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of key unwrapping operation.
/// </summary>
public sealed record KeyUnwrapResult
{
/// <summary>
/// Whether unwrapping succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Unwrapped DEK bytes.
/// </summary>
public byte[]? Key { get; init; }
/// <summary>
/// Error message if unwrapping failed.
/// </summary>
public string? Error { get; init; }
public static KeyUnwrapResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
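
Usage sketch (illustrative only): building this metadata and serializing it for provenance.json. The recipient string and wrapped-key bytes below are placeholders, not real key material.

using System.Text.Json;

var metadata = new BundleEncryptionMetadata
{
    Mode = "age",
    Recipients =
    [
        new WrappedKeyRecipient
        {
            Type = "age",
            Recipient = "age1examplepublickey...",
            WrappedKey = Convert.ToBase64String(new byte[32]) // placeholder wrapped DEK
        }
    ]
};
var provenanceJson = JsonSerializer.Serialize(metadata);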

View File

@@ -0,0 +1,93 @@
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Configuration options for bundle encryption.
/// </summary>
public sealed class BundleEncryptionOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "ExportCenter:Encryption";
/// <summary>
    /// Encryption mode (None, Age, or AesGcmKms); defaults to Age.
/// </summary>
public BundleEncryptionMode Mode { get; set; } = BundleEncryptionMode.Age;
/// <summary>
/// Whether encryption is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// List of age public key recipients for offline encryption.
/// </summary>
public List<string> Recipients { get; set; } = [];
/// <summary>
/// KMS key ID for key wrapping (when using KMS mode).
/// </summary>
public string? KmsKeyId { get; set; }
/// <summary>
/// KMS endpoint URL (optional, for custom endpoints).
/// </summary>
public string? KmsEndpoint { get; set; }
/// <summary>
/// KMS region (when using AWS KMS).
/// </summary>
public string? KmsRegion { get; set; }
/// <summary>
/// DEK size in bytes (default: 32 for AES-256).
/// </summary>
public int DekSizeBytes { get; set; } = 32;
/// <summary>
/// Nonce size in bytes (default: 12 for GCM).
/// </summary>
public int NonceSizeBytes { get; set; } = 12;
/// <summary>
/// Whether to include file hashes in metadata.
/// </summary>
public bool IncludeFileHashes { get; set; } = true;
/// <summary>
/// Path to age CLI binary (for age encryption).
/// </summary>
public string? AgeCliPath { get; set; }
/// <summary>
/// Whether to use native age library (when available).
/// </summary>
public bool UseNativeAge { get; set; } = true;
}
/// <summary>
/// Per-tenant encryption configuration.
/// </summary>
public sealed record TenantEncryptionConfig
{
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption mode for this tenant.
/// </summary>
public BundleEncryptionMode Mode { get; init; } = BundleEncryptionMode.Age;
/// <summary>
/// age recipients for this tenant.
/// </summary>
public IReadOnlyList<string> AgeRecipients { get; init; } = [];
/// <summary>
/// KMS key ID for this tenant.
/// </summary>
public string? KmsKeyId { get; init; }
}
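
A minimal binding sketch for these options, assuming a standard Microsoft.Extensions.Configuration composition root; the keys mirror the property names and the values are illustrative only.

using Microsoft.Extensions.Configuration;

var config = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["ExportCenter:Encryption:Mode"] = "Age",
        ["ExportCenter:Encryption:Recipients:0"] = "age1examplepublickey...",
        ["ExportCenter:Encryption:DekSizeBytes"] = "32"
    })
    .Build();
services.Configure<BundleEncryptionOptions>(config.GetSection(BundleEncryptionOptions.SectionName));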

View File

@@ -0,0 +1,117 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Extension methods for registering bundle encryption services.
/// </summary>
public static class BundleEncryptionServiceCollectionExtensions
{
/// <summary>
/// Adds bundle encryption services with options action.
/// </summary>
public static IServiceCollection AddBundleEncryption(
this IServiceCollection services,
Action<BundleEncryptionOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(configureOptions);
return services.AddBundleEncryptionCore();
}
/// <summary>
/// Adds bundle encryption services with default options.
/// </summary>
public static IServiceCollection AddBundleEncryption(this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.Configure<BundleEncryptionOptions>(_ => { });
return services.AddBundleEncryptionCore();
}
/// <summary>
/// Adds bundle encryption services for age-only mode (offline-friendly).
/// </summary>
public static IServiceCollection AddBundleEncryptionWithAge(
this IServiceCollection services,
IEnumerable<string> recipients)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(recipients);
var recipientList = recipients.ToList();
if (recipientList.Count == 0)
{
throw new ArgumentException("At least one age recipient is required", nameof(recipients));
}
return services.AddBundleEncryption(options =>
{
options.Mode = BundleEncryptionMode.Age;
options.Recipients = recipientList;
});
}
/// <summary>
/// Adds bundle encryption services for KMS mode.
/// </summary>
public static IServiceCollection AddBundleEncryptionWithKms(
this IServiceCollection services,
string kmsKeyId,
string? kmsEndpoint = null,
string? kmsRegion = null)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentException.ThrowIfNullOrWhiteSpace(kmsKeyId);
return services.AddBundleEncryption(options =>
{
options.Mode = BundleEncryptionMode.AesGcmKms;
options.KmsKeyId = kmsKeyId;
options.KmsEndpoint = kmsEndpoint;
options.KmsRegion = kmsRegion;
});
}
/// <summary>
/// Adds a stub KMS client for testing.
/// </summary>
public static IServiceCollection AddStubKmsClient(this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.TryAddSingleton<IKmsClient, StubKmsClient>();
return services;
}
/// <summary>
/// Adds a custom KMS client implementation.
/// </summary>
public static IServiceCollection AddKmsClient<TKmsClient>(this IServiceCollection services)
where TKmsClient : class, IKmsClient
{
ArgumentNullException.ThrowIfNull(services);
services.TryAddSingleton<IKmsClient, TKmsClient>();
return services;
}
private static IServiceCollection AddBundleEncryptionCore(this IServiceCollection services)
{
// Register key wrapper factory
services.TryAddSingleton<IBundleKeyWrapperFactory, BundleKeyWrapperFactory>();
// Register bundle encryptor
services.TryAddSingleton<IBundleEncryptor, AesGcmBundleEncryptor>();
return services;
}
}
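
Hypothetical composition-root usage of the extensions above; the age recipient and KMS key ARN are placeholders, and MyKmsClient stands in for a real IKmsClient implementation.

// age mode (offline-friendly)
services.AddBundleEncryptionWithAge(new[] { "age1examplepublickey..." });

// or KMS mode with a custom client
services.AddBundleEncryptionWithKms(
    kmsKeyId: "arn:aws:kms:eu-central-1:123456789012:key/placeholder",
    kmsRegion: "eu-central-1");
services.AddKmsClient<MyKmsClient>();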

View File

@@ -0,0 +1,52 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Default implementation of bundle key wrapper factory.
/// </summary>
public sealed class BundleKeyWrapperFactory : IBundleKeyWrapperFactory
{
private readonly ILogger<AgeBundleKeyWrapper> _ageLogger;
private readonly ILogger<KmsBundleKeyWrapper> _kmsLogger;
private readonly IOptions<BundleEncryptionOptions> _options;
private readonly IKmsClient? _kmsClient;
private readonly Dictionary<KeyWrapperType, IBundleKeyWrapper> _wrappers;
public BundleKeyWrapperFactory(
ILogger<AgeBundleKeyWrapper> ageLogger,
ILogger<KmsBundleKeyWrapper> kmsLogger,
IOptions<BundleEncryptionOptions> options,
IKmsClient? kmsClient = null)
{
_ageLogger = ageLogger ?? throw new ArgumentNullException(nameof(ageLogger));
_kmsLogger = kmsLogger ?? throw new ArgumentNullException(nameof(kmsLogger));
_options = options ?? throw new ArgumentNullException(nameof(options));
_kmsClient = kmsClient;
_wrappers = new Dictionary<KeyWrapperType, IBundleKeyWrapper>
{
[KeyWrapperType.Age] = new AgeBundleKeyWrapper(_ageLogger, _options),
[KeyWrapperType.Kms] = new KmsBundleKeyWrapper(_kmsLogger, _kmsClient)
};
}
/// <inheritdoc />
public IBundleKeyWrapper GetWrapper(KeyWrapperType type)
{
if (_wrappers.TryGetValue(type, out var wrapper))
{
return wrapper;
}
throw new ArgumentException($"Unknown key wrapper type: {type}", nameof(type));
}
/// <inheritdoc />
public IReadOnlyList<IBundleKeyWrapper> GetAllWrappers()
{
return _wrappers.Values.ToList();
}
}

View File

@@ -0,0 +1,165 @@
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// Interface for bundle encryption and decryption.
/// </summary>
public interface IBundleEncryptor
{
/// <summary>
/// Encrypts bundle content using the configured mode.
/// </summary>
Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts bundle content using the provided metadata and keys.
/// </summary>
Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies that decrypted content matches the original hashes.
/// </summary>
Task<IReadOnlyList<string>> VerifyDecryptedContentAsync(
BundleDecryptResult decryptResult,
BundleEncryptionMetadata metadata,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for DEK key wrapping.
/// </summary>
public interface IBundleKeyWrapper
{
/// <summary>
/// Key wrapper type.
/// </summary>
KeyWrapperType Type { get; }
/// <summary>
/// Wraps a DEK for the specified recipient.
/// </summary>
Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Unwraps a DEK using the wrapped key recipient entry.
/// </summary>
Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if this wrapper can unwrap the given recipient entry.
/// </summary>
bool CanUnwrap(WrappedKeyRecipient wrappedKey);
}
/// <summary>
/// Factory for creating bundle key wrappers.
/// </summary>
public interface IBundleKeyWrapperFactory
{
/// <summary>
/// Gets a key wrapper for the specified type.
/// </summary>
IBundleKeyWrapper GetWrapper(KeyWrapperType type);
/// <summary>
/// Gets all available key wrappers.
/// </summary>
IReadOnlyList<IBundleKeyWrapper> GetAllWrappers();
}
/// <summary>
/// Interface for KMS operations (abstraction for AWS KMS, Azure Key Vault, etc.).
/// </summary>
public interface IKmsClient
{
/// <summary>
/// Encrypts data using the specified KMS key.
/// </summary>
Task<KmsEncryptResult> EncryptAsync(
string keyId,
byte[] plaintext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts data using the specified KMS key.
/// </summary>
Task<KmsDecryptResult> DecryptAsync(
string keyId,
byte[] ciphertext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Generates a data key for envelope encryption.
/// </summary>
Task<KmsGenerateDataKeyResult> GenerateDataKeyAsync(
string keyId,
int keySizeBytes = 32,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of KMS encrypt operation.
/// </summary>
public sealed record KmsEncryptResult
{
public required bool Success { get; init; }
public byte[]? Ciphertext { get; init; }
public string? KeyId { get; init; }
public string? Algorithm { get; init; }
public string? Error { get; init; }
public static KmsEncryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of KMS decrypt operation.
/// </summary>
public sealed record KmsDecryptResult
{
public required bool Success { get; init; }
public byte[]? Plaintext { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
public static KmsDecryptResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Result of KMS generate data key operation.
/// </summary>
public sealed record KmsGenerateDataKeyResult
{
public required bool Success { get; init; }
public byte[]? Plaintext { get; init; }
public byte[]? CiphertextBlob { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
public static KmsGenerateDataKeyResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
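
A hedged sketch of how a caller might combine these interfaces to recover a DEK: iterate the provenance recipients and let the first capable wrapper unwrap. The factory, metadata, and private key are assumed to come from the caller's context.

static async Task<byte[]> UnwrapDekAsync(
    IBundleKeyWrapperFactory factory,
    BundleEncryptionMetadata metadata,
    string? agePrivateKey,
    CancellationToken ct)
{
    foreach (var recipient in metadata.Recipients)
    {
        foreach (var wrapper in factory.GetAllWrappers())
        {
            if (!wrapper.CanUnwrap(recipient))
            {
                continue;
            }
            var result = await wrapper.UnwrapKeyAsync(recipient, agePrivateKey, ct);
            if (result.Success && result.Key is not null)
            {
                return result.Key;
            }
        }
    }
    throw new InvalidOperationException("No configured wrapper could unwrap the bundle DEK.");
}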

View File

@@ -0,0 +1,260 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Crypto.Encryption;
/// <summary>
/// KMS key wrapper implementation.
/// Supports AWS KMS, Azure Key Vault, and other KMS providers via IKmsClient.
/// </summary>
public sealed class KmsBundleKeyWrapper : IBundleKeyWrapper
{
private readonly ILogger<KmsBundleKeyWrapper> _logger;
private readonly IKmsClient? _kmsClient;
public KmsBundleKeyWrapper(
ILogger<KmsBundleKeyWrapper> logger,
IKmsClient? kmsClient = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_kmsClient = kmsClient;
}
/// <inheritdoc />
public KeyWrapperType Type => KeyWrapperType.Kms;
/// <inheritdoc />
public async Task<WrappedKeyRecipient> WrapKeyAsync(
byte[] dek,
string recipient,
Guid? tenantId = null,
Guid? runId = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(dek);
ArgumentException.ThrowIfNullOrWhiteSpace(recipient);
if (_kmsClient is null)
{
throw new InvalidOperationException("KMS client is not configured");
}
// Build encryption context for key binding
var context = new Dictionary<string, string>
{
["purpose"] = "export-bundle-dek"
};
if (tenantId.HasValue)
{
context["tenantId"] = tenantId.Value.ToString("N");
}
if (runId.HasValue)
{
context["runId"] = runId.Value.ToString("N");
}
var result = await _kmsClient.EncryptAsync(recipient, dek, context, cancellationToken);
if (!result.Success || result.Ciphertext is null)
{
throw new InvalidOperationException($"KMS encrypt failed: {result.Error}");
}
_logger.LogDebug(
"Wrapped DEK with KMS key {KeyId}",
MaskKeyId(recipient));
return new WrappedKeyRecipient
{
Type = "kms",
KmsKeyId = recipient,
WrappedKey = Convert.ToBase64String(result.Ciphertext),
KeyId = result.KeyId,
Algorithm = result.Algorithm
};
}
/// <inheritdoc />
public async Task<KeyUnwrapResult> UnwrapKeyAsync(
WrappedKeyRecipient wrappedKey,
string? privateKey = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(wrappedKey);
if (_kmsClient is null)
{
return KeyUnwrapResult.Failed("KMS client is not configured");
}
if (string.IsNullOrEmpty(wrappedKey.KmsKeyId))
{
return KeyUnwrapResult.Failed("KMS key ID is required");
}
try
{
var ciphertext = Convert.FromBase64String(wrappedKey.WrappedKey);
            // Build encryption context. Wrapping also binds tenantId/runId when they were
            // provided, and a strict KMS (e.g., AWS KMS) rejects decryption unless the
            // context matches exactly; only the shared "purpose" entry is reconstructed here,
            // so tenant/run-bound DEKs require a matching context from the caller.
            var context = new Dictionary<string, string>
            {
                ["purpose"] = "export-bundle-dek"
            };
var result = await _kmsClient.DecryptAsync(
wrappedKey.KmsKeyId,
ciphertext,
context,
cancellationToken);
if (!result.Success || result.Plaintext is null)
{
return KeyUnwrapResult.Failed($"KMS decrypt failed: {result.Error}");
}
_logger.LogDebug("Unwrapped DEK with KMS key {KeyId}", MaskKeyId(wrappedKey.KmsKeyId));
return new KeyUnwrapResult
{
Success = true,
Key = result.Plaintext
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to unwrap DEK with KMS");
return KeyUnwrapResult.Failed($"KMS unwrap failed: {ex.Message}");
}
}
/// <inheritdoc />
public bool CanUnwrap(WrappedKeyRecipient wrappedKey)
{
return string.Equals(wrappedKey.Type, "kms", StringComparison.OrdinalIgnoreCase) &&
!string.IsNullOrEmpty(wrappedKey.KmsKeyId) &&
_kmsClient is not null;
}
private static string MaskKeyId(string keyId)
{
if (keyId.Length <= 12)
return "***";
return $"{keyId[..8]}...{keyId[^4..]}";
}
}
/// <summary>
/// Stub KMS client for testing and offline environments.
/// Uses local symmetric encryption as a stand-in for KMS.
/// </summary>
public sealed class StubKmsClient : IKmsClient
{
private readonly Dictionary<string, byte[]> _keys = new();
/// <summary>
/// Registers a key for testing.
/// </summary>
public void RegisterKey(string keyId, byte[] key)
{
_keys[keyId] = key;
}
/// <inheritdoc />
public Task<KmsEncryptResult> EncryptAsync(
string keyId,
byte[] plaintext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default)
{
        // Note: this stub ignores encryptionContext; a real KMS binds it cryptographically.
        if (!_keys.TryGetValue(keyId, out var key))
{
// Generate a key for testing
key = new byte[32];
System.Security.Cryptography.RandomNumberGenerator.Fill(key);
_keys[keyId] = key;
}
using var aes = System.Security.Cryptography.Aes.Create();
aes.Key = key;
aes.GenerateIV();
using var encryptor = aes.CreateEncryptor();
var encrypted = encryptor.TransformFinalBlock(plaintext, 0, plaintext.Length);
// Format: iv || encrypted
var result = new byte[aes.IV.Length + encrypted.Length];
Buffer.BlockCopy(aes.IV, 0, result, 0, aes.IV.Length);
Buffer.BlockCopy(encrypted, 0, result, aes.IV.Length, encrypted.Length);
return Task.FromResult(new KmsEncryptResult
{
Success = true,
Ciphertext = result,
KeyId = keyId,
Algorithm = "AES-256-CBC"
});
}
/// <inheritdoc />
public Task<KmsDecryptResult> DecryptAsync(
string keyId,
byte[] ciphertext,
IDictionary<string, string>? encryptionContext = null,
CancellationToken cancellationToken = default)
{
if (!_keys.TryGetValue(keyId, out var key))
{
return Task.FromResult(KmsDecryptResult.Failed($"Key not found: {keyId}"));
}
        // Extract IV and encrypted data (format: 16-byte IV || ciphertext)
        if (ciphertext.Length < 16)
        {
            return Task.FromResult(KmsDecryptResult.Failed("Ciphertext is too short to contain an IV"));
        }
        var iv = new byte[16];
        var encrypted = new byte[ciphertext.Length - 16];
Buffer.BlockCopy(ciphertext, 0, iv, 0, 16);
Buffer.BlockCopy(ciphertext, 16, encrypted, 0, encrypted.Length);
using var aes = System.Security.Cryptography.Aes.Create();
aes.Key = key;
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
var plaintext = decryptor.TransformFinalBlock(encrypted, 0, encrypted.Length);
return Task.FromResult(new KmsDecryptResult
{
Success = true,
Plaintext = plaintext,
KeyId = keyId
});
}
/// <inheritdoc />
    public async Task<KmsGenerateDataKeyResult> GenerateDataKeyAsync(
        string keyId,
        int keySizeBytes = 32,
        IDictionary<string, string>? encryptionContext = null,
        CancellationToken cancellationToken = default)
    {
        // Generate random data key
        var plaintext = new byte[keySizeBytes];
        System.Security.Cryptography.RandomNumberGenerator.Fill(plaintext);
        // Encrypt it (await instead of blocking on the task)
        var encryptResult = await EncryptAsync(keyId, plaintext, encryptionContext, cancellationToken);
        if (!encryptResult.Success)
        {
            return KmsGenerateDataKeyResult.Failed(encryptResult.Error ?? "Encryption failed");
        }
        return new KmsGenerateDataKeyResult
        {
            Success = true,
            Plaintext = plaintext,
            CiphertextBlob = encryptResult.Ciphertext,
            KeyId = keyId
        };
    }
}
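
Test-style round trip through the stub (assumes usings for System.Linq, System.Diagnostics, System.Security.Cryptography, and Microsoft.Extensions.Logging.Abstractions); the key ID and GUIDs are placeholders.

var kms = new StubKmsClient();
kms.RegisterKey("test-key", RandomNumberGenerator.GetBytes(32));
var wrapper = new KmsBundleKeyWrapper(NullLogger<KmsBundleKeyWrapper>.Instance, kms);

var dek = RandomNumberGenerator.GetBytes(32);
var wrapped = await wrapper.WrapKeyAsync(dek, "test-key", tenantId: Guid.NewGuid(), runId: Guid.NewGuid());
var unwrapped = await wrapper.UnwrapKeyAsync(wrapped);

// The stub ignores encryption context, so the round trip recovers the original DEK.
Debug.Assert(unwrapped.Success && dek.SequenceEqual(unwrapped.Key!));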

View File

@@ -0,0 +1,259 @@
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Crypto;
/// <summary>
/// Centralized crypto routing service for ExportCenter.
/// Routes hashing, signing, and encryption operations through ICryptoProviderRegistry and ICryptoHash
/// with configurable provider selection.
/// </summary>
public interface IExportCryptoService
{
/// <summary>
/// Computes a content hash using the configured algorithm and provider.
/// </summary>
string ComputeContentHash(ReadOnlySpan<byte> data);
/// <summary>
/// Computes a content hash for a stream.
/// </summary>
Task<string> ComputeContentHashAsync(Stream stream, CancellationToken cancellationToken = default);
/// <summary>
/// Computes an HMAC for signing purposes.
/// </summary>
byte[] ComputeHmacForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data);
/// <summary>
/// Computes an HMAC for signing and returns as base64.
/// </summary>
string ComputeHmacBase64ForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data);
/// <summary>
/// Gets a signer for asymmetric signing operations.
/// </summary>
ICryptoSigner GetSigner(string keyId, string? algorithmOverride = null);
/// <summary>
/// Gets a content hasher with the configured algorithm.
/// </summary>
ICryptoHasher GetHasher(string? algorithmOverride = null);
/// <summary>
/// Gets the current crypto configuration.
/// </summary>
ExportCryptoConfiguration CurrentConfiguration { get; }
}
/// <summary>
/// Configuration for export crypto operations.
/// </summary>
public sealed class ExportCryptoOptions
{
/// <summary>
/// Default hash algorithm for content hashing (e.g., "SHA-256", "SHA-384").
/// </summary>
public string HashAlgorithm { get; set; } = "SHA-256";
/// <summary>
/// Default signing algorithm for asymmetric signing (e.g., "ES256", "ES384", "PS256").
/// </summary>
public string SigningAlgorithm { get; set; } = "ES256";
/// <summary>
/// Preferred crypto provider for operations (e.g., "default", "CryptoPro", "OpenSSL").
/// </summary>
public string? PreferredProvider { get; set; }
/// <summary>
/// Default key ID for signing operations.
/// </summary>
public string? DefaultKeyId { get; set; }
/// <summary>
/// Whether to use compliance-profile-aware operations.
/// </summary>
public bool UseComplianceProfile { get; set; } = true;
/// <summary>
/// Algorithm overrides by purpose.
/// </summary>
public Dictionary<string, string> AlgorithmOverrides { get; set; } = new();
}
/// <summary>
/// Runtime crypto configuration snapshot.
/// </summary>
public sealed record ExportCryptoConfiguration(
string HashAlgorithm,
string SigningAlgorithm,
string? Provider,
string? KeyId);
/// <summary>
/// Default implementation of export crypto service.
/// </summary>
public sealed class ExportCryptoService : IExportCryptoService
{
private readonly ILogger<ExportCryptoService> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly ICryptoHmac _cryptoHmac;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
private readonly ExportCryptoOptions _options;
public ExportCryptoService(
ILogger<ExportCryptoService> logger,
ICryptoHash cryptoHash,
ICryptoHmac cryptoHmac,
IOptions<ExportCryptoOptions>? options = null,
ICryptoProviderRegistry? cryptoRegistry = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
_cryptoRegistry = cryptoRegistry;
_options = options?.Value ?? new ExportCryptoOptions();
}
public ExportCryptoConfiguration CurrentConfiguration => new(
_options.HashAlgorithm,
_options.SigningAlgorithm,
_options.PreferredProvider,
_options.DefaultKeyId);
public string ComputeContentHash(ReadOnlySpan<byte> data)
{
// Use ICryptoHash which handles provider selection internally
return _cryptoHash.ComputeHashHexForPurpose(data, HashPurpose.Content);
}
public async Task<string> ComputeContentHashAsync(Stream stream, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
        // Read stream into memory for hashing (large payloads may warrant a streaming hasher)
        using var ms = new MemoryStream();
        await stream.CopyToAsync(ms, cancellationToken);
        return _cryptoHash.ComputeHashHexForPurpose(ms.ToArray(), HashPurpose.Content);
}
public byte[] ComputeHmacForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data)
{
return _cryptoHmac.ComputeHmacForPurpose(key, data, HmacPurpose.Signing);
}
public string ComputeHmacBase64ForSigning(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data)
{
return _cryptoHmac.ComputeHmacBase64ForPurpose(key, data, HmacPurpose.Signing);
}
public ICryptoSigner GetSigner(string keyId, string? algorithmOverride = null)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"ICryptoProviderRegistry is not configured. Cannot get asymmetric signer.");
}
var algorithm = algorithmOverride ?? _options.SigningAlgorithm;
var keyRef = new CryptoKeyReference(keyId, _options.PreferredProvider);
var resolution = _cryptoRegistry.ResolveSigner(
CryptoCapability.Signing,
algorithm,
keyRef,
_options.PreferredProvider);
_logger.LogDebug(
"Resolved signer for key {KeyId} with algorithm {Algorithm} from provider {Provider}",
keyId,
algorithm,
resolution.ProviderName);
return resolution.Signer;
}
public ICryptoHasher GetHasher(string? algorithmOverride = null)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"ICryptoProviderRegistry is not configured. Use ComputeContentHash instead.");
}
var algorithm = algorithmOverride ?? _options.HashAlgorithm;
var resolution = _cryptoRegistry.ResolveHasher(algorithm, _options.PreferredProvider);
_logger.LogDebug(
"Resolved hasher for algorithm {Algorithm} from provider {Provider}",
algorithm,
resolution.ProviderName);
return resolution.Hasher;
}
}
/// <summary>
/// Factory for creating ExportCryptoService with specific configuration.
/// </summary>
public interface IExportCryptoServiceFactory
{
/// <summary>
/// Creates an export crypto service with the specified options.
/// </summary>
IExportCryptoService Create(ExportCryptoOptions options);
/// <summary>
/// Creates an export crypto service for a specific provider.
/// </summary>
IExportCryptoService CreateForProvider(string providerName);
}
/// <summary>
/// Default implementation of export crypto service factory.
/// </summary>
public sealed class ExportCryptoServiceFactory : IExportCryptoServiceFactory
{
private readonly ILogger<ExportCryptoService> _logger;
private readonly ICryptoHash _cryptoHash;
private readonly ICryptoHmac _cryptoHmac;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
public ExportCryptoServiceFactory(
ILogger<ExportCryptoService> logger,
ICryptoHash cryptoHash,
ICryptoHmac cryptoHmac,
ICryptoProviderRegistry? cryptoRegistry = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
_cryptoRegistry = cryptoRegistry;
}
public IExportCryptoService Create(ExportCryptoOptions options)
{
ArgumentNullException.ThrowIfNull(options);
return new ExportCryptoService(
_logger,
_cryptoHash,
_cryptoHmac,
Options.Create(options),
_cryptoRegistry);
}
public IExportCryptoService CreateForProvider(string providerName)
{
ArgumentException.ThrowIfNullOrWhiteSpace(providerName);
var options = new ExportCryptoOptions { PreferredProvider = providerName };
return Create(options);
}
}
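
Quick usage sketch, assuming cryptoService is a resolved IExportCryptoService and the key ID is a placeholder; GetSigner additionally requires a configured ICryptoProviderRegistry.

var hash = cryptoService.ComputeContentHash("payload"u8);
var signer = cryptoService.GetSigner("export-signing-key", algorithmOverride: "ES384");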

View File

@@ -0,0 +1,267 @@
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Distribution;
/// <summary>
/// Default implementation of the distribution lifecycle service.
/// </summary>
public sealed class DistributionLifecycleService : IDistributionLifecycleService
{
private readonly IDistributionRepository _repository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<DistributionLifecycleService> _logger;
public DistributionLifecycleService(
IDistributionRepository repository,
TimeProvider timeProvider,
ILogger<DistributionLifecycleService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<ExportDistribution> CreateDistributionAsync(
CreateDistributionRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
// Check idempotency key
if (!string.IsNullOrEmpty(request.IdempotencyKey))
{
var existing = await _repository.GetByIdempotencyKeyAsync(
request.IdempotencyKey, cancellationToken);
if (existing is not null)
{
_logger.LogDebug(
"Found existing distribution {DistributionId} for idempotency key {Key}",
existing.DistributionId, request.IdempotencyKey);
return existing;
}
}
var now = _timeProvider.GetUtcNow();
// Calculate retention expiry
DateTimeOffset? retentionExpiresAt = null;
Guid? retentionPolicyId = null;
if (request.RetentionPolicy is { Enabled: true })
{
retentionPolicyId = request.RetentionPolicy.PolicyId;
retentionExpiresAt = request.RetentionPolicy.CalculateExpiryAt(now);
}
var distribution = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = request.RunId,
TenantId = request.TenantId,
Kind = request.Kind,
Status = ExportDistributionStatus.Pending,
Target = request.Target,
ArtifactPath = request.ArtifactPath,
IdempotencyKey = request.IdempotencyKey,
RetentionPolicyId = retentionPolicyId,
RetentionExpiresAt = retentionExpiresAt,
CreatedAt = now,
AttemptCount = 0
};
var created = await _repository.CreateAsync(distribution, cancellationToken);
_logger.LogInformation(
"Created distribution {DistributionId} for run {RunId} targeting {Kind}:{Target}",
created.DistributionId, request.RunId, request.Kind, request.Target);
return created;
}
/// <inheritdoc />
public Task<ExportDistribution?> GetDistributionAsync(
Guid distributionId,
CancellationToken cancellationToken = default)
=> _repository.GetByIdAsync(distributionId, cancellationToken);
/// <inheritdoc />
public Task<ExportDistribution?> GetDistributionByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default)
=> _repository.GetByIdempotencyKeyAsync(idempotencyKey, cancellationToken);
/// <inheritdoc />
public Task<IReadOnlyList<ExportDistribution>> GetDistributionsForRunAsync(
Guid runId,
CancellationToken cancellationToken = default)
=> _repository.GetByRunIdAsync(runId, cancellationToken);
/// <inheritdoc />
public async Task<bool> UpdateDistributionMetadataAsync(
UpdateDistributionMetadataRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var distribution = await _repository.GetByIdAsync(request.DistributionId, cancellationToken);
if (distribution is null)
{
_logger.LogWarning("Distribution {DistributionId} not found", request.DistributionId);
return false;
}
// Optimistic concurrency check
if (request.ExpectedStatus.HasValue && distribution.Status != request.ExpectedStatus.Value)
{
_logger.LogWarning(
"Distribution {DistributionId} status mismatch: expected {Expected}, actual {Actual}",
request.DistributionId, request.ExpectedStatus.Value, distribution.Status);
return false;
}
var now = _timeProvider.GetUtcNow();
// Create updated distribution with changed fields
var updated = new ExportDistribution
{
DistributionId = distribution.DistributionId,
RunId = distribution.RunId,
TenantId = distribution.TenantId,
Kind = distribution.Kind,
Status = request.Status ?? distribution.Status,
Target = distribution.Target,
ArtifactPath = distribution.ArtifactPath,
ArtifactHash = request.ArtifactHash ?? distribution.ArtifactHash,
SizeBytes = request.SizeBytes ?? distribution.SizeBytes,
ContentType = request.ContentType ?? distribution.ContentType,
MetadataJson = request.MetadataJson ?? distribution.MetadataJson,
ErrorJson = distribution.ErrorJson,
AttemptCount = distribution.AttemptCount,
IdempotencyKey = distribution.IdempotencyKey,
OciManifestDigest = request.OciManifestDigest ?? distribution.OciManifestDigest,
OciImageReference = request.OciImageReference ?? distribution.OciImageReference,
RetentionPolicyId = distribution.RetentionPolicyId,
RetentionExpiresAt = distribution.RetentionExpiresAt,
MarkedForDeletion = distribution.MarkedForDeletion,
CreatedAt = distribution.CreatedAt,
DistributedAt = request.DistributedAt ?? distribution.DistributedAt,
VerifiedAt = request.VerifiedAt ?? distribution.VerifiedAt,
UpdatedAt = now,
DeletedAt = distribution.DeletedAt
};
var result = await _repository.UpdateAsync(updated, cancellationToken);
if (result)
{
_logger.LogDebug(
"Updated distribution {DistributionId} metadata",
request.DistributionId);
}
return result;
}
/// <inheritdoc />
public async Task<bool> TransitionStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
string? errorJson = null,
CancellationToken cancellationToken = default)
{
var distribution = await _repository.GetByIdAsync(distributionId, cancellationToken);
if (distribution is null)
{
_logger.LogWarning("Distribution {DistributionId} not found for status transition", distributionId);
return false;
}
// Validate transition
if (!IsValidTransition(distribution.Status, newStatus))
{
_logger.LogWarning(
"Invalid status transition for distribution {DistributionId}: {From} -> {To}",
distributionId, distribution.Status, newStatus);
return false;
}
var result = await _repository.UpdateStatusAsync(
distributionId, newStatus, distribution.Status, errorJson, cancellationToken);
if (result)
{
_logger.LogInformation(
"Transitioned distribution {DistributionId} from {From} to {To}",
distributionId, distribution.Status, newStatus);
}
return result;
}
/// <inheritdoc />
public async Task<int> ApplyRetentionPoliciesAsync(
Guid tenantId,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var marked = await _repository.MarkForDeletionAsync(tenantId, now, cancellationToken);
if (marked > 0)
{
_logger.LogInformation(
"Marked {Count} distributions for deletion in tenant {TenantId}",
marked, tenantId);
}
return marked;
}
/// <inheritdoc />
public async Task<int> PruneMarkedDistributionsAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default)
{
var deleted = await _repository.DeleteMarkedAsync(tenantId, batchSize, cancellationToken);
if (deleted > 0)
{
_logger.LogInformation(
"Pruned {Count} marked distributions in tenant {TenantId}",
deleted, tenantId);
}
return deleted;
}
/// <inheritdoc />
public Task<IReadOnlyList<ExportDistribution>> GetExpiredDistributionsAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
return _repository.GetExpiredAsync(tenantId, now, limit, cancellationToken);
}
private static bool IsValidTransition(ExportDistributionStatus from, ExportDistributionStatus to)
{
return (from, to) switch
{
(ExportDistributionStatus.Pending, ExportDistributionStatus.Distributing) => true,
(ExportDistributionStatus.Pending, ExportDistributionStatus.Cancelled) => true,
(ExportDistributionStatus.Pending, ExportDistributionStatus.Failed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Distributed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Failed) => true,
(ExportDistributionStatus.Distributing, ExportDistributionStatus.Cancelled) => true,
(ExportDistributionStatus.Distributed, ExportDistributionStatus.Verified) => true,
(ExportDistributionStatus.Distributed, ExportDistributionStatus.Failed) => true,
// Retry from failed
(ExportDistributionStatus.Failed, ExportDistributionStatus.Pending) => true,
_ => false
};
}
}
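
A hedged happy-path sketch of the lifecycle this service enforces (Pending -> Distributing -> Distributed -> Verified); identifiers and paths are placeholders.

var distribution = await lifecycle.CreateDistributionAsync(new CreateDistributionRequest
{
    RunId = runId,
    TenantId = tenantId,
    Kind = ExportDistributionKind.OciRegistry,
    Target = "registry.example.internal/exports",
    ArtifactPath = "/exports/bundle.tar.gz",
    IdempotencyKey = $"{runId:N}:oci" // replays return the existing record
});
await lifecycle.TransitionStatusAsync(distribution.DistributionId, ExportDistributionStatus.Distributing);
await lifecycle.TransitionStatusAsync(distribution.DistributionId, ExportDistributionStatus.Distributed);
await lifecycle.TransitionStatusAsync(distribution.DistributionId, ExportDistributionStatus.Verified);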

View File

@@ -0,0 +1,155 @@
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Distribution;
/// <summary>
/// Service for managing the lifecycle of export distributions.
/// </summary>
public interface IDistributionLifecycleService
{
/// <summary>
/// Creates a new distribution record, respecting idempotency keys.
/// </summary>
/// <param name="request">Distribution creation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Created or existing distribution (if idempotent).</returns>
Task<ExportDistribution> CreateDistributionAsync(
CreateDistributionRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a distribution by ID.
/// </summary>
Task<ExportDistribution?> GetDistributionAsync(
Guid distributionId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a distribution by idempotency key.
/// </summary>
Task<ExportDistribution?> GetDistributionByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all distributions for a run.
/// </summary>
Task<IReadOnlyList<ExportDistribution>> GetDistributionsForRunAsync(
Guid runId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates distribution metadata idempotently.
/// </summary>
/// <param name="request">Update request with optional optimistic concurrency check.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if update succeeded, false if concurrency check failed.</returns>
Task<bool> UpdateDistributionMetadataAsync(
UpdateDistributionMetadataRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Transitions a distribution to a new status.
/// </summary>
/// <param name="distributionId">Distribution ID.</param>
/// <param name="newStatus">New status.</param>
/// <param name="errorJson">Error details if transitioning to Failed.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if transition succeeded.</returns>
Task<bool> TransitionStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
string? errorJson = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Marks distributions for deletion based on retention policy.
/// </summary>
/// <param name="tenantId">Tenant ID to scope the operation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of distributions marked for deletion.</returns>
Task<int> ApplyRetentionPoliciesAsync(
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes distributions that have been marked for deletion.
/// </summary>
/// <param name="tenantId">Tenant ID to scope the operation.</param>
/// <param name="batchSize">Maximum number to delete per call.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of distributions deleted.</returns>
Task<int> PruneMarkedDistributionsAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets distributions that have expired based on retention policy.
/// </summary>
Task<IReadOnlyList<ExportDistribution>> GetExpiredDistributionsAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Repository interface for distribution persistence.
/// </summary>
public interface IDistributionRepository
{
Task<ExportDistribution> CreateAsync(
ExportDistribution distribution,
CancellationToken cancellationToken = default);
Task<ExportDistribution?> GetByIdAsync(
Guid distributionId,
CancellationToken cancellationToken = default);
Task<ExportDistribution?> GetByIdempotencyKeyAsync(
string idempotencyKey,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetByRunIdAsync(
Guid runId,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetByTenantIdAsync(
Guid tenantId,
ExportDistributionStatus? status = null,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetExpiredAsync(
Guid tenantId,
DateTimeOffset asOf,
int limit = 100,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportDistribution>> GetMarkedForDeletionAsync(
Guid tenantId,
int limit = 100,
CancellationToken cancellationToken = default);
Task<bool> UpdateAsync(
ExportDistribution distribution,
CancellationToken cancellationToken = default);
Task<bool> UpdateStatusAsync(
Guid distributionId,
ExportDistributionStatus newStatus,
ExportDistributionStatus? expectedStatus = null,
string? errorJson = null,
CancellationToken cancellationToken = default);
Task<int> MarkForDeletionAsync(
Guid tenantId,
DateTimeOffset expiryBefore,
CancellationToken cancellationToken = default);
Task<int> DeleteMarkedAsync(
Guid tenantId,
int batchSize = 100,
CancellationToken cancellationToken = default);
}

View File

@@ -55,11 +55,51 @@ public sealed class ExportDistribution
/// </summary>
public int AttemptCount { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// </summary>
public string? IdempotencyKey { get; init; }
/// <summary>
/// OCI manifest digest for registry distributions.
/// </summary>
public string? OciManifestDigest { get; init; }
/// <summary>
/// OCI image reference for registry distributions.
/// </summary>
public string? OciImageReference { get; init; }
/// <summary>
/// Retention policy ID applied to this distribution.
/// </summary>
public Guid? RetentionPolicyId { get; init; }
/// <summary>
/// Timestamp when this distribution expires based on retention policy.
/// </summary>
public DateTimeOffset? RetentionExpiresAt { get; init; }
/// <summary>
/// Whether this distribution has been marked for deletion.
/// </summary>
public bool MarkedForDeletion { get; init; }
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset? DistributedAt { get; init; }
public DateTimeOffset? VerifiedAt { get; init; }
/// <summary>
/// Timestamp when this distribution was last updated.
/// </summary>
public DateTimeOffset? UpdatedAt { get; init; }
/// <summary>
/// Timestamp when this distribution was deleted (if applicable).
/// </summary>
public DateTimeOffset? DeletedAt { get; init; }
}
/// <summary>
@@ -90,7 +130,22 @@ public enum ExportDistributionKind
/// <summary>
/// Webhook notification (metadata only).
/// </summary>
    Webhook = 5,
/// <summary>
/// OCI registry distribution (artifact push).
/// </summary>
OciRegistry = 6,
/// <summary>
/// Azure Blob Storage distribution.
/// </summary>
AzureBlob = 7,
/// <summary>
/// Google Cloud Storage distribution.
/// </summary>
GoogleCloudStorage = 8
}
/// <summary>

View File

@@ -0,0 +1,286 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Domain;
/// <summary>
/// Configuration for a distribution target.
/// </summary>
public sealed record ExportDistributionTarget
{
public required Guid TargetId { get; init; }
public required Guid ProfileId { get; init; }
public required Guid TenantId { get; init; }
public required string Name { get; init; }
public required ExportDistributionKind Kind { get; init; }
public required bool Enabled { get; init; }
/// <summary>
/// Priority for distribution ordering (lower = higher priority).
/// </summary>
public int Priority { get; init; }
/// <summary>
/// Target-specific configuration (serialized JSON).
/// </summary>
public string? ConfigJson { get; init; }
/// <summary>
/// Retention policy for artifacts at this target.
/// </summary>
public ExportRetentionPolicy? RetentionPolicy { get; init; }
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset? UpdatedAt { get; init; }
}
/// <summary>
/// Configuration for OCI registry distribution target.
/// </summary>
public sealed record OciDistributionTargetConfig
{
[JsonPropertyName("registry")]
public required string Registry { get; init; }
[JsonPropertyName("repository")]
public string? Repository { get; init; }
[JsonPropertyName("tagPattern")]
public string TagPattern { get; init; } = "{run-id}";
[JsonPropertyName("artifactType")]
public string? ArtifactType { get; init; }
[JsonPropertyName("createReferrer")]
public bool CreateReferrer { get; init; }
[JsonPropertyName("authRef")]
public string? AuthRef { get; init; }
[JsonPropertyName("retryCount")]
public int RetryCount { get; init; } = 3;
[JsonPropertyName("timeoutSeconds")]
public int TimeoutSeconds { get; init; } = 300;
}
/// <summary>
/// Configuration for S3/object storage distribution target.
/// </summary>
public sealed record ObjectStorageDistributionTargetConfig
{
[JsonPropertyName("endpoint")]
public string? Endpoint { get; init; }
[JsonPropertyName("bucket")]
public required string Bucket { get; init; }
[JsonPropertyName("prefix")]
public string? Prefix { get; init; }
[JsonPropertyName("region")]
public string? Region { get; init; }
[JsonPropertyName("storageClass")]
public string? StorageClass { get; init; }
[JsonPropertyName("serverSideEncryption")]
public string? ServerSideEncryption { get; init; }
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
[JsonPropertyName("authRef")]
public string? AuthRef { get; init; }
}
/// <summary>
/// Retention policy for export artifacts.
/// </summary>
public sealed record ExportRetentionPolicy
{
/// <summary>
/// Unique identifier for the retention policy.
/// </summary>
public Guid PolicyId { get; init; } = Guid.NewGuid();
/// <summary>
/// Duration to retain artifacts (e.g., "30d", "1y").
/// </summary>
[JsonPropertyName("retentionPeriod")]
public string? RetentionPeriod { get; init; }
/// <summary>
/// Retention period in days (parsed from RetentionPeriod or set directly).
/// </summary>
[JsonPropertyName("retentionDays")]
public int? RetentionDays { get; init; }
/// <summary>
/// Maximum number of artifacts to retain (FIFO pruning).
/// </summary>
[JsonPropertyName("maxArtifacts")]
public int? MaxArtifacts { get; init; }
/// <summary>
/// Maximum total size in bytes to retain.
/// </summary>
[JsonPropertyName("maxSizeBytes")]
public long? MaxSizeBytes { get; init; }
/// <summary>
/// Whether to delete artifacts when retention expires.
/// </summary>
[JsonPropertyName("deleteOnExpiry")]
public bool DeleteOnExpiry { get; init; } = true;
/// <summary>
/// Whether retention policy is enforced.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Calculates the expiry timestamp based on this policy.
/// </summary>
public DateTimeOffset? CalculateExpiryAt(DateTimeOffset from)
{
if (RetentionDays.HasValue)
{
return from.AddDays(RetentionDays.Value);
}
if (!string.IsNullOrEmpty(RetentionPeriod))
{
return ParseRetentionPeriod(RetentionPeriod, from);
}
return null;
}
private static DateTimeOffset? ParseRetentionPeriod(string period, DateTimeOffset from)
{
if (string.IsNullOrWhiteSpace(period))
return null;
var span = period.Trim();
if (span.Length < 2)
return null;
var unit = char.ToLowerInvariant(span[^1]);
if (!int.TryParse(span[..^1], out var value))
return null;
return unit switch
{
'd' => from.AddDays(value),
'w' => from.AddDays(value * 7),
'm' => from.AddMonths(value),
'y' => from.AddYears(value),
'h' => from.AddHours(value),
_ => null
};
}
}
/// <summary>
/// Result of a distribution operation.
/// </summary>
public sealed record DistributionResult
{
public required bool Success { get; init; }
public Guid DistributionId { get; init; }
public ExportDistributionStatus Status { get; init; }
public string? Target { get; init; }
public string? ArtifactHash { get; init; }
public long SizeBytes { get; init; }
public string? OciManifestDigest { get; init; }
public string? OciImageReference { get; init; }
public int AttemptCount { get; init; }
public string? ErrorMessage { get; init; }
public string? ErrorCode { get; init; }
public static DistributionResult Failed(string errorMessage, string? errorCode = null)
=> new()
{
Success = false,
Status = ExportDistributionStatus.Failed,
ErrorMessage = errorMessage,
ErrorCode = errorCode
};
}
/// <summary>
/// Request to create or update a distribution.
/// </summary>
public sealed record CreateDistributionRequest
{
public required Guid RunId { get; init; }
public required Guid TenantId { get; init; }
public required ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
public required string ArtifactPath { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// If a distribution with this key already exists, returns the existing one.
/// </summary>
public string? IdempotencyKey { get; init; }
public ExportRetentionPolicy? RetentionPolicy { get; init; }
public OciDistributionTargetConfig? OciConfig { get; init; }
public ObjectStorageDistributionTargetConfig? ObjectStorageConfig { get; init; }
}
/// <summary>
/// Request to update distribution metadata idempotently.
/// </summary>
public sealed record UpdateDistributionMetadataRequest
{
public required Guid DistributionId { get; init; }
public ExportDistributionStatus? Status { get; init; }
public string? ArtifactHash { get; init; }
public long? SizeBytes { get; init; }
public string? ContentType { get; init; }
public string? MetadataJson { get; init; }
public string? OciManifestDigest { get; init; }
public string? OciImageReference { get; init; }
public DateTimeOffset? DistributedAt { get; init; }
public DateTimeOffset? VerifiedAt { get; init; }
/// <summary>
/// Expected current status for optimistic concurrency.
/// If set, update fails if current status doesn't match.
/// </summary>
public ExportDistributionStatus? ExpectedStatus { get; init; }
}
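
Expiry calculation example for the retention policy above; units follow ParseRetentionPeriod (h, d, w, m, y), and RetentionDays takes precedence when both fields are set.

var policy = new ExportRetentionPolicy { RetentionPeriod = "30d" };
var expiresAt = policy.CalculateExpiryAt(DateTimeOffset.UtcNow); // now + 30 days

var daysPolicy = new ExportRetentionPolicy { RetentionDays = 7, RetentionPeriod = "1y" };
var expiresInAWeek = daysPolicy.CalculateExpiryAt(DateTimeOffset.UtcNow); // RetentionDays wins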

View File

@@ -3,7 +3,7 @@ namespace StellaOps.ExportCenter.Core.Domain;
/// <summary>
/// Represents a single execution of an export profile.
/// </summary>
public sealed record ExportRun
{
public required Guid RunId { get; init; }

View File

@@ -0,0 +1,351 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Encryption mode for export bundles.
/// </summary>
public enum BundleEncryptionMode
{
/// <summary>
/// No encryption.
/// </summary>
None = 0,
/// <summary>
/// age encryption (X25519) - preferred for offline/air-gapped deployments.
/// </summary>
Age = 1,
/// <summary>
/// AES-GCM with KMS key wrapping - for HSM/Authority integration.
/// </summary>
AesGcmKms = 2
}
/// <summary>
/// Configuration for bundle encryption.
/// </summary>
public sealed record BundleEncryptionOptions
{
/// <summary>
/// Encryption mode.
/// </summary>
[JsonPropertyName("mode")]
public BundleEncryptionMode Mode { get; init; } = BundleEncryptionMode.None;
/// <summary>
/// age public key recipients (for Age mode).
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<string> Recipients { get; init; } = [];
/// <summary>
/// KMS key ID for key wrapping (for AesGcmKms mode).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Whether to fail if encryption cannot be performed.
/// </summary>
[JsonPropertyName("strict")]
public bool Strict { get; init; } = true;
/// <summary>
/// AAD format string (default: "{runId}:{relativePath}").
/// </summary>
[JsonPropertyName("aadFormat")]
public string AadFormat { get; init; } = "{runId}:{relativePath}";
}
/// <summary>
/// Request to encrypt bundle content.
/// </summary>
public sealed record BundleEncryptRequest
{
/// <summary>
/// Run identifier for AAD binding.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant identifier for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption options.
/// </summary>
public required BundleEncryptionOptions Options { get; init; }
/// <summary>
/// Files to encrypt with their relative paths.
/// </summary>
public required IReadOnlyList<BundleFileToEncrypt> Files { get; init; }
}
/// <summary>
/// A file to encrypt within a bundle.
/// </summary>
public sealed record BundleFileToEncrypt
{
/// <summary>
/// Relative path within the bundle (used for AAD).
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Source file path to read plaintext from.
/// </summary>
public required string SourcePath { get; init; }
/// <summary>
/// Destination path for encrypted content.
/// </summary>
public required string DestinationPath { get; init; }
}
/// <summary>
/// Result of encrypting bundle content.
/// </summary>
public sealed record BundleEncryptResult
{
/// <summary>
/// Whether encryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Encrypted file results.
/// </summary>
public IReadOnlyList<EncryptedFileResult> EncryptedFiles { get; init; } = [];
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public BundleEncryptionMetadata? Metadata { get; init; }
/// <summary>
/// Error message if encryption failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static BundleEncryptResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Result of encrypting a single file.
/// </summary>
public sealed record EncryptedFileResult
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Path to encrypted file.
/// </summary>
public required string EncryptedPath { get; init; }
/// <summary>
/// Nonce used for encryption (12 bytes, base64).
/// </summary>
public required string Nonce { get; init; }
/// <summary>
/// Size of encrypted content.
/// </summary>
public long EncryptedSizeBytes { get; init; }
/// <summary>
/// SHA-256 hash of original plaintext (for verification).
/// </summary>
public string? PlaintextHash { get; init; }
}
/// <summary>
/// Encryption metadata for provenance.
/// </summary>
public sealed record BundleEncryptionMetadata
{
/// <summary>
/// Encryption mode used.
/// </summary>
[JsonPropertyName("mode")]
public required string Mode { get; init; }
/// <summary>
/// AAD format used.
/// </summary>
[JsonPropertyName("aadFormat")]
public required string AadFormat { get; init; }
/// <summary>
/// Nonce format (always "random-12").
/// </summary>
[JsonPropertyName("nonceFormat")]
public string NonceFormat { get; init; } = "random-12";
/// <summary>
/// Wrapped DEK recipients.
/// </summary>
[JsonPropertyName("recipients")]
public IReadOnlyList<WrappedKeyRecipient> Recipients { get; init; } = [];
}
/// <summary>
/// A recipient with wrapped DEK.
/// </summary>
public sealed record WrappedKeyRecipient
{
/// <summary>
/// Type of recipient ("age" or "kms").
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// age public key (for age type).
/// </summary>
[JsonPropertyName("recipient")]
public string? Recipient { get; init; }
/// <summary>
/// KMS key ID (for kms type).
/// </summary>
[JsonPropertyName("kmsKeyId")]
public string? KmsKeyId { get; init; }
/// <summary>
/// Wrapped DEK (base64).
/// </summary>
[JsonPropertyName("wrappedKey")]
public required string WrappedKey { get; init; }
/// <summary>
/// Optional key identifier.
/// </summary>
[JsonPropertyName("keyId")]
public string? KeyId { get; init; }
/// <summary>
/// Algorithm used for wrapping (for KMS).
/// </summary>
[JsonPropertyName("algorithm")]
public string? Algorithm { get; init; }
}
/// <summary>
/// Request to decrypt bundle content.
/// </summary>
public sealed record BundleDecryptRequest
{
/// <summary>
/// Run identifier for AAD validation.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant identifier for KMS context.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Encryption metadata from provenance.
/// </summary>
public required BundleEncryptionMetadata Metadata { get; init; }
/// <summary>
/// age private key for decryption (for Age mode).
/// </summary>
public string? AgePrivateKey { get; init; }
/// <summary>
/// Files to decrypt with their nonces.
/// </summary>
public required IReadOnlyList<BundleFileToDecrypt> Files { get; init; }
}
/// <summary>
/// A file to decrypt within a bundle.
/// </summary>
public sealed record BundleFileToDecrypt
{
/// <summary>
/// Relative path within the bundle (used for AAD validation).
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Source path of encrypted file.
/// </summary>
public required string SourcePath { get; init; }
/// <summary>
/// Destination path for decrypted content.
/// </summary>
public required string DestinationPath { get; init; }
/// <summary>
/// Nonce used during encryption (base64).
/// </summary>
public required string Nonce { get; init; }
/// <summary>
/// Expected plaintext hash for verification.
/// </summary>
public string? ExpectedHash { get; init; }
}
/// <summary>
/// Result of decrypting bundle content.
/// </summary>
public sealed record BundleDecryptResult
{
/// <summary>
/// Whether decryption succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Decrypted file results.
/// </summary>
public IReadOnlyList<DecryptedFileResult> DecryptedFiles { get; init; } = [];
/// <summary>
/// Error message if decryption failed.
/// </summary>
public string? ErrorMessage { get; init; }
public static BundleDecryptResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Result of decrypting a single file.
/// </summary>
public sealed record DecryptedFileResult
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Path to decrypted file.
/// </summary>
public required string DecryptedPath { get; init; }
/// <summary>
/// Whether hash verification passed.
/// </summary>
public bool HashVerified { get; init; }
/// <summary>
/// Computed hash of decrypted content.
/// </summary>
public string? ComputedHash { get; init; }
}
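/// <summary>
/// Illustrative helper (a sketch, not part of the service surface): maps the
/// results of an encryption pass to the decrypt entries a later restore needs.
/// The restoreRoot parameter is a hypothetical destination directory.
/// </summary>
internal static class BundleDecryptMappingSample
{
    public static IReadOnlyList<BundleFileToDecrypt> ToDecryptEntries(
        IReadOnlyList<EncryptedFileResult> encrypted,
        string restoreRoot) =>
        encrypted
            .Select(e => new BundleFileToDecrypt
            {
                // Relative path doubles as the AAD binding during decryption.
                RelativePath = e.RelativePath,
                SourcePath = e.EncryptedPath,
                DestinationPath = Path.Combine(restoreRoot, e.RelativePath),
                Nonce = e.Nonce,
                ExpectedHash = e.PlaintextHash
            })
            .ToList();
}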

View File

@@ -0,0 +1,443 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Default implementation of the bundle encryption service using AES-256-GCM.
/// </summary>
public sealed class BundleEncryptionService : IBundleEncryptionService
{
private const int DekSizeBytes = 32; // AES-256
private const int NonceSizeBytes = 12; // GCM nonce
private const int TagSizeBytes = 16; // GCM tag
private readonly IAgeKeyWrapper? _ageKeyWrapper;
private readonly IKmsKeyWrapper? _kmsKeyWrapper;
private readonly ICryptoHash _cryptoHash;
private readonly ILogger<BundleEncryptionService> _logger;
public BundleEncryptionService(
ICryptoHash cryptoHash,
ILogger<BundleEncryptionService> logger,
IAgeKeyWrapper? ageKeyWrapper = null,
IKmsKeyWrapper? kmsKeyWrapper = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_ageKeyWrapper = ageKeyWrapper;
_kmsKeyWrapper = kmsKeyWrapper;
}
/// <inheritdoc />
public async Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var validationErrors = ValidateOptions(request.Options);
if (validationErrors.Count > 0)
{
return BundleEncryptResult.Failed(
$"Invalid encryption options: {string.Join("; ", validationErrors)}");
}
if (request.Options.Mode == BundleEncryptionMode.None)
{
_logger.LogDebug("Encryption disabled, skipping");
return new BundleEncryptResult { Success = true };
}
_logger.LogInformation(
"Encrypting {FileCount} files for run {RunId} using {Mode}",
request.Files.Count, request.RunId, request.Options.Mode);
try
{
// Generate DEK
var dek = RandomNumberGenerator.GetBytes(DekSizeBytes);
try
{
// Wrap DEK for all recipients
var recipients = await WrapDekForRecipientsAsync(
dek, request, cancellationToken);
if (recipients.Count == 0)
{
return BundleEncryptResult.Failed("No recipients configured for key wrapping");
}
// Encrypt each file
var encryptedFiles = new List<EncryptedFileResult>();
foreach (var file in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await EncryptFileAsync(
file, dek, request.RunId, request.Options.AadFormat, cancellationToken);
encryptedFiles.Add(result);
}
var metadata = new BundleEncryptionMetadata
{
Mode = request.Options.Mode.ToString().ToLowerInvariant(),
AadFormat = request.Options.AadFormat,
Recipients = recipients
};
_logger.LogInformation(
"Encrypted {FileCount} files with {RecipientCount} recipients",
encryptedFiles.Count, recipients.Count);
return new BundleEncryptResult
{
Success = true,
EncryptedFiles = encryptedFiles,
Metadata = metadata
};
}
finally
{
// Zeroize DEK
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Encryption failed for run {RunId}", request.RunId);
return BundleEncryptResult.Failed($"Encryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Decrypting {FileCount} files for run {RunId}",
request.Files.Count, request.RunId);
try
{
// Find a recipient we can unwrap
var dek = await UnwrapDekAsync(request, cancellationToken);
if (dek is null)
{
return BundleDecryptResult.Failed("No matching key available for decryption");
}
try
{
// Decrypt each file
var decryptedFiles = new List<DecryptedFileResult>();
foreach (var file in request.Files)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await DecryptFileAsync(
file, dek, request.RunId, request.Metadata.AadFormat, cancellationToken);
decryptedFiles.Add(result);
}
_logger.LogInformation("Decrypted {FileCount} files", decryptedFiles.Count);
return new BundleDecryptResult
{
Success = true,
DecryptedFiles = decryptedFiles
};
}
finally
{
CryptographicOperations.ZeroMemory(dek);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Decryption failed for run {RunId}", request.RunId);
return BundleDecryptResult.Failed($"Decryption failed: {ex.Message}");
}
}
/// <inheritdoc />
public IReadOnlyList<string> ValidateOptions(BundleEncryptionOptions options)
{
var errors = new List<string>();
if (options.Mode == BundleEncryptionMode.None)
{
return errors;
}
if (options.Mode == BundleEncryptionMode.Age)
{
if (options.Recipients.Count == 0)
{
errors.Add("Age mode requires at least one recipient public key");
}
if (_ageKeyWrapper is null)
{
errors.Add("Age key wrapper not available");
}
else
{
foreach (var recipient in options.Recipients)
{
if (!_ageKeyWrapper.IsValidPublicKey(recipient))
{
errors.Add($"Invalid age public key: {recipient[..Math.Min(10, recipient.Length)]}...");
}
}
}
}
if (options.Mode == BundleEncryptionMode.AesGcmKms)
{
if (string.IsNullOrEmpty(options.KmsKeyId))
{
errors.Add("KMS mode requires a KMS key ID");
}
if (_kmsKeyWrapper is null)
{
errors.Add("KMS key wrapper not available");
}
}
if (string.IsNullOrWhiteSpace(options.AadFormat))
{
errors.Add("AAD format cannot be empty");
}
return errors;
}
private async Task<List<WrappedKeyRecipient>> WrapDekForRecipientsAsync(
byte[] dek,
BundleEncryptRequest request,
CancellationToken cancellationToken)
{
var recipients = new List<WrappedKeyRecipient>();
if (request.Options.Mode == BundleEncryptionMode.Age && _ageKeyWrapper is not null)
{
// Wrap for each age recipient (sorted for determinism)
foreach (var recipientKey in request.Options.Recipients.OrderBy(r => r, StringComparer.Ordinal))
{
var wrappedKey = _ageKeyWrapper.WrapKey(dek, recipientKey);
recipients.Add(new WrappedKeyRecipient
{
Type = "age",
Recipient = recipientKey,
WrappedKey = wrappedKey
});
}
}
if (request.Options.Mode == BundleEncryptionMode.AesGcmKms &&
_kmsKeyWrapper is not null &&
!string.IsNullOrEmpty(request.Options.KmsKeyId))
{
var context = new Dictionary<string, string>
{
["runId"] = request.RunId.ToString("D"),
["tenant"] = request.TenantId.ToString("D")
};
var result = await _kmsKeyWrapper.WrapKeyAsync(
dek, request.Options.KmsKeyId, context, cancellationToken);
recipients.Add(new WrappedKeyRecipient
{
Type = "kms",
KmsKeyId = request.Options.KmsKeyId,
WrappedKey = result.WrappedKey,
KeyId = result.KeyId,
Algorithm = result.Algorithm
});
}
return recipients;
}
private async Task<byte[]?> UnwrapDekAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken)
{
// Try age first if we have a private key
if (!string.IsNullOrEmpty(request.AgePrivateKey) && _ageKeyWrapper is not null)
{
var ageRecipient = request.Metadata.Recipients
.FirstOrDefault(r => r.Type == "age");
if (ageRecipient is not null)
{
try
{
return _ageKeyWrapper.UnwrapKey(ageRecipient.WrappedKey, request.AgePrivateKey);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to unwrap with age key, trying next method");
}
}
}
// Try KMS
if (_kmsKeyWrapper is not null)
{
var kmsRecipient = request.Metadata.Recipients
.FirstOrDefault(r => r.Type == "kms");
if (kmsRecipient is not null && !string.IsNullOrEmpty(kmsRecipient.KmsKeyId))
{
var context = new Dictionary<string, string>
{
["runId"] = request.RunId.ToString("D"),
["tenant"] = request.TenantId.ToString("D")
};
return await _kmsKeyWrapper.UnwrapKeyAsync(
kmsRecipient.WrappedKey, kmsRecipient.KmsKeyId, context, cancellationToken);
}
}
return null;
}
private async Task<EncryptedFileResult> EncryptFileAsync(
BundleFileToEncrypt file,
byte[] dek,
Guid runId,
string aadFormat,
CancellationToken cancellationToken)
{
// Read plaintext
var plaintext = await File.ReadAllBytesAsync(file.SourcePath, cancellationToken);
// Compute plaintext hash for verification
var plaintextHash = _cryptoHash.ComputeHashHexForPurpose(plaintext, HashPurpose.Content);
// Generate nonce
var nonce = RandomNumberGenerator.GetBytes(NonceSizeBytes);
// Compute AAD
var aad = ComputeAad(aadFormat, runId, file.RelativePath);
// Encrypt with AES-GCM
var ciphertext = new byte[plaintext.Length];
var tag = new byte[TagSizeBytes];
using (var aesGcm = new AesGcm(dek, TagSizeBytes))
{
aesGcm.Encrypt(nonce, plaintext, ciphertext, tag, aad);
}
// Write encrypted file: nonce + ciphertext + tag
var encryptedContent = new byte[NonceSizeBytes + ciphertext.Length + TagSizeBytes];
nonce.CopyTo(encryptedContent, 0);
ciphertext.CopyTo(encryptedContent, NonceSizeBytes);
tag.CopyTo(encryptedContent, NonceSizeBytes + ciphertext.Length);
// Ensure directory exists
var destDir = Path.GetDirectoryName(file.DestinationPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
await File.WriteAllBytesAsync(file.DestinationPath, encryptedContent, cancellationToken);
return new EncryptedFileResult
{
RelativePath = file.RelativePath,
EncryptedPath = file.DestinationPath,
Nonce = Convert.ToBase64String(nonce),
EncryptedSizeBytes = encryptedContent.Length,
PlaintextHash = plaintextHash
};
}
private async Task<DecryptedFileResult> DecryptFileAsync(
BundleFileToDecrypt file,
byte[] dek,
Guid runId,
string aadFormat,
CancellationToken cancellationToken)
{
// Read encrypted file
var encryptedContent = await File.ReadAllBytesAsync(file.SourcePath, cancellationToken);
if (encryptedContent.Length < NonceSizeBytes + TagSizeBytes)
{
throw new CryptographicException($"Encrypted file too small: {file.RelativePath}");
}
// Extract nonce, ciphertext, and tag
var nonce = encryptedContent.AsSpan(0, NonceSizeBytes);
var ciphertextLength = encryptedContent.Length - NonceSizeBytes - TagSizeBytes;
var ciphertext = encryptedContent.AsSpan(NonceSizeBytes, ciphertextLength);
var tag = encryptedContent.AsSpan(NonceSizeBytes + ciphertextLength, TagSizeBytes);
// Validate nonce matches expected
var expectedNonce = Convert.FromBase64String(file.Nonce);
if (!nonce.SequenceEqual(expectedNonce))
{
throw new CryptographicException($"Nonce mismatch for {file.RelativePath}");
}
// Compute AAD
var aad = ComputeAad(aadFormat, runId, file.RelativePath);
// Decrypt
var plaintext = new byte[ciphertextLength];
using (var aesGcm = new AesGcm(dek, TagSizeBytes))
{
aesGcm.Decrypt(nonce, ciphertext, tag, plaintext, aad);
}
// Ensure directory exists
var destDir = Path.GetDirectoryName(file.DestinationPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
await File.WriteAllBytesAsync(file.DestinationPath, plaintext, cancellationToken);
        // Verify hash when an expected value was supplied (absent => treated as verified)
var computedHash = _cryptoHash.ComputeHashHexForPurpose(plaintext, HashPurpose.Content);
var hashVerified = string.IsNullOrEmpty(file.ExpectedHash) ||
string.Equals(computedHash, file.ExpectedHash, StringComparison.OrdinalIgnoreCase);
if (!hashVerified)
{
_logger.LogWarning(
"Hash mismatch for {RelativePath}: expected {Expected}, got {Computed}",
file.RelativePath, file.ExpectedHash, computedHash);
}
return new DecryptedFileResult
{
RelativePath = file.RelativePath,
DecryptedPath = file.DestinationPath,
HashVerified = hashVerified,
ComputedHash = computedHash
};
}
private static byte[] ComputeAad(string aadFormat, Guid runId, string relativePath)
{
var aadString = aadFormat
.Replace("{runId}", runId.ToString("D"), StringComparison.OrdinalIgnoreCase)
.Replace("{relativePath}", relativePath, StringComparison.OrdinalIgnoreCase);
return Encoding.UTF8.GetBytes(aadString);
}
}
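/// <summary>
/// Decryption usage sketch (illustrative): restores files from a prior
/// EncryptAsync pass. The metadata, per-file nonces, and age key are assumed
/// to have been persisted alongside the bundle; all identifiers are
/// caller-supplied placeholders.
/// </summary>
internal static class BundleDecryptionUsageSample
{
    public static async Task RestoreAsync(
        IBundleEncryptionService service,
        BundleEncryptionMetadata metadata,
        IReadOnlyList<BundleFileToDecrypt> files,
        Guid runId,
        Guid tenantId,
        string agePrivateKey,
        CancellationToken cancellationToken)
    {
        var result = await service.DecryptAsync(new BundleDecryptRequest
        {
            RunId = runId,
            TenantId = tenantId,
            Metadata = metadata,
            AgePrivateKey = agePrivateKey,
            Files = files
        }, cancellationToken);

        if (!result.Success)
        {
            throw new InvalidOperationException(result.ErrorMessage);
        }

        // Hash mismatches are reported per file rather than failing the run,
        // so a strict caller has to check each result explicitly.
        foreach (var file in result.DecryptedFiles)
        {
            if (!file.HashVerified)
            {
                throw new CryptographicException($"Hash mismatch: {file.RelativePath}");
            }
        }
    }
}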

View File

@@ -0,0 +1,121 @@
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Service for encrypting and decrypting export bundle content.
/// </summary>
public interface IBundleEncryptionService
{
/// <summary>
/// Encrypts bundle files using the specified options.
/// </summary>
/// <param name="request">Encryption request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Encryption result with metadata for provenance.</returns>
Task<BundleEncryptResult> EncryptAsync(
BundleEncryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Decrypts bundle files using the specified metadata.
/// </summary>
/// <param name="request">Decryption request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Decryption result with verification status.</returns>
Task<BundleDecryptResult> DecryptAsync(
BundleDecryptRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates encryption options.
/// </summary>
/// <param name="options">Options to validate.</param>
/// <returns>List of validation errors (empty if valid).</returns>
IReadOnlyList<string> ValidateOptions(BundleEncryptionOptions options);
}
/// <summary>
/// Interface for age key operations (X25519).
/// </summary>
public interface IAgeKeyWrapper
{
/// <summary>
/// Wraps a DEK for an age recipient.
/// </summary>
/// <param name="dek">Data encryption key (32 bytes).</param>
/// <param name="recipientPublicKey">age public key (age1...).</param>
/// <returns>Wrapped key (base64).</returns>
string WrapKey(ReadOnlySpan<byte> dek, string recipientPublicKey);
/// <summary>
/// Unwraps a DEK using an age private key.
/// </summary>
/// <param name="wrappedKey">Wrapped key (base64).</param>
/// <param name="privateKey">age private key (AGE-SECRET-KEY-1...).</param>
/// <returns>Unwrapped DEK (32 bytes).</returns>
byte[] UnwrapKey(string wrappedKey, string privateKey);
/// <summary>
/// Validates an age public key format.
/// </summary>
bool IsValidPublicKey(string publicKey);
/// <summary>
/// Validates an age private key format.
/// </summary>
bool IsValidPrivateKey(string privateKey);
}
/// <summary>
/// Interface for KMS key wrapping operations.
/// </summary>
public interface IKmsKeyWrapper
{
/// <summary>
/// Wraps a DEK using KMS.
/// </summary>
/// <param name="dek">Data encryption key (32 bytes).</param>
/// <param name="kmsKeyId">KMS key identifier.</param>
/// <param name="encryptionContext">Encryption context for key binding.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Wrapped key result.</returns>
Task<KmsWrapResult> WrapKeyAsync(
ReadOnlyMemory<byte> dek,
string kmsKeyId,
IReadOnlyDictionary<string, string> encryptionContext,
CancellationToken cancellationToken = default);
/// <summary>
/// Unwraps a DEK using KMS.
/// </summary>
/// <param name="wrappedKey">Wrapped key (base64).</param>
/// <param name="kmsKeyId">KMS key identifier.</param>
/// <param name="encryptionContext">Encryption context for validation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Unwrapped DEK (32 bytes).</returns>
Task<byte[]> UnwrapKeyAsync(
string wrappedKey,
string kmsKeyId,
IReadOnlyDictionary<string, string> encryptionContext,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of KMS key wrapping.
/// </summary>
public sealed record KmsWrapResult
{
/// <summary>
/// Wrapped key (base64).
/// </summary>
public required string WrappedKey { get; init; }
/// <summary>
/// Algorithm used for wrapping.
/// </summary>
public required string Algorithm { get; init; }
/// <summary>
/// Key ID used (may differ from requested).
/// </summary>
public string? KeyId { get; init; }
}
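/// <summary>
/// Illustrative call shape for IKmsKeyWrapper (a sketch). The key id
/// "alias/export-dek" is a placeholder; the context keys mirror the ones
/// BundleEncryptionService binds (runId and tenant).
/// </summary>
internal static class KmsWrapUsageSample
{
    public static Task<KmsWrapResult> WrapForRunAsync(
        IKmsKeyWrapper kms,
        byte[] dek,
        Guid runId,
        Guid tenantId,
        CancellationToken cancellationToken)
    {
        var context = new Dictionary<string, string>
        {
            ["runId"] = runId.ToString("D"),
            ["tenant"] = tenantId.ToString("D")
        };
        return kms.WrapKeyAsync(dek, "alias/export-dek", context, cancellationToken);
    }
}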

View File

@@ -0,0 +1,164 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Encryption;
/// <summary>
/// Stub implementation of age key wrapper for testing.
/// In production, use a real age library or CLI-backed implementation.
/// </summary>
/// <remarks>
/// This stub simulates age-style key wrapping using X25519 + HKDF + ChaCha20-Poly1305.
/// For production use, integrate with the actual age specification or age CLI.
/// age public keys start with "age1" and private keys start with "AGE-SECRET-KEY-1".
/// </remarks>
public sealed class StubAgeKeyWrapper : IAgeKeyWrapper
{
private readonly ILogger<StubAgeKeyWrapper> _logger;
// For testing: store wrapped keys in a simple format
// Real implementation would use X25519 ECDH + HKDF + ChaCha20-Poly1305
private const string TestKeyPrefix = "age-wrapped:";
public StubAgeKeyWrapper(ILogger<StubAgeKeyWrapper> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string WrapKey(ReadOnlySpan<byte> dek, string recipientPublicKey)
{
if (!IsValidPublicKey(recipientPublicKey))
{
throw new ArgumentException("Invalid age public key format", nameof(recipientPublicKey));
}
// Stub: Simply encrypt with a derived key from the public key
// Real implementation would use X25519 ephemeral key exchange
_logger.LogDebug("Wrapping DEK for recipient {Recipient}", recipientPublicKey[..10] + "...");
// Use a simple wrapping scheme for testing:
// 1. Generate ephemeral key (simulated as random nonce)
// 2. Derive wrapping key from recipient public key (simulated)
// 3. Encrypt DEK with AES-256-GCM
var nonce = RandomNumberGenerator.GetBytes(12);
var tag = new byte[16];
var ciphertext = new byte[dek.Length];
// Derive a test wrapping key from the public key (NOT cryptographically secure - stub only)
using var sha256 = SHA256.Create();
var wrappingKey = sha256.ComputeHash(Encoding.UTF8.GetBytes(recipientPublicKey));
using var aesGcm = new AesGcm(wrappingKey, 16);
aesGcm.Encrypt(nonce, dek, ciphertext, tag);
// Format: nonce (12) + ciphertext (32) + tag (16) = 60 bytes
var wrapped = new byte[nonce.Length + ciphertext.Length + tag.Length];
nonce.CopyTo(wrapped, 0);
ciphertext.CopyTo(wrapped, nonce.Length);
tag.CopyTo(wrapped, nonce.Length + ciphertext.Length);
return TestKeyPrefix + Convert.ToBase64String(wrapped);
}
/// <inheritdoc />
public byte[] UnwrapKey(string wrappedKey, string privateKey)
{
if (!IsValidPrivateKey(privateKey))
{
throw new ArgumentException("Invalid age private key format", nameof(privateKey));
}
if (!wrappedKey.StartsWith(TestKeyPrefix, StringComparison.Ordinal))
{
throw new CryptographicException("Invalid wrapped key format");
}
_logger.LogDebug("Unwrapping DEK with private key");
var wrapped = Convert.FromBase64String(wrappedKey[TestKeyPrefix.Length..]);
if (wrapped.Length < 12 + 16) // nonce + tag minimum
{
throw new CryptographicException("Wrapped key too short");
}
var nonce = wrapped.AsSpan(0, 12);
var ciphertextLength = wrapped.Length - 12 - 16;
var ciphertext = wrapped.AsSpan(12, ciphertextLength);
var tag = wrapped.AsSpan(12 + ciphertextLength, 16);
// Derive wrapping key from corresponding public key
// In real implementation, derive from private key via X25519
var publicKey = DerivePublicKeyFromPrivate(privateKey);
using var sha256 = SHA256.Create();
var wrappingKey = sha256.ComputeHash(Encoding.UTF8.GetBytes(publicKey));
var dek = new byte[ciphertextLength];
using var aesGcm = new AesGcm(wrappingKey, 16);
aesGcm.Decrypt(nonce, ciphertext, tag, dek);
return dek;
}
/// <inheritdoc />
public bool IsValidPublicKey(string publicKey)
{
        // Real age public keys are "age1" + 58 bech32 chars (62 total); the stub
        // accepts length >= 59 so its own derived test keys (age1 + 55 hex chars) pass.
        return !string.IsNullOrEmpty(publicKey) &&
            publicKey.StartsWith("age1", StringComparison.Ordinal) &&
            publicKey.Length >= 59;
}
/// <inheritdoc />
public bool IsValidPrivateKey(string privateKey)
{
// age private keys: AGE-SECRET-KEY-1[58 bech32 chars]
return !string.IsNullOrEmpty(privateKey) &&
privateKey.StartsWith("AGE-SECRET-KEY-1", StringComparison.Ordinal) &&
privateKey.Length >= 74; // AGE-SECRET-KEY-1 + at least 58 chars
}
/// <summary>
/// Stub method to derive public key from private key.
/// Real implementation would use X25519 curve multiplication.
/// </summary>
private static string DerivePublicKeyFromPrivate(string privateKey)
{
// For testing: hash the private key to get a deterministic "public key"
// This is NOT how age works - it's just for stub testing
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(privateKey));
var suffix = Convert.ToHexString(hash).ToLowerInvariant()[..55];
return $"age1{suffix}";
}
}
/// <summary>
/// Test key pair generator for age-style keys.
/// </summary>
public static class TestAgeKeyGenerator
{
/// <summary>
/// Generates a test key pair for use with StubAgeKeyWrapper.
/// </summary>
/// <returns>A tuple of (publicKey, privateKey).</returns>
public static (string PublicKey, string PrivateKey) GenerateKeyPair()
{
var randomBytes = RandomNumberGenerator.GetBytes(32);
var hex = Convert.ToHexString(randomBytes).ToLowerInvariant();
// Generate a valid-looking private key
var privateKey = $"AGE-SECRET-KEY-1{hex}{hex[..26]}";
// Derive public key using same logic as StubAgeKeyWrapper
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(privateKey));
var suffix = Convert.ToHexString(hash).ToLowerInvariant()[..55];
var publicKey = $"age1{suffix}";
return (publicKey, privateKey);
}
}
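/// <summary>
/// Round-trip sketch for the stub wrapper (test-only; the keys come from
/// TestAgeKeyGenerator, not the real age tool).
/// </summary>
internal static class StubAgeRoundTripSample
{
    public static void Verify(ILogger<StubAgeKeyWrapper> logger)
    {
        var (publicKey, privateKey) = TestAgeKeyGenerator.GenerateKeyPair();
        var wrapper = new StubAgeKeyWrapper(logger);
        var dek = RandomNumberGenerator.GetBytes(32);

        var wrapped = wrapper.WrapKey(dek, publicKey);
        var unwrapped = wrapper.UnwrapKey(wrapped, privateKey);

        if (!unwrapped.AsSpan().SequenceEqual(dek))
        {
            throw new InvalidOperationException("Stub age round trip failed.");
        }
    }
}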

View File

@@ -0,0 +1,242 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Signature mode for export manifests.
/// </summary>
public enum ExportSignatureMode
{
/// <summary>
/// No signature.
/// </summary>
None = 0,
/// <summary>
/// Detached signature in a separate file.
/// </summary>
Detached = 1,
/// <summary>
/// Embedded signature within the manifest document.
/// </summary>
Embedded = 2,
/// <summary>
/// Both detached and embedded signatures.
/// </summary>
Both = 3
}
/// <summary>
/// Signing algorithm for export manifests.
/// </summary>
public enum ExportSigningAlgorithm
{
/// <summary>
/// HMAC-SHA256 signing.
/// </summary>
HmacSha256 = 1,
/// <summary>
/// ECDSA P-256 with SHA-256 (ES256).
/// </summary>
EcdsaP256Sha256 = 2,
/// <summary>
/// ECDSA P-384 with SHA-384 (ES384).
/// </summary>
EcdsaP384Sha384 = 3,
/// <summary>
/// RSA-PSS with SHA-256 (PS256).
/// </summary>
RsaPssSha256 = 4,
/// <summary>
/// EdDSA (Ed25519).
/// </summary>
EdDsa = 5
}
/// <summary>
/// Request to write an export manifest with optional signing.
/// </summary>
public sealed record ExportManifestWriteRequest(
Guid ExportId,
Guid TenantId,
ExportManifestContent ManifestContent,
ExportProvenanceContent ProvenanceContent,
ExportManifestSigningOptions? SigningOptions = null,
string? OutputDirectory = null,
IReadOnlyDictionary<string, string>? Metadata = null);
/// <summary>
/// Signing options for export manifests.
/// </summary>
public sealed record ExportManifestSigningOptions(
ExportSignatureMode Mode,
ExportSigningAlgorithm Algorithm,
string KeyId,
string? ProviderHint = null,
string? Secret = null);
/// <summary>
/// Content of an export manifest.
/// </summary>
public sealed record ExportManifestContent(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("profile")] ExportManifestProfile Profile,
[property: JsonPropertyName("scope")] ExportManifestScope Scope,
[property: JsonPropertyName("counts")] ExportManifestCounts Counts,
[property: JsonPropertyName("artifacts")] IReadOnlyList<ExportManifestArtifact> Artifacts,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("signature")] ExportManifestSignature? Signature = null);
/// <summary>
/// Export profile metadata in manifest.
/// </summary>
public sealed record ExportManifestProfile(
[property: JsonPropertyName("profileId")] string? ProfileId,
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("variant")] string? Variant);
/// <summary>
/// Scope metadata in manifest.
/// </summary>
public sealed record ExportManifestScope(
[property: JsonPropertyName("kinds")] IReadOnlyList<string> Kinds,
[property: JsonPropertyName("sourceRefs")] IReadOnlyList<string>? SourceRefs,
[property: JsonPropertyName("timeWindow")] ExportManifestTimeWindow? TimeWindow,
[property: JsonPropertyName("ecosystems")] IReadOnlyList<string>? Ecosystems);
/// <summary>
/// Time window in manifest scope.
/// </summary>
public sealed record ExportManifestTimeWindow(
[property: JsonPropertyName("from")] DateTimeOffset From,
[property: JsonPropertyName("to")] DateTimeOffset To);
/// <summary>
/// Counts in manifest.
/// </summary>
public sealed record ExportManifestCounts(
[property: JsonPropertyName("total")] int Total,
[property: JsonPropertyName("successful")] int Successful,
[property: JsonPropertyName("failed")] int Failed,
[property: JsonPropertyName("skipped")] int Skipped,
[property: JsonPropertyName("byKind")] IReadOnlyDictionary<string, int> ByKind);
/// <summary>
/// Artifact entry in manifest.
/// </summary>
public sealed record ExportManifestArtifact(
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sha256")] string Sha256,
[property: JsonPropertyName("sizeBytes")] long SizeBytes,
[property: JsonPropertyName("contentType")] string ContentType,
[property: JsonPropertyName("category")] string? Category);
/// <summary>
/// Embedded signature in manifest.
/// </summary>
public sealed record ExportManifestSignature(
[property: JsonPropertyName("algorithm")] string Algorithm,
[property: JsonPropertyName("keyId")] string KeyId,
[property: JsonPropertyName("value")] string Value,
[property: JsonPropertyName("signedAt")] DateTimeOffset SignedAt,
[property: JsonPropertyName("provider")] string? Provider);
/// <summary>
/// Content of export provenance document.
/// </summary>
public sealed record ExportProvenanceContent(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("subjects")] IReadOnlyList<ExportProvenanceSubject> Subjects,
[property: JsonPropertyName("inputs")] ExportProvenanceInputs Inputs,
[property: JsonPropertyName("builder")] ExportProvenanceBuilder Builder,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("signature")] ExportManifestSignature? Signature = null);
/// <summary>
/// Subject in provenance document.
/// </summary>
public sealed record ExportProvenanceSubject(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("digest")] IReadOnlyDictionary<string, string> Digest);
/// <summary>
/// Inputs in provenance document.
/// </summary>
public sealed record ExportProvenanceInputs(
[property: JsonPropertyName("profileId")] string? ProfileId,
[property: JsonPropertyName("scopeKinds")] IReadOnlyList<string> ScopeKinds,
[property: JsonPropertyName("sourceRefs")] IReadOnlyList<string>? SourceRefs,
[property: JsonPropertyName("correlationId")] string? CorrelationId);
/// <summary>
/// Builder info in provenance document.
/// </summary>
public sealed record ExportProvenanceBuilder(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("buildTimestamp")] DateTimeOffset? BuildTimestamp);
/// <summary>
/// Result of writing export manifest.
/// </summary>
public sealed record ExportManifestWriteResult
{
public bool Success { get; init; }
public string? ErrorMessage { get; init; }
public string? ManifestPath { get; init; }
public string? ManifestJson { get; init; }
public string? ProvenancePath { get; init; }
public string? ProvenanceJson { get; init; }
public string? DetachedSignaturePath { get; init; }
public ExportManifestSignature? ManifestSignature { get; init; }
public ExportManifestSignature? ProvenanceSignature { get; init; }
public static ExportManifestWriteResult Succeeded(
string manifestPath,
string manifestJson,
string provenancePath,
string provenanceJson,
string? detachedSignaturePath = null,
ExportManifestSignature? manifestSignature = null,
ExportManifestSignature? provenanceSignature = null) =>
new()
{
Success = true,
ManifestPath = manifestPath,
ManifestJson = manifestJson,
ProvenancePath = provenancePath,
ProvenanceJson = provenanceJson,
DetachedSignaturePath = detachedSignaturePath,
ManifestSignature = manifestSignature,
ProvenanceSignature = provenanceSignature
};
public static ExportManifestWriteResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// DSSE envelope for detached signatures.
/// </summary>
public sealed record ExportManifestDsseEnvelope(
[property: JsonPropertyName("payloadType")] string PayloadType,
[property: JsonPropertyName("payload")] string Payload,
[property: JsonPropertyName("signatures")] IReadOnlyList<ExportManifestDsseSignatureEntry> Signatures);
/// <summary>
/// Signature entry in DSSE envelope.
/// </summary>
public sealed record ExportManifestDsseSignatureEntry(
[property: JsonPropertyName("sig")] string Signature,
[property: JsonPropertyName("keyid")] string KeyId);
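/// <summary>
/// Minimal construction sketch (illustrative): the kind, paths, digests, and
/// root hash below are placeholder values, not produced by the pipeline.
/// </summary>
internal static class ExportManifestContentSample
{
    public static ExportManifestContent Build(Guid exportId, Guid tenantId, DateTimeOffset now) =>
        new(
            Version: "1.0",
            ExportId: exportId.ToString("D"),
            TenantId: tenantId.ToString("D"),
            Profile: new ExportManifestProfile(ProfileId: null, Kind: "mirror", Variant: null),
            Scope: new ExportManifestScope(
                Kinds: ["advisories"],
                SourceRefs: null,
                TimeWindow: null,
                Ecosystems: null),
            Counts: new ExportManifestCounts(
                Total: 1,
                Successful: 1,
                Failed: 0,
                Skipped: 0,
                ByKind: new Dictionary<string, int> { ["advisories"] = 1 }),
            Artifacts:
            [
                new ExportManifestArtifact(
                    Path: "data/advisories.json",
                    Sha256: "<sha256-of-artifact>",
                    SizeBytes: 1024,
                    ContentType: "application/json",
                    Category: "data")
            ],
            CreatedAt: now,
            RootHash: "<merkle-root-hash>");
}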

View File

@@ -0,0 +1,397 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Default implementation of export manifest writer with KMS and HMAC signing support.
/// </summary>
public sealed class ExportManifestWriter : IExportManifestWriter
{
private const string ManifestPayloadType = "application/vnd.stellaops.export.manifest+json";
private const string ProvenancePayloadType = "application/vnd.stellaops.export.provenance+json";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ILogger<ExportManifestWriter> _logger;
private readonly ICryptoProviderRegistry? _cryptoRegistry;
private readonly ICryptoHmac? _cryptoHmac;
private readonly TimeProvider _timeProvider;
public ExportManifestWriter(
ILogger<ExportManifestWriter> logger,
ICryptoProviderRegistry? cryptoRegistry = null,
ICryptoHmac? cryptoHmac = null,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cryptoRegistry = cryptoRegistry;
_cryptoHmac = cryptoHmac;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
public async Task<ExportManifestWriteResult> WriteAsync(
ExportManifestWriteRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
_logger.LogDebug("Writing export manifest for export {ExportId}", request.ExportId);
// Serialize manifest and provenance
var manifestJson = JsonSerializer.Serialize(request.ManifestContent, SerializerOptions);
var provenanceJson = JsonSerializer.Serialize(request.ProvenanceContent, SerializerOptions);
ExportManifestSignature? manifestSig = null;
ExportManifestSignature? provenanceSig = null;
string? detachedSignaturePath = null;
// Apply signing if requested
if (request.SigningOptions is not null && request.SigningOptions.Mode != ExportSignatureMode.None)
{
var signer = CreateSigner(request.SigningOptions);
// Sign manifest
var manifestEnvelope = await SignContentAsync(
manifestJson,
ManifestPayloadType,
signer,
cancellationToken);
// Sign provenance
var provenanceEnvelope = await SignContentAsync(
provenanceJson,
ProvenancePayloadType,
signer,
cancellationToken);
var signedAt = _timeProvider.GetUtcNow();
manifestSig = new ExportManifestSignature(
signer.Algorithm,
signer.KeyId,
manifestEnvelope.Signatures[0].Signature,
signedAt,
signer.Provider);
provenanceSig = new ExportManifestSignature(
signer.Algorithm,
signer.KeyId,
provenanceEnvelope.Signatures[0].Signature,
signedAt,
signer.Provider);
// Write detached signatures if requested
if (request.SigningOptions.Mode is ExportSignatureMode.Detached or ExportSignatureMode.Both)
{
if (!string.IsNullOrWhiteSpace(request.OutputDirectory))
{
var signaturePath = Path.Combine(
request.OutputDirectory,
$"export-{request.ExportId:N}-signatures.dsse.json");
var combinedEnvelope = new
{
manifestSignature = manifestEnvelope,
provenanceSignature = provenanceEnvelope,
signedAt,
keyId = signer.KeyId,
algorithm = signer.Algorithm,
provider = signer.Provider
};
await File.WriteAllTextAsync(
signaturePath,
JsonSerializer.Serialize(combinedEnvelope, SerializerOptions),
cancellationToken);
detachedSignaturePath = signaturePath;
}
}
// Embed signatures if requested
if (request.SigningOptions.Mode is ExportSignatureMode.Embedded or ExportSignatureMode.Both)
{
var manifestWithSig = request.ManifestContent with { Signature = manifestSig };
var provenanceWithSig = request.ProvenanceContent with { Signature = provenanceSig };
manifestJson = JsonSerializer.Serialize(manifestWithSig, SerializerOptions);
provenanceJson = JsonSerializer.Serialize(provenanceWithSig, SerializerOptions);
}
}
// Write files if output directory specified
string manifestPath = string.Empty;
string provenancePath = string.Empty;
if (!string.IsNullOrWhiteSpace(request.OutputDirectory))
{
Directory.CreateDirectory(request.OutputDirectory);
manifestPath = Path.Combine(request.OutputDirectory, "export-manifest.json");
provenancePath = Path.Combine(request.OutputDirectory, "export-provenance.json");
await File.WriteAllTextAsync(manifestPath, manifestJson, cancellationToken);
await File.WriteAllTextAsync(provenancePath, provenanceJson, cancellationToken);
}
_logger.LogInformation(
"Export manifest written for {ExportId} with signature mode {Mode}",
request.ExportId,
request.SigningOptions?.Mode ?? ExportSignatureMode.None);
return ExportManifestWriteResult.Succeeded(
manifestPath,
manifestJson,
provenancePath,
provenanceJson,
detachedSignaturePath,
manifestSig,
provenanceSig);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to write export manifest for {ExportId}", request.ExportId);
return ExportManifestWriteResult.Failed($"Failed to write manifest: {ex.Message}");
}
}
/// <inheritdoc/>
public async Task<ExportManifestDsseEnvelope> SignManifestAsync(
string manifestJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(manifestJson);
ArgumentNullException.ThrowIfNull(signingOptions);
var signer = CreateSigner(signingOptions);
return await SignContentAsync(manifestJson, ManifestPayloadType, signer, cancellationToken);
}
/// <inheritdoc/>
public async Task<ExportManifestDsseEnvelope> SignProvenanceAsync(
string provenanceJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(provenanceJson);
ArgumentNullException.ThrowIfNull(signingOptions);
var signer = CreateSigner(signingOptions);
return await SignContentAsync(provenanceJson, ProvenancePayloadType, signer, cancellationToken);
}
/// <inheritdoc/>
public async Task<bool> VerifySignatureAsync(
string content,
ExportManifestDsseEnvelope envelope,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(signingOptions);
try
{
var signer = CreateSigner(signingOptions);
var pae = BuildPae(envelope.PayloadType, Encoding.UTF8.GetBytes(content));
foreach (var sig in envelope.Signatures)
{
var sigBytes = Convert.FromBase64String(sig.Signature);
if (await signer.VerifyAsync(pae, sigBytes, cancellationToken))
{
return true;
}
}
return false;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Signature verification failed");
return false;
}
}
private IExportManifestSigner CreateSigner(ExportManifestSigningOptions options)
{
return options.Algorithm switch
{
ExportSigningAlgorithm.HmacSha256 => CreateHmacSigner(options),
ExportSigningAlgorithm.EcdsaP256Sha256 => CreateKmsSigner(options, "ES256"),
ExportSigningAlgorithm.EcdsaP384Sha384 => CreateKmsSigner(options, "ES384"),
ExportSigningAlgorithm.RsaPssSha256 => CreateKmsSigner(options, "PS256"),
ExportSigningAlgorithm.EdDsa => CreateKmsSigner(options, "EdDSA"),
_ => throw new NotSupportedException($"Signing algorithm '{options.Algorithm}' is not supported.")
};
}
private IExportManifestSigner CreateHmacSigner(ExportManifestSigningOptions options)
{
if (_cryptoHmac is null)
{
throw new InvalidOperationException("HMAC signing requires ICryptoHmac to be configured.");
}
if (string.IsNullOrWhiteSpace(options.Secret))
{
throw new ArgumentException("HMAC signing requires a secret key.", nameof(options));
}
return new HmacExportManifestSigner(_cryptoHmac, options.Secret, options.KeyId);
}
private IExportManifestSigner CreateKmsSigner(ExportManifestSigningOptions options, string algorithmId)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException(
"KMS signing requires ICryptoProviderRegistry to be configured.");
}
var keyRef = new CryptoKeyReference(options.KeyId, options.ProviderHint);
var resolution = _cryptoRegistry.ResolveSigner(
CryptoCapability.Signing,
algorithmId,
keyRef,
options.ProviderHint);
return new KmsExportManifestSigner(resolution.Signer, resolution.ProviderName);
}
private async Task<ExportManifestDsseEnvelope> SignContentAsync(
string content,
string payloadType,
IExportManifestSigner signer,
CancellationToken cancellationToken)
{
var contentBytes = Encoding.UTF8.GetBytes(content);
var pae = BuildPae(payloadType, contentBytes);
var signature = await signer.SignAsync(pae, cancellationToken);
var signatureBase64 = Convert.ToBase64String(signature);
return new ExportManifestDsseEnvelope(
payloadType,
Convert.ToBase64String(contentBytes),
[new ExportManifestDsseSignatureEntry(signatureBase64, signer.KeyId)]);
}
/// <summary>
/// Builds DSSE Pre-Authentication Encoding (PAE).
/// PAE = "DSSEv1" + SP + LEN(payloadType) + SP + payloadType + SP + LEN(payload) + SP + payload
/// </summary>
private static byte[] BuildPae(string payloadType, byte[] payload)
{
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
var preamble = Encoding.UTF8.GetBytes("DSSEv1 ");
var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture);
var payloadLenStr = payload.Length.ToString(CultureInfo.InvariantCulture);
var result = new List<byte>(
preamble.Length +
typeLenStr.Length + 1 +
typeBytes.Length + 1 +
payloadLenStr.Length + 1 +
payload.Length);
result.AddRange(preamble);
result.AddRange(Encoding.UTF8.GetBytes(typeLenStr));
result.Add(0x20); // space
result.AddRange(typeBytes);
result.Add(0x20); // space
result.AddRange(Encoding.UTF8.GetBytes(payloadLenStr));
result.Add(0x20); // space
result.AddRange(payload);
return result.ToArray();
}
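    // Worked example: payloadType "app/x" (5 bytes) and payload "hello" (5 bytes)
    // produce the PAE bytes of the ASCII string: "DSSEv1 5 app/x 5 hello".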
}
/// <summary>
/// HMAC-based export manifest signer.
/// </summary>
internal sealed class HmacExportManifestSigner : IExportManifestSigner
{
private readonly ICryptoHmac _cryptoHmac;
private readonly byte[] _key;
public HmacExportManifestSigner(ICryptoHmac cryptoHmac, string secret, string keyId)
{
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
if (string.IsNullOrWhiteSpace(secret))
{
throw new ArgumentException("Secret cannot be empty.", nameof(secret));
}
_key = Encoding.UTF8.GetBytes(secret);
KeyId = string.IsNullOrWhiteSpace(keyId) ? "hmac-sha256" : keyId;
}
public string KeyId { get; }
public string Algorithm => "HMAC-SHA256";
public string? Provider => "HMAC";
public Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
var signature = _cryptoHmac.ComputeHmacForPurpose(_key, data.Span, HmacPurpose.Signing);
return Task.FromResult(signature);
}
public Task<bool> VerifyAsync(
ReadOnlyMemory<byte> data,
ReadOnlyMemory<byte> signature,
CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
        var expected = _cryptoHmac.ComputeHmacForPurpose(_key, data.Span, HmacPurpose.Signing);
        // Constant-time comparison: avoids leaking the expected MAC through timing.
        return Task.FromResult(CryptographicOperations.FixedTimeEquals(expected, signature.Span));
}
}
/// <summary>
/// KMS-backed export manifest signer using ICryptoProviderRegistry.
/// </summary>
internal sealed class KmsExportManifestSigner : IExportManifestSigner
{
private readonly ICryptoSigner _signer;
public KmsExportManifestSigner(ICryptoSigner signer, string providerName)
{
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
Provider = providerName;
}
public string KeyId => _signer.KeyId;
public string Algorithm => _signer.AlgorithmId;
public string? Provider { get; }
public async Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
return await _signer.SignAsync(data, cancellationToken);
}
public async Task<bool> VerifyAsync(
ReadOnlyMemory<byte> data,
ReadOnlyMemory<byte> signature,
CancellationToken cancellationToken = default)
{
return await _signer.VerifyAsync(data, signature, cancellationToken);
}
}
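/// <summary>
/// Signing usage sketch (illustrative): the key id and secret are placeholders;
/// in practice the secret comes from configuration, never a literal.
/// </summary>
internal static class ManifestSigningUsageSample
{
    public static async Task<bool> SignAndVerifyAsync(
        IExportManifestWriter writer,
        string manifestJson,
        CancellationToken cancellationToken)
    {
        var options = new ExportManifestSigningOptions(
            Mode: ExportSignatureMode.Detached,
            Algorithm: ExportSigningAlgorithm.HmacSha256,
            KeyId: "export-hmac-key",
            Secret: "<hmac-secret>");

        var envelope = await writer.SignManifestAsync(manifestJson, options, cancellationToken);
        return await writer.VerifySignatureAsync(manifestJson, envelope, options, cancellationToken);
    }
}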

View File

@@ -0,0 +1,93 @@
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Interface for writing export manifests and provenance documents with optional signing.
/// </summary>
public interface IExportManifestWriter
{
/// <summary>
/// Writes manifest and provenance documents with optional signing.
/// </summary>
/// <param name="request">The write request with manifest/provenance content and signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Write result with paths and signatures.</returns>
Task<ExportManifestWriteResult> WriteAsync(
ExportManifestWriteRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Signs manifest content and returns a DSSE envelope.
/// </summary>
/// <param name="manifestJson">The manifest JSON to sign.</param>
/// <param name="signingOptions">Signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>DSSE envelope with signature.</returns>
Task<ExportManifestDsseEnvelope> SignManifestAsync(
string manifestJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
/// <summary>
/// Signs provenance content and returns a DSSE envelope.
/// </summary>
/// <param name="provenanceJson">The provenance JSON to sign.</param>
/// <param name="signingOptions">Signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>DSSE envelope with signature.</returns>
Task<ExportManifestDsseEnvelope> SignProvenanceAsync(
string provenanceJson,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE signature against content.
/// </summary>
/// <param name="content">The original content that was signed.</param>
/// <param name="envelope">The DSSE envelope with signature.</param>
/// <param name="signingOptions">Signing options for verification.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if signature is valid.</returns>
Task<bool> VerifySignatureAsync(
string content,
ExportManifestDsseEnvelope envelope,
ExportManifestSigningOptions signingOptions,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for export manifest signing operations.
/// </summary>
public interface IExportManifestSigner
{
/// <summary>
/// Signs data using the configured algorithm and key.
/// </summary>
/// <param name="data">Data to sign.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Signature bytes.</returns>
Task<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a signature against data.
/// </summary>
/// <param name="data">Original data.</param>
/// <param name="signature">Signature to verify.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if valid.</returns>
Task<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the key ID for this signer.
/// </summary>
string KeyId { get; }
/// <summary>
/// Gets the algorithm name for this signer.
/// </summary>
string Algorithm { get; }
/// <summary>
/// Gets the provider name for this signer.
/// </summary>
string? Provider { get; }
}

View File

@@ -0,0 +1,69 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.Manifest;
/// <summary>
/// Extension methods for registering manifest writer services.
/// </summary>
public static class ManifestServiceCollectionExtensions
{
/// <summary>
/// Registers the export manifest writer with default configuration.
/// </summary>
public static IServiceCollection AddExportManifestWriter(this IServiceCollection services)
{
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var cryptoRegistry = sp.GetService<ICryptoProviderRegistry>();
var cryptoHmac = sp.GetService<ICryptoHmac>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry, cryptoHmac, timeProvider);
});
return services;
}
/// <summary>
/// Registers the export manifest writer with HMAC signing support only.
/// </summary>
public static IServiceCollection AddExportManifestWriterWithHmac(
this IServiceCollection services,
ICryptoHmac cryptoHmac)
{
ArgumentNullException.ThrowIfNull(cryptoHmac);
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry: null, cryptoHmac, timeProvider);
});
return services;
}
/// <summary>
/// Registers the export manifest writer with KMS signing support only.
/// </summary>
public static IServiceCollection AddExportManifestWriterWithKms(
this IServiceCollection services,
ICryptoProviderRegistry cryptoRegistry)
{
ArgumentNullException.ThrowIfNull(cryptoRegistry);
services.AddSingleton<IExportManifestWriter>(sp =>
{
var logger = sp.GetRequiredService<ILogger<ExportManifestWriter>>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return new ExportManifestWriter(logger, cryptoRegistry, cryptoHmac: null, timeProvider);
});
return services;
}
}
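/// <summary>
/// Registration sketch (illustrative): with the default overload, HMAC and KMS
/// support light up only when ICryptoHmac / ICryptoProviderRegistry happen to
/// be present in the container.
/// </summary>
internal static class ManifestWriterRegistrationSample
{
    public static IServiceCollection AddManifestWriting(IServiceCollection services)
    {
        services.AddLogging();
        return services.AddExportManifestWriter();
    }
}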

View File

@@ -0,0 +1,305 @@
using System.Collections.Concurrent;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// In-memory implementation of the base manifest store for testing and simple deployments.
/// </summary>
public sealed class InMemoryMirrorBaseManifestStore : IMirrorBaseManifestStore
{
private readonly ConcurrentDictionary<string, ManifestRecord> _manifests = new(StringComparer.OrdinalIgnoreCase);
private static string GetKey(Guid runId, Guid tenantId) => $"{tenantId:D}:{runId:D}";
/// <inheritdoc />
public Task<IReadOnlyList<MirrorBaseManifestEntry>?> GetBaseManifestEntriesAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
if (_manifests.TryGetValue(key, out var record))
{
return Task.FromResult<IReadOnlyList<MirrorBaseManifestEntry>?>(record.Entries);
}
return Task.FromResult<IReadOnlyList<MirrorBaseManifestEntry>?>(null);
}
/// <inheritdoc />
public Task<string?> GetManifestDigestAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
if (_manifests.TryGetValue(key, out var record))
{
return Task.FromResult<string?>(record.Digest);
}
return Task.FromResult<string?>(null);
}
/// <inheritdoc />
public Task SaveManifestEntriesAsync(
Guid runId,
Guid tenantId,
string manifestDigest,
IReadOnlyList<MirrorBaseManifestEntry> entries,
CancellationToken cancellationToken = default)
{
var key = GetKey(runId, tenantId);
_manifests[key] = new ManifestRecord(manifestDigest, entries);
return Task.CompletedTask;
}
/// <summary>
/// Clears all stored manifests (for testing).
/// </summary>
public void Clear() => _manifests.Clear();
/// <summary>
/// Gets the number of stored manifests.
/// </summary>
public int Count => _manifests.Count;
private sealed record ManifestRecord(string Digest, IReadOnlyList<MirrorBaseManifestEntry> Entries);
}
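/// <summary>
/// Usage sketch (illustrative): persist the manifest of a completed run so the
/// next delta computation can diff against it, then read the digest back.
/// </summary>
internal static class BaseManifestStoreUsageSample
{
    public static async Task<string?> SaveAndReadDigestAsync(
        IMirrorBaseManifestStore store,
        Guid runId,
        Guid tenantId,
        string manifestDigest,
        IReadOnlyList<MirrorBaseManifestEntry> entries,
        CancellationToken cancellationToken)
    {
        await store.SaveManifestEntriesAsync(runId, tenantId, manifestDigest, entries, cancellationToken);
        return await store.GetManifestDigestAsync(runId, tenantId, cancellationToken);
    }
}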
/// <summary>
/// In-memory implementation of the content store for testing and simple deployments.
/// </summary>
public sealed class InMemoryMirrorContentStore : IMirrorContentStore
{
private readonly ConcurrentDictionary<string, byte[]> _content = new(StringComparer.OrdinalIgnoreCase);
private readonly ICryptoHash _cryptoHash;
private readonly string _tempDirectory;
public InMemoryMirrorContentStore(ICryptoHash cryptoHash, string? tempDirectory = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_tempDirectory = tempDirectory ?? Path.Combine(Path.GetTempPath(), "mirror-content-store");
Directory.CreateDirectory(_tempDirectory);
}
/// <inheritdoc />
public Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default)
{
return Task.FromResult(_content.ContainsKey(contentHash));
}
/// <inheritdoc />
public Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default)
{
if (_content.TryGetValue(contentHash, out var bytes))
{
return Task.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
}
return Task.FromResult<Stream?>(null);
}
/// <inheritdoc />
public async Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
using var ms = new MemoryStream();
await content.CopyToAsync(ms, cancellationToken);
var bytes = ms.ToArray();
var hash = _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
if (!string.IsNullOrEmpty(expectedHash) &&
!string.Equals(hash, expectedHash, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Content hash mismatch: expected {expectedHash}, computed {hash}");
}
_content[hash] = bytes;
// Also write to temp file for GetLocalPath
var localPath = Path.Combine(_tempDirectory, hash);
await File.WriteAllBytesAsync(localPath, bytes, cancellationToken);
return hash;
}
/// <inheritdoc />
public string? GetLocalPath(string contentHash)
{
var path = Path.Combine(_tempDirectory, contentHash);
return File.Exists(path) ? path : null;
}
/// <summary>
/// Clears all stored content (for testing).
/// </summary>
public void Clear()
{
_content.Clear();
if (Directory.Exists(_tempDirectory))
{
foreach (var file in Directory.GetFiles(_tempDirectory))
{
try { File.Delete(file); } catch { /* ignore */ }
}
}
}
/// <summary>
/// Gets the number of stored content items.
/// </summary>
public int Count => _content.Count;
}
/// <summary>
/// Filesystem-based implementation of the content store for production use.
/// Uses content-addressable storage with SHA-256 hashes.
/// </summary>
public sealed class FileSystemMirrorContentStore : IMirrorContentStore, IDisposable
{
private readonly string _storePath;
private readonly ICryptoHash _cryptoHash;
private readonly bool _ownsDirectory;
public FileSystemMirrorContentStore(string storePath, ICryptoHash cryptoHash, bool createIfMissing = true)
{
_storePath = storePath ?? throw new ArgumentNullException(nameof(storePath));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
if (!Directory.Exists(_storePath))
{
if (createIfMissing)
{
Directory.CreateDirectory(_storePath);
_ownsDirectory = true;
}
else
{
throw new DirectoryNotFoundException($"Content store directory not found: {_storePath}");
}
}
}
/// <inheritdoc />
public Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default)
{
var path = GetContentPath(contentHash);
return Task.FromResult(File.Exists(path));
}
/// <inheritdoc />
public Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default)
{
var path = GetContentPath(contentHash);
if (!File.Exists(path))
{
return Task.FromResult<Stream?>(null);
}
var stream = new FileStream(
path,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 64 * 1024,
FileOptions.Asynchronous | FileOptions.SequentialScan);
return Task.FromResult<Stream?>(stream);
}
/// <inheritdoc />
public async Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
// Write to temp file first
var tempPath = Path.Combine(_storePath, $".tmp-{Guid.NewGuid():N}");
try
{
await using (var tempStream = new FileStream(
tempPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 64 * 1024,
FileOptions.Asynchronous))
{
await content.CopyToAsync(tempStream, cancellationToken);
}
            // Compute hash (loads the temp file fully into memory; acceptable for
            // typical artifact sizes since ICryptoHash hashes byte buffers)
            var bytes = await File.ReadAllBytesAsync(tempPath, cancellationToken);
var hash = _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
if (!string.IsNullOrEmpty(expectedHash) &&
!string.Equals(hash, expectedHash, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Content hash mismatch: expected {expectedHash}, computed {hash}");
}
// Move to final location
var finalPath = GetContentPath(hash);
EnsureDirectoryExists(finalPath);
if (File.Exists(finalPath))
{
// Content already exists, just delete temp
File.Delete(tempPath);
}
else
{
File.Move(tempPath, finalPath);
}
return hash;
}
catch
{
try { File.Delete(tempPath); } catch { /* ignore */ }
throw;
}
}
/// <inheritdoc />
public string? GetLocalPath(string contentHash)
{
var path = GetContentPath(contentHash);
return File.Exists(path) ? path : null;
}
private string GetContentPath(string contentHash)
{
// Use sharded directory structure: first 2 chars / next 2 chars / full hash
if (contentHash.Length < 4)
{
return Path.Combine(_storePath, contentHash);
}
return Path.Combine(
_storePath,
contentHash[..2],
contentHash[2..4],
contentHash);
}
private static void EnsureDirectoryExists(string filePath)
{
var dir = Path.GetDirectoryName(filePath);
if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir))
{
Directory.CreateDirectory(dir);
}
}
public void Dispose()
{
// Only clean up if we created the directory and it's a temp directory
if (_ownsDirectory && _storePath.Contains("tmp", StringComparison.OrdinalIgnoreCase))
{
try { Directory.Delete(_storePath, recursive: true); } catch { /* ignore */ }
}
}
}
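/// <summary>
/// Content-addressed round trip (illustrative): the store root and source file
/// are placeholder paths. Content lands at root/h[0..2]/h[2..4]/hash, and
/// re-storing identical bytes is a no-op.
/// </summary>
internal static class ContentStoreUsageSample
{
    public static async Task<string> StoreAndResolveAsync(
        ICryptoHash cryptoHash,
        CancellationToken cancellationToken)
    {
        using var store = new FileSystemMirrorContentStore("/var/lib/mirror/cas", cryptoHash);
        await using var input = File.OpenRead("advisories.json");

        var hash = await store.StoreAsync(input, cancellationToken: cancellationToken);
        return store.GetLocalPath(hash)
            ?? throw new FileNotFoundException("Stored content not found on disk.", hash);
    }
}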

View File

@@ -0,0 +1,304 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Request to compute a delta between a base export and current items.
/// </summary>
public sealed record MirrorDeltaComputeRequest
{
/// <summary>
/// Base export run ID to compare against.
/// </summary>
public required Guid BaseRunId { get; init; }
/// <summary>
/// Base manifest digest for validation.
/// </summary>
public required string BaseManifestDigest { get; init; }
/// <summary>
/// Tenant ID for scoping.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Current items to compare with base.
/// </summary>
public required IReadOnlyList<MirrorDeltaItem> CurrentItems { get; init; }
/// <summary>
/// Whether to reset the baseline (include all items regardless of changes).
/// </summary>
public bool ResetBaseline { get; init; }
}
/// <summary>
/// Item for delta comparison.
/// </summary>
public sealed record MirrorDeltaItem
{
/// <summary>
/// Unique item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Content-addressable hash (SHA-256) of the item.
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// Path within the bundle.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Last modified timestamp.
/// </summary>
public DateTimeOffset? ModifiedAt { get; init; }
/// <summary>
/// Source path to the item content.
/// </summary>
public string? SourcePath { get; init; }
}
/// <summary>
/// Result of computing a delta.
/// </summary>
public sealed record MirrorDeltaComputeResult
{
/// <summary>
/// Whether the computation succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Items that were added since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaItem> AddedItems { get; init; } = [];
/// <summary>
/// Items that were changed since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaChangeItem> ChangedItems { get; init; } = [];
/// <summary>
/// Items that were removed since the base export.
/// </summary>
public IReadOnlyList<MirrorDeltaRemovedItem> RemovedItems { get; init; } = [];
/// <summary>
/// Items that are unchanged and can be skipped (content-addressed reuse).
/// </summary>
public IReadOnlyList<MirrorDeltaItem> UnchangedItems { get; init; } = [];
/// <summary>
/// Error message if computation failed.
/// </summary>
public string? ErrorMessage { get; init; }
/// <summary>
/// Base export ID used for comparison.
/// </summary>
public string? BaseExportId { get; init; }
/// <summary>
/// Base manifest digest used for comparison.
/// </summary>
public string? BaseManifestDigest { get; init; }
/// <summary>
/// Whether baseline was reset.
/// </summary>
public bool BaselineReset { get; init; }
/// <summary>
/// Counts by category.
/// </summary>
public MirrorDeltaCategoryCounts Counts { get; init; } = new();
public static MirrorDeltaComputeResult Failed(string errorMessage)
=> new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// A changed item with both old and new hashes.
/// </summary>
public sealed record MirrorDeltaChangeItem
{
/// <summary>
/// The current item state.
/// </summary>
public required MirrorDeltaItem Current { get; init; }
/// <summary>
/// Hash of the previous version.
/// </summary>
public required string PreviousContentHash { get; init; }
/// <summary>
/// Previous size in bytes.
/// </summary>
public long PreviousSizeBytes { get; init; }
}
/// <summary>
/// A removed item.
/// </summary>
public sealed record MirrorDeltaRemovedItem
{
/// <summary>
/// Item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the removed item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Bundle path that was removed.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Hash of the content that was removed.
/// </summary>
public required string ContentHash { get; init; }
}
/// <summary>
/// Counts of delta changes by category.
/// </summary>
public sealed record MirrorDeltaCategoryCounts
{
[JsonPropertyName("added")]
public MirrorBundleDeltaCounts Added { get; init; } = new(0, 0, 0);
[JsonPropertyName("changed")]
public MirrorBundleDeltaCounts Changed { get; init; } = new(0, 0, 0);
[JsonPropertyName("removed")]
public MirrorBundleDeltaCounts Removed { get; init; } = new(0, 0, 0);
[JsonPropertyName("unchanged")]
public MirrorBundleDeltaCounts Unchanged { get; init; } = new(0, 0, 0);
}
/// <summary>
/// Manifest entry from a base export for delta comparison.
/// </summary>
public sealed record MirrorBaseManifestEntry
{
/// <summary>
/// Item identifier.
/// </summary>
public required string ItemId { get; init; }
/// <summary>
/// Category of the item.
/// </summary>
public required MirrorBundleDataCategory Category { get; init; }
/// <summary>
/// Bundle path.
/// </summary>
public required string BundlePath { get; init; }
/// <summary>
/// Content hash (SHA-256).
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long SizeBytes { get; init; }
}
/// <summary>
/// Interface for retrieving base export manifests for delta comparison.
/// </summary>
public interface IMirrorBaseManifestStore
{
/// <summary>
/// Gets the manifest entries from a base export.
/// </summary>
/// <param name="runId">The base export run ID.</param>
/// <param name="tenantId">Tenant ID for scoping.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Manifest entries, or null if not found.</returns>
Task<IReadOnlyList<MirrorBaseManifestEntry>?> GetBaseManifestEntriesAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the manifest digest for a base export.
/// </summary>
Task<string?> GetManifestDigestAsync(
Guid runId,
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves manifest entries for a completed export (for future delta comparisons).
/// </summary>
Task SaveManifestEntriesAsync(
Guid runId,
Guid tenantId,
string manifestDigest,
IReadOnlyList<MirrorBaseManifestEntry> entries,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for content-addressed storage for delta reuse.
/// </summary>
public interface IMirrorContentStore
{
/// <summary>
/// Checks if content with the given hash exists.
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if content exists.</returns>
Task<bool> ExistsAsync(string contentHash, CancellationToken cancellationToken = default);
/// <summary>
/// Gets content by hash.
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Content stream, or null if not found.</returns>
Task<Stream?> GetAsync(string contentHash, CancellationToken cancellationToken = default);
/// <summary>
/// Stores content and returns its hash.
/// </summary>
/// <param name="content">Content stream.</param>
/// <param name="expectedHash">Optional expected hash for validation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Hash of the stored content.</returns>
Task<string> StoreAsync(Stream content, string? expectedHash = null, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the local file path for cached content (for bundle building).
/// </summary>
/// <param name="contentHash">SHA-256 hash of the content.</param>
/// <returns>File path if content is cached locally, null otherwise.</returns>
string? GetLocalPath(string contentHash);
}


@@ -0,0 +1,213 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Service for computing deltas between mirror bundle exports.
/// </summary>
public interface IMirrorDeltaService
{
/// <summary>
/// Computes the delta between a base export and current items.
/// </summary>
/// <param name="request">Delta computation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Delta computation result.</returns>
Task<MirrorDeltaComputeResult> ComputeDeltaAsync(
MirrorDeltaComputeRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of the mirror delta service.
/// </summary>
public sealed class MirrorDeltaService : IMirrorDeltaService
{
private readonly IMirrorBaseManifestStore _manifestStore;
private readonly ILogger<MirrorDeltaService> _logger;
public MirrorDeltaService(
IMirrorBaseManifestStore manifestStore,
ILogger<MirrorDeltaService> logger)
{
_manifestStore = manifestStore ?? throw new ArgumentNullException(nameof(manifestStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<MirrorDeltaComputeResult> ComputeDeltaAsync(
MirrorDeltaComputeRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Computing delta against base export {BaseRunId} for tenant {TenantId}",
request.BaseRunId, request.TenantId);
// If reset baseline is requested, treat all items as added
if (request.ResetBaseline)
{
_logger.LogInformation("Baseline reset requested - all items will be included");
return CreateResetBaselineResult(request);
}
// Get base manifest entries
var baseEntries = await _manifestStore.GetBaseManifestEntriesAsync(
request.BaseRunId, request.TenantId, cancellationToken);
if (baseEntries is null || baseEntries.Count == 0)
{
_logger.LogWarning(
"Base manifest not found for run {BaseRunId}, treating as full export",
request.BaseRunId);
return CreateResetBaselineResult(request);
}
// Validate manifest digest
var storedDigest = await _manifestStore.GetManifestDigestAsync(
request.BaseRunId, request.TenantId, cancellationToken);
if (!string.IsNullOrEmpty(request.BaseManifestDigest) &&
!string.IsNullOrEmpty(storedDigest) &&
!string.Equals(request.BaseManifestDigest, storedDigest, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Manifest digest mismatch for base run {BaseRunId}: expected {Expected}, found {Found}",
request.BaseRunId, request.BaseManifestDigest, storedDigest);
return MirrorDeltaComputeResult.Failed(
$"Base manifest digest mismatch: expected {request.BaseManifestDigest}, found {storedDigest}");
}
// Build lookup for base entries by item ID
var baseByItemId = baseEntries.ToDictionary(
e => e.ItemId,
e => e,
StringComparer.OrdinalIgnoreCase);
// Build lookup for current items by item ID
var currentByItemId = request.CurrentItems.ToDictionary(
i => i.ItemId,
i => i,
StringComparer.OrdinalIgnoreCase);
var added = new List<MirrorDeltaItem>();
var changed = new List<MirrorDeltaChangeItem>();
var unchanged = new List<MirrorDeltaItem>();
var removed = new List<MirrorDeltaRemovedItem>();
// Find added and changed items
foreach (var current in request.CurrentItems)
{
if (!baseByItemId.TryGetValue(current.ItemId, out var baseEntry))
{
// New item
added.Add(current);
}
else if (!string.Equals(current.ContentHash, baseEntry.ContentHash, StringComparison.OrdinalIgnoreCase))
{
// Changed item (different content hash)
changed.Add(new MirrorDeltaChangeItem
{
Current = current,
PreviousContentHash = baseEntry.ContentHash,
PreviousSizeBytes = baseEntry.SizeBytes
});
}
else
{
// Unchanged item (same content hash)
unchanged.Add(current);
}
}
// Find removed items
foreach (var baseEntry in baseEntries)
{
if (!currentByItemId.ContainsKey(baseEntry.ItemId))
{
removed.Add(new MirrorDeltaRemovedItem
{
ItemId = baseEntry.ItemId,
Category = baseEntry.Category,
BundlePath = baseEntry.BundlePath,
ContentHash = baseEntry.ContentHash
});
}
}
var counts = ComputeCounts(added, changed, removed, unchanged);
_logger.LogInformation(
"Delta computed: {Added} added, {Changed} changed, {Removed} removed, {Unchanged} unchanged",
added.Count, changed.Count, removed.Count, unchanged.Count);
return new MirrorDeltaComputeResult
{
Success = true,
AddedItems = added,
ChangedItems = changed,
RemovedItems = removed,
UnchangedItems = unchanged,
BaseExportId = request.BaseRunId.ToString("D"),
BaseManifestDigest = storedDigest ?? request.BaseManifestDigest,
BaselineReset = false,
Counts = counts
};
}
private static MirrorDeltaComputeResult CreateResetBaselineResult(MirrorDeltaComputeRequest request)
{
var counts = new MirrorDeltaCategoryCounts
{
Added = CountByCategory(request.CurrentItems),
Changed = new MirrorBundleDeltaCounts(0, 0, 0),
Removed = new MirrorBundleDeltaCounts(0, 0, 0),
Unchanged = new MirrorBundleDeltaCounts(0, 0, 0)
};
return new MirrorDeltaComputeResult
{
Success = true,
AddedItems = request.CurrentItems.ToList(),
ChangedItems = [],
RemovedItems = [],
UnchangedItems = [],
BaseExportId = request.BaseRunId.ToString("D"),
BaseManifestDigest = request.BaseManifestDigest,
BaselineReset = true,
Counts = counts
};
}
private static MirrorDeltaCategoryCounts ComputeCounts(
IReadOnlyList<MirrorDeltaItem> added,
IReadOnlyList<MirrorDeltaChangeItem> changed,
IReadOnlyList<MirrorDeltaRemovedItem> removed,
IReadOnlyList<MirrorDeltaItem> unchanged)
{
return new MirrorDeltaCategoryCounts
{
Added = CountByCategory(added),
Changed = CountByCategory(changed.Select(c => c.Current).ToList()),
Removed = CountRemovedByCategory(removed),
Unchanged = CountByCategory(unchanged)
};
}
private static MirrorBundleDeltaCounts CountByCategory(IReadOnlyList<MirrorDeltaItem> items)
{
var advisories = items.Count(i => i.Category == MirrorBundleDataCategory.Advisories);
var vex = items.Count(i => i.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus);
var sboms = items.Count(i => i.Category == MirrorBundleDataCategory.Sbom);
return new MirrorBundleDeltaCounts(advisories, vex, sboms);
}
private static MirrorBundleDeltaCounts CountRemovedByCategory(IReadOnlyList<MirrorDeltaRemovedItem> items)
{
var advisories = items.Count(i => i.Category == MirrorBundleDataCategory.Advisories);
var vex = items.Count(i => i.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus);
var sboms = items.Count(i => i.Category == MirrorBundleDataCategory.Sbom);
return new MirrorBundleDeltaCounts(advisories, vex, sboms);
}
}
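
Wiring sketch, reusing `request` from the sketch above; `manifestStore` is any IMirrorBaseManifestStore that has been seeded via SaveManifestEntriesAsync for the base run:

using Microsoft.Extensions.Logging.Abstractions;

var service = new MirrorDeltaService(manifestStore, NullLogger<MirrorDeltaService>.Instance);
var result = await service.ComputeDeltaAsync(request);
if (result.Success)
{
    Console.WriteLine(
        $"delta: +{result.AddedItems.Count} ~{result.ChangedItems.Count} " +
        $"-{result.RemovedItems.Count} ={result.UnchangedItems.Count}");
}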


@@ -0,0 +1,113 @@
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Service for integrating pack run artifacts and provenance into export bundles.
/// </summary>
public interface IPackRunIntegrationService
{
/// <summary>
/// Integrates a pack run's artifacts and provenance into an export bundle.
/// </summary>
Task<PackRunIntegrationResult> IntegrateAsync(
PackRunIntegrationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run reference for an export run.
/// </summary>
Task<PackRunExportReference?> GetReferenceAsync(
string tenantId,
string exportRunId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Lists all pack run references for an export run.
/// </summary>
Task<IReadOnlyList<PackRunExportReference>> ListReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates a provenance link between a pack run and export.
/// </summary>
Task<PackRunProvenanceLink> CreateProvenanceLinkAsync(
string tenantId,
string packRunId,
string exportRunId,
string evidenceRootHash,
string? attestationDigest,
IReadOnlyList<PackRunProvenanceSubject> subjects,
PackRunLinkKind linkKind = PackRunLinkKind.FullInclusion,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies pack run artifacts and provenance in an export.
/// </summary>
Task<PackRunVerificationResult> VerifyAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for pack run data used by integration service.
/// </summary>
public interface IPackRunDataStore
{
/// <summary>
/// Gets pack run evidence snapshot.
/// </summary>
Task<PackRunEvidenceExport?> GetEvidenceAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run attestation.
/// </summary>
Task<PackRunAttestationExport?> GetAttestationAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run artifacts.
/// </summary>
Task<IReadOnlyList<PackRunExportArtifact>> GetArtifactsAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run status.
/// </summary>
Task<PackRunStatusInfo?> GetStatusAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Opens artifact stream for reading.
/// </summary>
Task<Stream?> OpenArtifactAsync(
string tenantId,
string packRunId,
string artifactPath,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Pack run status information.
/// </summary>
public sealed record PackRunStatusInfo
{
public required string RunId { get; init; }
public required string TenantId { get; init; }
public required string PlanHash { get; init; }
public required string Status { get; init; }
public DateTimeOffset? StartedAt { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
public Guid? EvidenceSnapshotId { get; init; }
public Guid? AttestationId { get; init; }
}


@@ -0,0 +1,235 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// In-memory implementation of pack run data store for testing.
/// </summary>
public sealed class InMemoryPackRunDataStore : IPackRunDataStore
{
private readonly ConcurrentDictionary<string, PackRunStatusInfo> _statuses = new();
private readonly ConcurrentDictionary<string, PackRunEvidenceExport> _evidence = new();
private readonly ConcurrentDictionary<string, PackRunAttestationExport> _attestations = new();
private readonly ConcurrentDictionary<string, List<PackRunExportArtifact>> _artifacts = new();
private readonly ConcurrentDictionary<string, byte[]> _artifactContent = new();
/// <summary>
/// Adds a pack run status for testing.
/// </summary>
public void AddStatus(PackRunStatusInfo status)
{
var key = GetKey(status.TenantId, status.RunId);
_statuses[key] = status;
}
/// <summary>
/// Sets evidence for a pack run.
/// </summary>
public void SetEvidence(string tenantId, string packRunId, PackRunEvidenceExport evidence)
{
var key = GetKey(tenantId, packRunId);
_evidence[key] = evidence;
}
/// <summary>
/// Sets attestation for a pack run.
/// </summary>
public void SetAttestation(string tenantId, string packRunId, PackRunAttestationExport attestation)
{
var key = GetKey(tenantId, packRunId);
_attestations[key] = attestation;
}
/// <summary>
/// Adds an artifact for a pack run.
/// </summary>
public void AddArtifact(string tenantId, string packRunId, PackRunExportArtifact artifact, byte[] content)
{
var key = GetKey(tenantId, packRunId);
var contentKey = GetKey(tenantId, packRunId, artifact.Path);
// Copy-on-write so a list already handed to readers is never mutated in place
_artifacts.AddOrUpdate(
key,
[artifact],
(_, list) => [.. list, artifact]);
_artifactContent[contentKey] = content;
}
/// <inheritdoc />
public Task<PackRunStatusInfo?> GetStatusAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_statuses.TryGetValue(key, out var status);
return Task.FromResult(status);
}
/// <inheritdoc />
public Task<PackRunEvidenceExport?> GetEvidenceAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_evidence.TryGetValue(key, out var evidence);
return Task.FromResult(evidence);
}
/// <inheritdoc />
public Task<PackRunAttestationExport?> GetAttestationAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
_attestations.TryGetValue(key, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<IReadOnlyList<PackRunExportArtifact>> GetArtifactsAsync(
string tenantId,
string packRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId);
if (_artifacts.TryGetValue(key, out var list))
{
return Task.FromResult<IReadOnlyList<PackRunExportArtifact>>(list);
}
return Task.FromResult<IReadOnlyList<PackRunExportArtifact>>([]);
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(
string tenantId,
string packRunId,
string artifactPath,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, packRunId, artifactPath);
if (_artifactContent.TryGetValue(key, out var content))
{
return Task.FromResult<Stream?>(new MemoryStream(content));
}
return Task.FromResult<Stream?>(null);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_statuses.Clear();
_evidence.Clear();
_attestations.Clear();
_artifacts.Clear();
_artifactContent.Clear();
}
private static string GetKey(string tenantId, string packRunId)
=> $"{tenantId}:{packRunId}";
private static string GetKey(string tenantId, string packRunId, string path)
=> $"{tenantId}:{packRunId}:{path}";
}
/// <summary>
/// In-memory implementation of pack run export store for testing.
/// </summary>
public sealed class InMemoryPackRunExportStore : IPackRunExportStore
{
private readonly ConcurrentDictionary<string, List<PackRunExportReference>> _references = new();
private readonly ConcurrentDictionary<string, byte[]> _artifacts = new();
/// <inheritdoc />
public Task SaveReferenceAsync(
string tenantId,
string exportRunId,
PackRunExportReference reference,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, exportRunId);
_references.AddOrUpdate(
key,
[reference],
(_, list) =>
{
// Copy-on-write: replace any existing reference for the same pack run
// without mutating a list that may already have been handed to readers
var updated = new List<PackRunExportReference>(list);
updated.RemoveAll(r => string.Equals(r.RunId, reference.RunId, StringComparison.OrdinalIgnoreCase));
updated.Add(reference);
return updated;
});
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<IReadOnlyList<PackRunExportReference>> GetReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default)
{
var key = GetKey(tenantId, exportRunId);
if (_references.TryGetValue(key, out var list))
{
return Task.FromResult<IReadOnlyList<PackRunExportReference>>(list);
}
return Task.FromResult<IReadOnlyList<PackRunExportReference>>([]);
}
/// <inheritdoc />
public async Task WriteArtifactAsync(
string tenantId,
string exportRunId,
string path,
Stream content,
CancellationToken cancellationToken = default)
{
var key = GetArtifactKey(tenantId, exportRunId, path);
using var ms = new MemoryStream();
// Copy asynchronously and honor the cancellation token instead of blocking
await content.CopyToAsync(ms, cancellationToken);
_artifacts[key] = ms.ToArray();
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(
string tenantId,
string exportRunId,
string path,
CancellationToken cancellationToken = default)
{
var key = GetArtifactKey(tenantId, exportRunId, path);
if (_artifacts.TryGetValue(key, out var content))
{
return Task.FromResult<Stream?>(new MemoryStream(content));
}
return Task.FromResult<Stream?>(null);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_references.Clear();
_artifacts.Clear();
}
private static string GetKey(string tenantId, string exportRunId)
=> $"{tenantId}:{exportRunId}";
private static string GetArtifactKey(string tenantId, string exportRunId, string path)
=> $"{tenantId}:{exportRunId}:{path}";
}
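
A test-fixture sketch for the stores above (the digests are placeholders, so tests seeded this way should skip hash verification):

var dataStore = new InMemoryPackRunDataStore();
dataStore.AddStatus(new PackRunStatusInfo
{
    RunId = "pack-run-1",
    TenantId = "tenant-a",
    PlanHash = "sha256:" + new string('0', 64),
    Status = "Completed",
    CompletedAt = DateTimeOffset.UtcNow
});
dataStore.AddArtifact(
    "tenant-a",
    "pack-run-1",
    new PackRunExportArtifact
    {
        Name = "report",
        Path = "outputs/report.json",
        Sha256 = "sha256:" + new string('0', 64), // placeholder, not the real digest
        SizeBytes = 2,
        MediaType = "application/json"
    },
    content: "{}"u8.ToArray());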


@@ -0,0 +1,353 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Reference to a pack run included in an export.
/// </summary>
public sealed record PackRunExportReference
{
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("evidenceSnapshotId")]
public Guid? EvidenceSnapshotId { get; init; }
[JsonPropertyName("attestationId")]
public Guid? AttestationId { get; init; }
[JsonPropertyName("completedAt")]
public DateTimeOffset? CompletedAt { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("artifacts")]
public IReadOnlyList<PackRunExportArtifact> Artifacts { get; init; } = [];
[JsonPropertyName("provenanceLink")]
public PackRunProvenanceLink? ProvenanceLink { get; init; }
}
/// <summary>
/// Artifact from a pack run to include in export.
/// </summary>
public sealed record PackRunExportArtifact
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public required string MediaType { get; init; }
[JsonPropertyName("category")]
public string? Category { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Provenance link from pack run to export bundle.
/// </summary>
public sealed record PackRunProvenanceLink
{
[JsonPropertyName("version")]
public string Version { get; init; } = "1.0";
[JsonPropertyName("packRunId")]
public required string PackRunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("evidenceRootHash")]
public required string EvidenceRootHash { get; init; }
[JsonPropertyName("attestationDigest")]
public string? AttestationDigest { get; init; }
[JsonPropertyName("exportRunId")]
public required string ExportRunId { get; init; }
[JsonPropertyName("exportBundleHash")]
public string? ExportBundleHash { get; init; }
[JsonPropertyName("linkedAt")]
public required DateTimeOffset LinkedAt { get; init; }
[JsonPropertyName("linkKind")]
public PackRunLinkKind LinkKind { get; init; } = PackRunLinkKind.FullInclusion;
[JsonPropertyName("subjects")]
public IReadOnlyList<PackRunProvenanceSubject> Subjects { get; init; } = [];
}
/// <summary>
/// Subject included in provenance link.
/// </summary>
public sealed record PackRunProvenanceSubject
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("digest")]
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// Kind of pack run link to export.
/// </summary>
public enum PackRunLinkKind
{
/// <summary>Full pack run artifacts included in export.</summary>
FullInclusion,
/// <summary>Only provenance reference included, artifacts external.</summary>
ProvenanceOnly,
/// <summary>Selective artifacts included based on filter.</summary>
SelectiveInclusion,
/// <summary>Delta from previous export.</summary>
DeltaInclusion
}
/// <summary>
/// Request to integrate pack run into an export bundle.
/// </summary>
public sealed record PackRunIntegrationRequest
{
public required string TenantId { get; init; }
public required string PackRunId { get; init; }
public required string ExportRunId { get; init; }
public PackRunLinkKind LinkKind { get; init; } = PackRunLinkKind.FullInclusion;
public IReadOnlyList<string>? ArtifactFilter { get; init; }
public bool IncludeEvidence { get; init; } = true;
public bool IncludeAttestation { get; init; } = true;
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Result of pack run integration.
/// </summary>
public sealed record PackRunIntegrationResult
{
public bool Success { get; init; }
public string? ErrorCode { get; init; }
public string? ErrorMessage { get; init; }
public PackRunExportReference? Reference { get; init; }
public IReadOnlyList<IntegratedPackRunArtifact> IntegratedArtifacts { get; init; } = [];
public static PackRunIntegrationResult Succeeded(
PackRunExportReference reference,
IReadOnlyList<IntegratedPackRunArtifact> artifacts) => new()
{
Success = true,
Reference = reference,
IntegratedArtifacts = artifacts
};
public static PackRunIntegrationResult Failed(string errorCode, string message) => new()
{
Success = false,
ErrorCode = errorCode,
ErrorMessage = message
};
}
/// <summary>
/// Artifact that was integrated into the export.
/// </summary>
public sealed record IntegratedPackRunArtifact
{
public required string SourcePath { get; init; }
public required string ExportPath { get; init; }
public required string Sha256 { get; init; }
public long SizeBytes { get; init; }
public required string MediaType { get; init; }
}
/// <summary>
/// Pack run evidence to include in export.
/// </summary>
public sealed record PackRunEvidenceExport
{
[JsonPropertyName("snapshotId")]
public required Guid SnapshotId { get; init; }
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("rootHash")]
public required string RootHash { get; init; }
[JsonPropertyName("kind")]
public required string Kind { get; init; }
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("materialCount")]
public int MaterialCount { get; init; }
[JsonPropertyName("materials")]
public IReadOnlyList<PackRunMaterialExport> Materials { get; init; } = [];
}
/// <summary>
/// Material from pack run evidence snapshot.
/// </summary>
public sealed record PackRunMaterialExport
{
[JsonPropertyName("section")]
public required string Section { get; init; }
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public required string MediaType { get; init; }
}
/// <summary>
/// Pack run attestation to include in export.
/// </summary>
public sealed record PackRunAttestationExport
{
[JsonPropertyName("attestationId")]
public required Guid AttestationId { get; init; }
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("planHash")]
public required string PlanHash { get; init; }
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("subjectCount")]
public int SubjectCount { get; init; }
[JsonPropertyName("envelopeDigest")]
public string? EnvelopeDigest { get; init; }
[JsonPropertyName("subjects")]
public IReadOnlyList<PackRunProvenanceSubject> Subjects { get; init; } = [];
[JsonPropertyName("dsseEnvelope")]
public string? DsseEnvelopeJson { get; init; }
}
/// <summary>
/// Verification request for pack run artifacts in export.
/// </summary>
public sealed record PackRunVerificationRequest
{
public required string TenantId { get; init; }
public required string ExportRunId { get; init; }
public string? PackRunId { get; init; }
public bool VerifyHashes { get; init; } = true;
public bool VerifyAttestation { get; init; } = true;
public bool VerifyProvenance { get; init; } = true;
public IReadOnlyList<string>? TrustedKeys { get; init; }
}
/// <summary>
/// Verification result for pack run artifacts in export.
/// </summary>
public sealed record PackRunVerificationResult
{
public bool IsValid { get; init; }
public required string ExportRunId { get; init; }
public string? PackRunId { get; init; }
public PackRunProvenanceVerificationStatus ProvenanceStatus { get; init; }
public PackRunAttestationVerificationStatus AttestationStatus { get; init; }
public IReadOnlyList<PackRunHashVerificationResult> HashResults { get; init; } = [];
public IReadOnlyList<string> Errors { get; init; } = [];
public IReadOnlyList<string> Warnings { get; init; } = [];
public DateTimeOffset VerifiedAt { get; init; }
}
/// <summary>
/// Provenance verification status.
/// </summary>
public enum PackRunProvenanceVerificationStatus
{
NotVerified,
Valid,
Invalid,
MissingLink,
HashMismatch
}
/// <summary>
/// Attestation verification status for pack runs.
/// </summary>
public enum PackRunAttestationVerificationStatus
{
NotVerified,
Valid,
Invalid,
SignatureInvalid,
SubjectMismatch,
NotFound
}
/// <summary>
/// Hash verification result for a pack run artifact.
/// </summary>
public sealed record PackRunHashVerificationResult
{
public required string ArtifactPath { get; init; }
public bool IsValid { get; init; }
public required string ExpectedHash { get; init; }
public string? ComputedHash { get; init; }
public string? Error { get; init; }
}
/// <summary>
/// Error codes for pack run integration.
/// </summary>
public static class PackRunIntegrationErrors
{
public const string PackRunNotFound = "PACK_RUN_NOT_FOUND";
public const string TenantMismatch = "TENANT_MISMATCH";
public const string EvidenceNotFound = "EVIDENCE_NOT_FOUND";
public const string AttestationNotFound = "ATTESTATION_NOT_FOUND";
public const string ArtifactNotFound = "ARTIFACT_NOT_FOUND";
public const string HashMismatch = "HASH_MISMATCH";
public const string IntegrationFailed = "INTEGRATION_FAILED";
public const string ProvenanceLinkFailed = "PROVENANCE_LINK_FAILED";
}
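
Because the records carry JsonPropertyName attributes, plain System.Text.Json serialization already produces the wire shape; a quick sketch with placeholder values:

using System.Text.Json;

var zeros = new string('0', 64); // placeholder digest value
var link = new PackRunProvenanceLink
{
    PackRunId = "pack-run-1",
    PlanHash = $"sha256:{zeros}",
    EvidenceRootHash = $"sha256:{zeros}",
    ExportRunId = "export-run-1",
    LinkedAt = DateTimeOffset.UnixEpoch,
    Subjects =
    [
        new PackRunProvenanceSubject
        {
            Name = "outputs/report.json",
            Digest = new Dictionary<string, string> { ["sha256"] = zeros }
        }
    ]
};

// Keys come out camelCase per the attributes: "version", "packRunId", "planHash",
// "evidenceRootHash", and so on (LinkKind serializes as its numeric value by default).
var json = JsonSerializer.Serialize(link);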


@@ -0,0 +1,478 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Default implementation of pack run integration service.
/// </summary>
public sealed class PackRunIntegrationService : IPackRunIntegrationService
{
private readonly IPackRunDataStore _dataStore;
private readonly IPackRunExportStore _exportStore;
private readonly ILogger<PackRunIntegrationService> _logger;
private readonly TimeProvider _timeProvider;
public PackRunIntegrationService(
IPackRunDataStore dataStore,
IPackRunExportStore exportStore,
ILogger<PackRunIntegrationService> logger,
TimeProvider timeProvider)
{
_dataStore = dataStore ?? throw new ArgumentNullException(nameof(dataStore));
_exportStore = exportStore ?? throw new ArgumentNullException(nameof(exportStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
public async Task<PackRunIntegrationResult> IntegrateAsync(
PackRunIntegrationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Integrating pack run {PackRunId} into export {ExportRunId} for tenant {TenantId}",
request.PackRunId,
request.ExportRunId,
request.TenantId);
// Get pack run status
var status = await _dataStore.GetStatusAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
if (status is null)
{
_logger.LogWarning(
"Pack run {PackRunId} not found for tenant {TenantId}",
request.PackRunId,
request.TenantId);
return PackRunIntegrationResult.Failed(
PackRunIntegrationErrors.PackRunNotFound,
$"Pack run {request.PackRunId} not found.");
}
if (!string.Equals(status.TenantId, request.TenantId, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Tenant mismatch for pack run {PackRunId}: expected {Expected}, got {Actual}",
request.PackRunId,
request.TenantId,
status.TenantId);
return PackRunIntegrationResult.Failed(
PackRunIntegrationErrors.TenantMismatch,
"Pack run belongs to a different tenant.");
}
// Get artifacts
var artifacts = await _dataStore.GetArtifactsAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
// Apply filter if specified
if (request.ArtifactFilter is { Count: > 0 })
{
var filterSet = new HashSet<string>(request.ArtifactFilter, StringComparer.OrdinalIgnoreCase);
artifacts = artifacts.Where(a => filterSet.Contains(a.Name) || filterSet.Contains(a.Path)).ToList();
}
// Get evidence and attestation if requested
PackRunEvidenceExport? evidence = null;
PackRunAttestationExport? attestation = null;
if (request.IncludeEvidence)
{
evidence = await _dataStore.GetEvidenceAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
}
if (request.IncludeAttestation)
{
attestation = await _dataStore.GetAttestationAsync(
request.TenantId,
request.PackRunId,
cancellationToken);
}
// Create provenance link
var subjects = artifacts.Select(a => new PackRunProvenanceSubject
{
Name = a.Path,
Digest = ParseDigest(a.Sha256)
}).ToList();
var provenanceLink = await CreateProvenanceLinkAsync(
request.TenantId,
request.PackRunId,
request.ExportRunId,
evidence?.RootHash ?? "sha256:" + new string('0', 64),
attestation?.EnvelopeDigest,
subjects,
request.LinkKind,
cancellationToken);
// CreateProvenanceLinkAsync has no access to the run status, so fill in the plan hash here
provenanceLink = provenanceLink with { PlanHash = status.PlanHash };
// Build reference
var reference = new PackRunExportReference
{
RunId = request.PackRunId,
TenantId = request.TenantId,
PlanHash = status.PlanHash,
EvidenceSnapshotId = status.EvidenceSnapshotId,
AttestationId = status.AttestationId,
CompletedAt = status.CompletedAt,
Status = status.Status,
Artifacts = artifacts,
ProvenanceLink = provenanceLink
};
// Copy artifacts to export store
var integratedArtifacts = new List<IntegratedPackRunArtifact>();
foreach (var artifact in artifacts)
{
var exportPath = $"pack-runs/{request.PackRunId}/{artifact.Path}";
await using var stream = await _dataStore.OpenArtifactAsync(
request.TenantId,
request.PackRunId,
artifact.Path,
cancellationToken);
if (stream is not null)
{
await _exportStore.WriteArtifactAsync(
request.TenantId,
request.ExportRunId,
exportPath,
stream,
cancellationToken);
integratedArtifacts.Add(new IntegratedPackRunArtifact
{
SourcePath = artifact.Path,
ExportPath = exportPath,
Sha256 = artifact.Sha256,
SizeBytes = artifact.SizeBytes,
MediaType = artifact.MediaType
});
}
}
// Store reference
await _exportStore.SaveReferenceAsync(
request.TenantId,
request.ExportRunId,
reference,
cancellationToken);
_logger.LogInformation(
"Successfully integrated pack run {PackRunId} into export {ExportRunId}: {ArtifactCount} artifacts",
request.PackRunId,
request.ExportRunId,
integratedArtifacts.Count);
return PackRunIntegrationResult.Succeeded(reference, integratedArtifacts);
}
public async Task<PackRunExportReference?> GetReferenceAsync(
string tenantId,
string exportRunId,
string packRunId,
CancellationToken cancellationToken = default)
{
var references = await _exportStore.GetReferencesAsync(tenantId, exportRunId, cancellationToken);
return references.FirstOrDefault(r =>
string.Equals(r.RunId, packRunId, StringComparison.OrdinalIgnoreCase));
}
public async Task<IReadOnlyList<PackRunExportReference>> ListReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default)
{
return await _exportStore.GetReferencesAsync(tenantId, exportRunId, cancellationToken);
}
public Task<PackRunProvenanceLink> CreateProvenanceLinkAsync(
string tenantId,
string packRunId,
string exportRunId,
string evidenceRootHash,
string? attestationDigest,
IReadOnlyList<PackRunProvenanceSubject> subjects,
PackRunLinkKind linkKind = PackRunLinkKind.FullInclusion,
CancellationToken cancellationToken = default)
{
var link = new PackRunProvenanceLink
{
PackRunId = packRunId,
PlanHash = "", // Populated by callers that know the run status (see IntegrateAsync)
EvidenceRootHash = evidenceRootHash,
AttestationDigest = attestationDigest,
ExportRunId = exportRunId,
LinkedAt = _timeProvider.GetUtcNow(),
LinkKind = linkKind,
Subjects = subjects
};
return Task.FromResult(link);
}
public async Task<PackRunVerificationResult> VerifyAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var errors = new List<string>();
var warnings = new List<string>();
var hashResults = new List<PackRunHashVerificationResult>();
var provenanceStatus = PackRunProvenanceVerificationStatus.NotVerified;
var attestationStatus = PackRunAttestationVerificationStatus.NotVerified;
// Get references for export
var references = await _exportStore.GetReferencesAsync(
request.TenantId,
request.ExportRunId,
cancellationToken);
if (request.PackRunId is not null)
{
references = references.Where(r =>
string.Equals(r.RunId, request.PackRunId, StringComparison.OrdinalIgnoreCase)).ToList();
}
if (references.Count == 0)
{
errors.Add("No pack run references found in export.");
return new PackRunVerificationResult
{
IsValid = false,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
ProvenanceStatus = PackRunProvenanceVerificationStatus.MissingLink,
AttestationStatus = PackRunAttestationVerificationStatus.NotFound,
HashResults = hashResults,
Errors = errors,
Warnings = warnings,
VerifiedAt = _timeProvider.GetUtcNow()
};
}
foreach (var reference in references)
{
// Verify provenance link
if (request.VerifyProvenance && reference.ProvenanceLink is not null)
{
var linkValid = !string.IsNullOrEmpty(reference.ProvenanceLink.EvidenceRootHash) &&
reference.ProvenanceLink.Subjects.Count > 0;
provenanceStatus = linkValid
? PackRunProvenanceVerificationStatus.Valid
: PackRunProvenanceVerificationStatus.Invalid;
if (!linkValid)
{
errors.Add($"Invalid provenance link for pack run {reference.RunId}.");
}
}
// Verify attestation
if (request.VerifyAttestation && reference.AttestationId.HasValue)
{
var attestation = await _dataStore.GetAttestationAsync(
request.TenantId,
reference.RunId,
cancellationToken);
if (attestation is not null)
{
if (attestation.Status == "Signed")
{
// Verify trusted keys if provided
if (request.TrustedKeys is { Count: > 0 } && attestation.DsseEnvelopeJson is not null)
{
// Simplified placeholder: a real implementation must parse the DSSE
// envelope and verify each signature against the trusted key set.
// Here we only require that the attestation carries subjects at all.
var keyFound = attestation.Subjects.Count > 0;
attestationStatus = keyFound
? PackRunAttestationVerificationStatus.Valid
: PackRunAttestationVerificationStatus.SignatureInvalid;
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.Valid;
}
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.Invalid;
warnings.Add($"Attestation for pack run {reference.RunId} is not signed (status: {attestation.Status}).");
}
}
else
{
attestationStatus = PackRunAttestationVerificationStatus.NotFound;
warnings.Add($"Attestation not found for pack run {reference.RunId}.");
}
}
// Verify artifact hashes
if (request.VerifyHashes)
{
foreach (var artifact in reference.Artifacts)
{
var exportPath = $"pack-runs/{reference.RunId}/{artifact.Path}";
await using var stream = await _exportStore.OpenArtifactAsync(
request.TenantId,
request.ExportRunId,
exportPath,
cancellationToken);
if (stream is not null)
{
var computedHash = await ComputeHashAsync(stream, cancellationToken);
var expectedHash = NormalizeHash(artifact.Sha256);
var hashValid = string.Equals(computedHash, expectedHash, StringComparison.OrdinalIgnoreCase);
hashResults.Add(new PackRunHashVerificationResult
{
ArtifactPath = exportPath,
IsValid = hashValid,
ExpectedHash = expectedHash,
ComputedHash = computedHash,
Error = hashValid ? null : "Hash mismatch"
});
if (!hashValid)
{
errors.Add($"Hash mismatch for artifact {exportPath}.");
}
}
else
{
hashResults.Add(new PackRunHashVerificationResult
{
ArtifactPath = exportPath,
IsValid = false,
ExpectedHash = artifact.Sha256,
Error = "Artifact not found in export"
});
errors.Add($"Artifact not found: {exportPath}.");
}
}
}
}
var isValid = errors.Count == 0 &&
(provenanceStatus == PackRunProvenanceVerificationStatus.Valid ||
provenanceStatus == PackRunProvenanceVerificationStatus.NotVerified) &&
(attestationStatus == PackRunAttestationVerificationStatus.Valid ||
attestationStatus == PackRunAttestationVerificationStatus.NotVerified);
return new PackRunVerificationResult
{
IsValid = isValid,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
ProvenanceStatus = provenanceStatus,
AttestationStatus = attestationStatus,
HashResults = hashResults,
Errors = errors,
Warnings = warnings,
VerifiedAt = _timeProvider.GetUtcNow()
};
}
private static IReadOnlyDictionary<string, string> ParseDigest(string hash)
{
var digest = new Dictionary<string, string>();
if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
digest["sha256"] = hash[7..];
}
else if (hash.StartsWith("sha512:", StringComparison.OrdinalIgnoreCase))
{
digest["sha512"] = hash[7..];
}
else
{
digest["sha256"] = hash;
}
return digest;
}
private static string NormalizeHash(string hash)
{
if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
return hash[7..].ToLowerInvariant();
}
return hash.ToLowerInvariant();
}
private static async Task<string> ComputeHashAsync(Stream stream, CancellationToken cancellationToken)
{
var hash = await SHA256.HashDataAsync(stream, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
/// <summary>
/// Store for pack run export data.
/// </summary>
public interface IPackRunExportStore
{
/// <summary>
/// Saves pack run reference to export.
/// </summary>
Task SaveReferenceAsync(
string tenantId,
string exportRunId,
PackRunExportReference reference,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run references for export.
/// </summary>
Task<IReadOnlyList<PackRunExportReference>> GetReferencesAsync(
string tenantId,
string exportRunId,
CancellationToken cancellationToken = default);
/// <summary>
/// Writes artifact to export store.
/// </summary>
Task WriteArtifactAsync(
string tenantId,
string exportRunId,
string path,
Stream content,
CancellationToken cancellationToken = default);
/// <summary>
/// Opens artifact from export store.
/// </summary>
Task<Stream?> OpenArtifactAsync(
string tenantId,
string exportRunId,
string path,
CancellationToken cancellationToken = default);
}


@@ -0,0 +1,39 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.PackRun;
/// <summary>
/// Extension methods for registering pack run integration services.
/// </summary>
public static class PackRunIntegrationServiceCollectionExtensions
{
/// <summary>
/// Registers pack run integration services with in-memory stores.
/// </summary>
public static IServiceCollection AddPackRunIntegration(this IServiceCollection services)
{
services.AddSingleton<InMemoryPackRunDataStore>();
services.AddSingleton<IPackRunDataStore>(sp => sp.GetRequiredService<InMemoryPackRunDataStore>());
services.AddSingleton<InMemoryPackRunExportStore>();
services.AddSingleton<IPackRunExportStore>(sp => sp.GetRequiredService<InMemoryPackRunExportStore>());
services.AddSingleton<IPackRunIntegrationService, PackRunIntegrationService>();
return services;
}
/// <summary>
/// Registers pack run integration services with custom stores.
/// </summary>
public static IServiceCollection AddPackRunIntegration<TDataStore, TExportStore>(this IServiceCollection services)
where TDataStore : class, IPackRunDataStore
where TExportStore : class, IPackRunExportStore
{
services.AddSingleton<IPackRunDataStore, TDataStore>();
services.AddSingleton<IPackRunExportStore, TExportStore>();
services.AddSingleton<IPackRunIntegrationService, PackRunIntegrationService>();
return services;
}
}
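
Composition-root sketch; note the extension assumes the host already registers logging and a TimeProvider, so a bare container needs both:

using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton(TimeProvider.System);
services.AddPackRunIntegration(); // in-memory stores; use the generic overload for production stores

await using var provider = services.BuildServiceProvider();
var integration = provider.GetRequiredService<IPackRunIntegrationService>();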


@@ -15,6 +15,11 @@ public sealed record ExportPlanRequest
public ExportFormatOptions? FormatOverride { get; init; }
/// <summary>
/// Distribution targets for the export artifacts.
/// </summary>
public IReadOnlyList<ExportDistributionTargetSpec>? DistributionTargets { get; init; }
public string? CorrelationId { get; init; }
public string? InitiatedBy { get; init; }
@@ -22,6 +27,31 @@ public sealed record ExportPlanRequest
public bool DryRun { get; init; }
}
/// <summary>
/// Specification for a distribution target in a plan request.
/// </summary>
public sealed record ExportDistributionTargetSpec
{
public required Domain.ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
/// <summary>
/// Idempotency key to prevent duplicate distributions.
/// </summary>
public string? IdempotencyKey { get; init; }
/// <summary>
/// Target-specific configuration (JSON).
/// </summary>
public string? ConfigJson { get; init; }
/// <summary>
/// Retention policy for this target.
/// </summary>
public Domain.ExportRetentionPolicy? RetentionPolicy { get; init; }
}
/// <summary>
/// Output format configuration for exports.
/// </summary>
@@ -77,7 +107,17 @@ public enum ExportFormat
/// <summary>
/// Full mirror layout with indexes.
/// </summary>
Mirror = 5,
/// <summary>
/// Trivy vulnerability database format (schema v2).
/// </summary>
TrivyDb = 6,
/// <summary>
/// Trivy Java database format (Maven/Gradle/SBT supplement).
/// </summary>
TrivyJavaDb = 7
}
/// <summary>
@@ -110,6 +150,11 @@ public sealed record ExportPlan
public IReadOnlyList<ExportPlanPhase> Phases { get; init; } = [];
/// <summary>
/// Resolved distribution targets for the plan.
/// </summary>
public IReadOnlyList<ExportPlanDistributionTarget> DistributionTargets { get; init; } = [];
public int TotalItems { get; init; }
public long EstimatedSizeBytes { get; init; }
@@ -129,6 +174,34 @@ public sealed record ExportPlan
public IReadOnlyList<ExportValidationError> ValidationErrors { get; init; } = [];
}
/// <summary>
/// A resolved distribution target in an export plan.
/// </summary>
public sealed record ExportPlanDistributionTarget
{
public required Guid TargetId { get; init; }
public required Domain.ExportDistributionKind Kind { get; init; }
public required string Target { get; init; }
public string? IdempotencyKey { get; init; }
public string? ConfigJson { get; init; }
public Domain.ExportRetentionPolicy? RetentionPolicy { get; init; }
/// <summary>
/// Estimated time to complete distribution to this target.
/// </summary>
public TimeSpan EstimatedDuration { get; init; }
/// <summary>
/// Priority for distribution ordering (lower = higher priority).
/// </summary>
public int Priority { get; init; }
}
/// <summary>
/// Status of an export plan.
/// </summary>
@@ -230,7 +303,17 @@ public enum ExportPhaseKind
/// <summary>
/// Verify distribution.
/// </summary>
Verify = 8,
/// <summary>
/// Apply retention policies.
/// </summary>
ApplyRetention = 9,
/// <summary>
/// Cleanup and finalization.
/// </summary>
Finalize = 10
}
/// <summary>
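
A sketch of the new distribution-target spec (illustrative; `ObjectStore` is a stand-in name, since ExportDistributionKind lives in the Domain namespace and its members are not shown in this diff):

var target = new ExportDistributionTargetSpec
{
    Kind = Domain.ExportDistributionKind.ObjectStore, // hypothetical member name
    Target = "s3://mirror-bucket/exports/2025-12",
    IdempotencyKey = "mirror-2025-12-11",
    ConfigJson = """{"region":"eu-central-1"}"""
};

The new ExportFormat members (TrivyDb, TrivyJavaDb) plug into the same plan flow as the existing formats.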


@@ -0,0 +1,286 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Default implementation of the export retention service.
/// </summary>
public sealed class ExportRetentionService : IExportRetentionService
{
private readonly IExportRetentionStore _retentionStore;
private readonly ILogger<ExportRetentionService> _logger;
public ExportRetentionService(
IExportRetentionStore retentionStore,
ILogger<ExportRetentionService> logger)
{
_retentionStore = retentionStore ?? throw new ArgumentNullException(nameof(retentionStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<RetentionPruneResult> PruneAsync(
RetentionPruneRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var retention = request.OverrideRetention ?? new ExportRetentionConfig();
var now = DateTimeOffset.UtcNow;
_logger.LogInformation(
"Starting retention prune for tenant {TenantId}, profile {ProfileId}, execute={Execute}",
request.TenantId, request.ProfileId, request.Execute);
// Get runs eligible for pruning
var eligibleRuns = await GetRunsEligibleForPruningAsync(
request.TenantId,
request.ProfileId,
retention,
now,
cancellationToken);
if (eligibleRuns.Count == 0)
{
_logger.LogInformation("No runs eligible for pruning");
return new RetentionPruneResult { Success = true };
}
var prunedRuns = new List<PrunedRunInfo>();
var errors = new List<string>();
int totalArtifactsDeleted = 0;
long totalBytesFreed = 0;
int runsSkippedLegalHold = 0;
foreach (var runId in eligibleRuns)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var runInfo = await _retentionStore.GetRunInfoAsync(runId, cancellationToken);
if (runInfo is null)
continue;
// Check legal hold
if (retention.RespectLegalHold && runInfo.HasLegalHold)
{
_logger.LogDebug("Skipping run {RunId}: has legal hold", runId);
runsSkippedLegalHold++;
continue;
}
if (request.Execute)
{
// Delete artifacts first
var deleteResult = await _retentionStore.DeleteRunArtifactsAsync(runId, cancellationToken);
// Delete run record
await _retentionStore.DeleteRunAsync(runId, cancellationToken);
prunedRuns.Add(new PrunedRunInfo
{
RunId = runId,
ProfileId = runInfo.ProfileId,
CompletedAt = runInfo.CompletedAt,
ArtifactsDeleted = deleteResult.ArtifactsDeleted,
BytesFreed = deleteResult.BytesFreed
});
totalArtifactsDeleted += deleteResult.ArtifactsDeleted;
totalBytesFreed += deleteResult.BytesFreed;
_logger.LogDebug(
"Pruned run {RunId}: {Artifacts} artifacts, {Bytes} bytes",
runId, deleteResult.ArtifactsDeleted, deleteResult.BytesFreed);
}
else
{
// Dry run - just record what would be pruned
prunedRuns.Add(new PrunedRunInfo
{
RunId = runId,
ProfileId = runInfo.ProfileId,
CompletedAt = runInfo.CompletedAt,
ArtifactsDeleted = runInfo.ArtifactCount,
BytesFreed = runInfo.TotalSizeBytes
});
totalArtifactsDeleted += runInfo.ArtifactCount;
totalBytesFreed += runInfo.TotalSizeBytes;
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to prune run {RunId}", runId);
errors.Add($"Run {runId}: {ex.Message}");
}
}
_logger.LogInformation(
"Retention prune complete: {RunsPruned} runs, {ArtifactsDeleted} artifacts, {BytesFreed} bytes freed, {Skipped} skipped (legal hold)",
prunedRuns.Count, totalArtifactsDeleted, totalBytesFreed, runsSkippedLegalHold);
return new RetentionPruneResult
{
Success = errors.Count == 0,
RunsPruned = prunedRuns.Count,
ArtifactsDeleted = totalArtifactsDeleted,
BytesFreed = totalBytesFreed,
RunsSkippedLegalHold = runsSkippedLegalHold,
Errors = errors,
PrunedRuns = prunedRuns
};
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> GetRunsEligibleForPruningAsync(
Guid tenantId,
Guid? profileId,
ExportRetentionConfig retention,
DateTimeOffset asOf,
CancellationToken cancellationToken = default)
{
var eligibleRuns = new List<Guid>();
// Get all profiles to check
var profileIds = profileId.HasValue
? [profileId.Value]
: await _retentionStore.GetProfileIdsAsync(tenantId, cancellationToken);
foreach (var pid in profileIds)
{
// Get runs for this profile
var runs = await _retentionStore.GetRunsForProfileAsync(pid, cancellationToken);
// Sort by completion time descending (newest first)
var sortedRuns = runs
.Where(r => r.CompletedAt.HasValue)
.OrderByDescending(r => r.CompletedAt)
.ToList();
// Keep minimum runs
var runsToKeep = Math.Max(retention.MinimumRunsToRetain, 0);
var keptCount = 0;
for (var index = 0; index < sortedRuns.Count; index++)
{
var run = sortedRuns[index];
// Always keep minimum number of runs
if (keptCount < runsToKeep)
{
keptCount++;
continue;
}
// Check expiration
var isExpired = run.ExpiresAt.HasValue && run.ExpiresAt.Value <= asOf;
// Check max runs per profile; use the loop index rather than IndexOf, which is
// O(n) per run and can match the wrong element because records use value equality
var exceedsMaxRuns = index >= retention.MaxRunsPerProfile;
if (isExpired || exceedsMaxRuns)
{
eligibleRuns.Add(run.RunId);
}
}
}
return eligibleRuns;
}
/// <inheritdoc />
public async Task SetLegalHoldAsync(
Guid runId,
bool hold,
string? reason = null,
CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"Setting legal hold for run {RunId}: hold={Hold}, reason={Reason}",
runId, hold, reason);
await _retentionStore.SetLegalHoldAsync(runId, hold, reason, cancellationToken);
}
/// <inheritdoc />
public DateTimeOffset ComputeExpiration(
ExportRetentionConfig retention,
DateTimeOffset completedAt,
bool success)
{
var days = success ? retention.SuccessfulRunDays : retention.FailedRunDays;
return completedAt.AddDays(days);
}
}
/// <summary>
/// Store interface for retention operations.
/// </summary>
public interface IExportRetentionStore
{
/// <summary>
/// Gets all profile IDs for a tenant.
/// </summary>
Task<IReadOnlyList<Guid>> GetProfileIdsAsync(Guid tenantId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets runs for a profile.
/// </summary>
Task<IReadOnlyList<RetentionRunInfo>> GetRunsForProfileAsync(Guid profileId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets detailed run info.
/// </summary>
Task<DetailedRunInfo?> GetRunInfoAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes artifacts for a run.
/// </summary>
Task<ArtifactDeleteResult> DeleteRunArtifactsAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a run record.
/// </summary>
Task DeleteRunAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Sets legal hold on a run.
/// </summary>
Task SetLegalHoldAsync(Guid runId, bool hold, string? reason, CancellationToken cancellationToken = default);
}
/// <summary>
/// Run info for retention decisions.
/// </summary>
public sealed record RetentionRunInfo
{
public required Guid RunId { get; init; }
public required Guid ProfileId { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
public DateTimeOffset? ExpiresAt { get; init; }
public bool Success { get; init; }
public bool HasLegalHold { get; init; }
}
/// <summary>
/// Detailed run info for pruning.
/// </summary>
public sealed record DetailedRunInfo
{
public required Guid RunId { get; init; }
public required Guid ProfileId { get; init; }
public required DateTimeOffset CompletedAt { get; init; }
public bool HasLegalHold { get; init; }
public string? LegalHoldReason { get; init; }
public int ArtifactCount { get; init; }
public long TotalSizeBytes { get; init; }
}
/// <summary>
/// Result of artifact deletion.
/// </summary>
public sealed record ArtifactDeleteResult
{
public int ArtifactsDeleted { get; init; }
public long BytesFreed { get; init; }
}

View File

@@ -0,0 +1,335 @@
using System.Collections.Concurrent;
using System.Net;
using System.Net.Sockets;
using Cronos;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Default implementation of the export scheduler service.
/// </summary>
public sealed class ExportSchedulerService : IExportSchedulerService
{
private readonly IExportScheduleStore _scheduleStore;
private readonly ILogger<ExportSchedulerService> _logger;
private readonly ConcurrentDictionary<string, CronExpression> _cronCache = new();
// Pause profiles after this many consecutive failures
private const int MaxConsecutiveFailuresBeforePause = 10;
public ExportSchedulerService(
IExportScheduleStore scheduleStore,
ILogger<ExportSchedulerService> logger)
{
_scheduleStore = scheduleStore ?? throw new ArgumentNullException(nameof(scheduleStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public DateTimeOffset? GetNextScheduledTime(
Guid profileId,
string cronExpression,
string timezone,
DateTimeOffset from)
{
if (string.IsNullOrWhiteSpace(cronExpression))
return null;
try
{
var cron = GetOrParseCron(cronExpression);
// FindSystemTimeZoneById throws on unknown IDs (it never returns null), so fall back explicitly.
var tz = TimeZoneInfo.TryFindSystemTimeZoneById(timezone, out var found) ? found : TimeZoneInfo.Utc;
return cron.GetNextOccurrence(from, tz);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to compute next schedule for profile {ProfileId}", profileId);
return null;
}
}
/// <inheritdoc />
public async Task<ExportTriggerResult> TriggerAsync(
ExportTriggerRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Triggering export for profile {ProfileId} from {Source}",
request.ProfileId, request.Source);
// Get current status
var status = await _scheduleStore.GetStatusAsync(request.ProfileId, cancellationToken);
// Check if profile is paused due to failures (unless forced)
if (!request.Force && status?.IsPausedDueToFailures == true)
{
_logger.LogWarning(
"Trigger rejected for profile {ProfileId}: paused due to {Failures} consecutive failures",
request.ProfileId, status.ConsecutiveFailures);
return ExportTriggerResult.Rejected(
ExportTriggerRejection.PausedDueToFailures,
$"Profile paused after {status.ConsecutiveFailures} consecutive failures");
}
// Check if already running
if (status?.IsRunning == true)
{
_logger.LogInformation(
"Trigger rejected for profile {ProfileId}: already running (run {RunId})",
request.ProfileId, status.CurrentRunId);
return ExportTriggerResult.Rejected(
ExportTriggerRejection.ConcurrencyLimitReached,
$"Profile already running (run {status.CurrentRunId})");
}
// Create new run
var runId = Guid.NewGuid();
await _scheduleStore.RecordTriggerAsync(
request.ProfileId,
runId,
request.Source,
request.CorrelationId,
request.InitiatedBy,
cancellationToken);
_logger.LogInformation(
"Created run {RunId} for profile {ProfileId}",
runId, request.ProfileId);
return ExportTriggerResult.Success(runId);
}
/// <inheritdoc />
public Task<ScheduledExportStatus?> GetStatusAsync(
Guid profileId,
CancellationToken cancellationToken = default)
{
return _scheduleStore.GetStatusAsync(profileId, cancellationToken);
}
/// <inheritdoc />
public async Task UpdateRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure = null,
CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"Updating run completion for {RunId}: success={Success}",
runId, success);
await _scheduleStore.RecordRunCompletionAsync(
runId,
success,
failure,
cancellationToken);
// Check if we should pause the profile
if (!success && failure?.Class != ExportFailureClass.Cancelled)
{
var status = await _scheduleStore.GetStatusByRunAsync(runId, cancellationToken);
if (status?.ConsecutiveFailures >= MaxConsecutiveFailuresBeforePause)
{
_logger.LogWarning(
"Pausing profile {ProfileId} after {Failures} consecutive failures",
status.ProfileId, status.ConsecutiveFailures);
await _scheduleStore.SetPausedAsync(status.ProfileId, true, cancellationToken);
}
}
}
/// <inheritdoc />
public (bool IsValid, string? ErrorMessage) ValidateCronExpression(string cronExpression)
{
if (string.IsNullOrWhiteSpace(cronExpression))
return (false, "Cron expression cannot be empty");
try
{
// Try parsing - support both 5-field (standard) and 6-field (with seconds) formats
var format = cronExpression.Trim().Split(' ', StringSplitOptions.RemoveEmptyEntries).Length == 6
? CronFormat.IncludeSeconds
: CronFormat.Standard;
CronExpression.Parse(cronExpression, format);
return (true, null);
}
catch (CronFormatException ex)
{
return (false, $"Invalid cron expression: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> GetProfilesDueForExecutionAsync(
Guid tenantId,
DateTimeOffset asOf,
CancellationToken cancellationToken = default)
{
var profiles = await _scheduleStore.GetScheduledProfilesAsync(tenantId, cancellationToken);
var due = new List<Guid>();
foreach (var profile in profiles)
{
if (string.IsNullOrWhiteSpace(profile.CronExpression))
continue;
var status = await _scheduleStore.GetStatusAsync(profile.ProfileId, cancellationToken);
// Skip if running or paused
if (status?.IsRunning == true || status?.IsPausedDueToFailures == true)
continue;
// Check if due. Profiles whose NextScheduledRun was never seeded (via
// UpdateNextScheduledRunAsync) are skipped until the scheduler records one.
var nextRun = status?.NextScheduledRun;
if (nextRun.HasValue && nextRun.Value <= asOf)
{
due.Add(profile.ProfileId);
}
}
return due;
}
/// <inheritdoc />
public TimeSpan? ComputeRetryDelay(ExportRetryPolicy policy, int failureCount)
{
if (failureCount >= policy.MaxRetries)
return null;
// First retry waits InitialDelaySeconds; each later retry multiplies by BackoffMultiplier.
var delay = policy.InitialDelaySeconds * Math.Pow(policy.BackoffMultiplier, Math.Max(0, failureCount - 1));
var cappedDelay = Math.Min(delay, policy.MaxDelaySeconds);
return TimeSpan.FromSeconds(cappedDelay);
}
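// Worked example with the defaults (60s initial, x2.0 backoff, 3600s cap, 3 max retries):
// failureCount 1 -> 60s, failureCount 2 -> 120s, failureCount 3 -> null (retries exhausted).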
/// <inheritdoc />
public ExportFailureClass ClassifyFailure(Exception exception)
{
return exception switch
{
// Network-related
SocketException => ExportFailureClass.NetworkError,
HttpRequestException httpEx when IsTransient(httpEx) => ExportFailureClass.Transient,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.TooManyRequests => ExportFailureClass.RateLimit,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.Unauthorized => ExportFailureClass.AuthFailure,
HttpRequestException httpEx when httpEx.StatusCode == HttpStatusCode.Forbidden => ExportFailureClass.AuthFailure,
// Timeout
TimeoutException => ExportFailureClass.Transient,
TaskCanceledException tcEx when tcEx.CancellationToken.IsCancellationRequested => ExportFailureClass.Cancelled,
TaskCanceledException => ExportFailureClass.Transient,
OperationCanceledException ocEx when ocEx.CancellationToken.IsCancellationRequested => ExportFailureClass.Cancelled,
// Validation
ArgumentException => ExportFailureClass.ValidationError,
FormatException => ExportFailureClass.ValidationError,
// IO
IOException => ExportFailureClass.Transient,
UnauthorizedAccessException => ExportFailureClass.AuthFailure,
// Default
_ => ExportFailureClass.Unknown
};
}
private static bool IsTransient(HttpRequestException ex)
{
return ex.StatusCode switch
{
HttpStatusCode.RequestTimeout => true,
HttpStatusCode.BadGateway => true,
HttpStatusCode.ServiceUnavailable => true,
HttpStatusCode.GatewayTimeout => true,
null => true, // Connection failures
_ => false
};
}
private CronExpression GetOrParseCron(string expression)
{
return _cronCache.GetOrAdd(expression, expr =>
{
var format = expr.Trim().Split(' ', StringSplitOptions.RemoveEmptyEntries).Length == 6
? CronFormat.IncludeSeconds
: CronFormat.Standard;
return CronExpression.Parse(expr, format);
});
}
}
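// Illustrative sketch, not part of this commit: validating a schedule, computing the next
// occurrence, and deciding whether a failure is retryable. Cron/timezone values are placeholders.
internal static class SchedulerUsageExample
{
public static void Demo(IExportSchedulerService scheduler)
{
var (isValid, error) = scheduler.ValidateCronExpression("0 */6 * * *"); // every six hours
if (!isValid)
{
throw new InvalidOperationException($"Bad schedule: {error}");
}
var next = scheduler.GetNextScheduledTime(
Guid.NewGuid(), "0 */6 * * *", "America/New_York", DateTimeOffset.UtcNow);
var cls = scheduler.ClassifyFailure(new TimeoutException());
var delay = cls is ExportFailureClass.Transient or ExportFailureClass.NetworkError or ExportFailureClass.RateLimit
? scheduler.ComputeRetryDelay(new ExportRetryPolicy(), failureCount: 1)
: null;
Console.WriteLine($"Next run: {next:O}; retry delay after one transient failure: {delay}");
}
}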
/// <summary>
/// Store interface for schedule state.
/// </summary>
public interface IExportScheduleStore
{
/// <summary>
/// Gets the current status for a profile.
/// </summary>
Task<ScheduledExportStatus?> GetStatusAsync(Guid profileId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status by run ID.
/// </summary>
Task<ScheduledExportStatus?> GetStatusByRunAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Records a trigger/run start.
/// </summary>
Task RecordTriggerAsync(
Guid profileId,
Guid runId,
ExportTriggerSource source,
string? correlationId,
string? initiatedBy,
CancellationToken cancellationToken = default);
/// <summary>
/// Records run completion.
/// </summary>
Task RecordRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets the paused state for a profile.
/// </summary>
Task SetPausedAsync(Guid profileId, bool paused, CancellationToken cancellationToken = default);
/// <summary>
/// Gets all scheduled profiles for a tenant.
/// </summary>
Task<IReadOnlyList<ScheduledProfileInfo>> GetScheduledProfilesAsync(
Guid tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the next scheduled run time.
/// </summary>
Task UpdateNextScheduledRunAsync(
Guid profileId,
DateTimeOffset? nextRun,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Basic profile info for scheduling.
/// </summary>
public sealed record ScheduledProfileInfo
{
public required Guid ProfileId { get; init; }
public required Guid TenantId { get; init; }
public string? CronExpression { get; init; }
public string Timezone { get; init; } = "UTC";
public bool Enabled { get; init; }
}

View File

@@ -0,0 +1,622 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Configuration for export scheduling.
/// </summary>
public sealed record ExportScheduleConfig
{
/// <summary>
/// Cron expression for scheduled execution (5 or 6 field format).
/// </summary>
[JsonPropertyName("cronExpression")]
public string? CronExpression { get; init; }
/// <summary>
/// Timezone for cron interpretation (IANA format, e.g., "UTC", "America/New_York").
/// </summary>
[JsonPropertyName("timezone")]
public string Timezone { get; init; } = "UTC";
/// <summary>
/// Whether scheduling is enabled.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Maximum concurrent runs per profile.
/// </summary>
[JsonPropertyName("maxConcurrentRuns")]
public int MaxConcurrentRuns { get; init; } = 1;
/// <summary>
/// Event triggers that initiate runs.
/// </summary>
[JsonPropertyName("eventTriggers")]
public IReadOnlyList<ExportEventTrigger> EventTriggers { get; init; } = [];
/// <summary>
/// Retry configuration for failed runs.
/// </summary>
[JsonPropertyName("retryPolicy")]
public ExportRetryPolicy RetryPolicy { get; init; } = new();
/// <summary>
/// Retention configuration for completed runs.
/// </summary>
[JsonPropertyName("retention")]
public ExportRetentionConfig Retention { get; init; } = new();
}
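// Example of how this record serializes with the JsonPropertyName mappings above;
// values are illustrative, not shipped defaults. Enums serialize as numbers here
// (1 = AdvisoryIngested) because no string-enum converter is applied.
//
// {
//   "cronExpression": "0 2 * * *",
//   "timezone": "UTC",
//   "enabled": true,
//   "maxConcurrentRuns": 1,
//   "eventTriggers": [
//     { "eventType": 1, "filterJson": null, "enabled": true, "debounceSeconds": 300 }
//   ],
//   "retryPolicy": { "maxRetries": 3, "initialDelaySeconds": 60, "maxDelaySeconds": 3600, "backoffMultiplier": 2 },
//   "retention": { "successfulRunDays": 30, "failedRunDays": 7, "maxRunsPerProfile": 100, "respectLegalHold": true, "minimumRunsToRetain": 5 }
// }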
/// <summary>
/// Event trigger for export runs.
/// </summary>
public sealed record ExportEventTrigger
{
/// <summary>
/// Event type that triggers the export.
/// </summary>
[JsonPropertyName("eventType")]
public required ExportEventType EventType { get; init; }
/// <summary>
/// Filter conditions for the event (JSON-encoded).
/// </summary>
[JsonPropertyName("filterJson")]
public string? FilterJson { get; init; }
/// <summary>
/// Whether this trigger is enabled.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
/// <summary>
/// Debounce window in seconds (coalesce events within this window).
/// </summary>
[JsonPropertyName("debounceSeconds")]
public int DebounceSeconds { get; init; } = 0;
}
/// <summary>
/// Types of events that can trigger exports.
/// </summary>
public enum ExportEventType
{
/// <summary>
/// New advisory ingested.
/// </summary>
AdvisoryIngested = 1,
/// <summary>
/// Advisory updated or withdrawn.
/// </summary>
AdvisoryUpdated = 2,
/// <summary>
/// New VEX document created.
/// </summary>
VexCreated = 3,
/// <summary>
/// VEX document updated.
/// </summary>
VexUpdated = 4,
/// <summary>
/// New SBOM ingested.
/// </summary>
SbomIngested = 5,
/// <summary>
/// Scan completed.
/// </summary>
ScanCompleted = 6,
/// <summary>
/// Policy evaluation completed.
/// </summary>
PolicyEvaluated = 7,
/// <summary>
/// Attestation created.
/// </summary>
AttestationCreated = 8,
/// <summary>
/// Manual trigger via API.
/// </summary>
ApiTrigger = 100,
/// <summary>
/// Webhook trigger.
/// </summary>
WebhookTrigger = 101
}
/// <summary>
/// Retry policy for failed export runs.
/// </summary>
public sealed record ExportRetryPolicy
{
/// <summary>
/// Maximum number of retry attempts.
/// </summary>
[JsonPropertyName("maxRetries")]
public int MaxRetries { get; init; } = 3;
/// <summary>
/// Initial delay between retries in seconds.
/// </summary>
[JsonPropertyName("initialDelaySeconds")]
public int InitialDelaySeconds { get; init; } = 60;
/// <summary>
/// Maximum delay between retries in seconds.
/// </summary>
[JsonPropertyName("maxDelaySeconds")]
public int MaxDelaySeconds { get; init; } = 3600;
/// <summary>
/// Backoff multiplier (exponential backoff).
/// </summary>
[JsonPropertyName("backoffMultiplier")]
public double BackoffMultiplier { get; init; } = 2.0;
/// <summary>
/// Failure types that should be retried.
/// </summary>
[JsonPropertyName("retryableFailures")]
public IReadOnlyList<ExportFailureClass> RetryableFailures { get; init; } =
[
ExportFailureClass.Transient,
ExportFailureClass.RateLimit,
ExportFailureClass.NetworkError
];
}
/// <summary>
/// Retention configuration for export artifacts.
/// </summary>
public sealed record ExportRetentionConfig
{
/// <summary>
/// Retention period in days for successful runs.
/// </summary>
[JsonPropertyName("successfulRunDays")]
public int SuccessfulRunDays { get; init; } = 30;
/// <summary>
/// Retention period in days for failed runs.
/// </summary>
[JsonPropertyName("failedRunDays")]
public int FailedRunDays { get; init; } = 7;
/// <summary>
/// Maximum total runs to retain per profile.
/// </summary>
[JsonPropertyName("maxRunsPerProfile")]
public int MaxRunsPerProfile { get; init; } = 100;
/// <summary>
/// Whether to keep runs with legal hold.
/// </summary>
[JsonPropertyName("respectLegalHold")]
public bool RespectLegalHold { get; init; } = true;
/// <summary>
/// Minimum runs to retain even if expired.
/// </summary>
[JsonPropertyName("minimumRunsToRetain")]
public int MinimumRunsToRetain { get; init; } = 5;
}
/// <summary>
/// Classification of export failures.
/// </summary>
public enum ExportFailureClass
{
/// <summary>
/// Unknown or unclassified failure.
/// </summary>
Unknown = 0,
/// <summary>
/// Transient failure (network timeout, temporary unavailability).
/// </summary>
Transient = 1,
/// <summary>
/// Rate limit exceeded.
/// </summary>
RateLimit = 2,
/// <summary>
/// Network error (connection refused, DNS failure).
/// </summary>
NetworkError = 3,
/// <summary>
/// Permanent failure (invalid configuration, missing data).
/// </summary>
Permanent = 4,
/// <summary>
/// Authentication or authorization failure.
/// </summary>
AuthFailure = 5,
/// <summary>
/// Quota exceeded (storage, API calls).
/// </summary>
QuotaExceeded = 6,
/// <summary>
/// Validation error in input data.
/// </summary>
ValidationError = 7,
/// <summary>
/// Dependency unavailable (KMS, signing service).
/// </summary>
DependencyFailure = 8,
/// <summary>
/// Run was cancelled.
/// </summary>
Cancelled = 9
}
/// <summary>
/// Detailed failure information for export runs.
/// </summary>
public sealed record ExportFailureInfo
{
/// <summary>
/// Failure classification.
/// </summary>
[JsonPropertyName("class")]
public required ExportFailureClass Class { get; init; }
/// <summary>
/// Error code (domain-specific).
/// </summary>
[JsonPropertyName("errorCode")]
public string? ErrorCode { get; init; }
/// <summary>
/// Human-readable error message.
/// </summary>
[JsonPropertyName("message")]
public required string Message { get; init; }
/// <summary>
/// Detailed error information (stack trace, inner errors).
/// </summary>
[JsonPropertyName("details")]
public string? Details { get; init; }
/// <summary>
/// When the failure occurred.
/// </summary>
[JsonPropertyName("occurredAt")]
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Whether retry is recommended.
/// </summary>
[JsonPropertyName("retryable")]
public bool Retryable { get; init; }
/// <summary>
/// Suggested retry delay in seconds.
/// </summary>
[JsonPropertyName("retryAfterSeconds")]
public int? RetryAfterSeconds { get; init; }
}
/// <summary>
/// Status of a scheduled export.
/// </summary>
public sealed record ScheduledExportStatus
{
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// Last successful run timestamp.
/// </summary>
public DateTimeOffset? LastSuccessfulRun { get; init; }
/// <summary>
/// Last failed run timestamp.
/// </summary>
public DateTimeOffset? LastFailedRun { get; init; }
/// <summary>
/// Next scheduled run timestamp.
/// </summary>
public DateTimeOffset? NextScheduledRun { get; init; }
/// <summary>
/// Current retry count for consecutive failures.
/// </summary>
public int ConsecutiveFailures { get; init; }
/// <summary>
/// Whether the profile is currently executing.
/// </summary>
public bool IsRunning { get; init; }
/// <summary>
/// Current run ID if running.
/// </summary>
public Guid? CurrentRunId { get; init; }
/// <summary>
/// Whether scheduling is paused due to failures.
/// </summary>
public bool IsPausedDueToFailures { get; init; }
/// <summary>
/// Last failure info if any.
/// </summary>
public ExportFailureInfo? LastFailure { get; init; }
}
/// <summary>
/// Request to trigger an export.
/// </summary>
public sealed record ExportTriggerRequest
{
/// <summary>
/// Profile ID to execute.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// Trigger source.
/// </summary>
public required ExportTriggerSource Source { get; init; }
/// <summary>
/// Correlation ID for tracing.
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// User or service that initiated the trigger.
/// </summary>
public string? InitiatedBy { get; init; }
/// <summary>
/// Event data for event-triggered exports.
/// </summary>
public string? EventDataJson { get; init; }
/// <summary>
/// Override configuration (JSON).
/// </summary>
public string? OverrideConfigJson { get; init; }
/// <summary>
/// Whether to force run even if profile is paused.
/// </summary>
public bool Force { get; init; }
/// <summary>
/// Priority hint (higher = more urgent).
/// </summary>
public int Priority { get; init; }
}
/// <summary>
/// Source of export trigger.
/// </summary>
public enum ExportTriggerSource
{
/// <summary>
/// Scheduled via cron.
/// </summary>
Scheduled = 1,
/// <summary>
/// Triggered by event.
/// </summary>
Event = 2,
/// <summary>
/// Manual trigger via API.
/// </summary>
Manual = 3,
/// <summary>
/// Retry of a failed run.
/// </summary>
Retry = 4,
/// <summary>
/// System-initiated (e.g., startup catch-up).
/// </summary>
System = 5
}
/// <summary>
/// Result of a trigger request.
/// </summary>
public sealed record ExportTriggerResult
{
/// <summary>
/// Whether the trigger was accepted.
/// </summary>
public required bool Accepted { get; init; }
/// <summary>
/// Run ID if a new run was created.
/// </summary>
public Guid? RunId { get; init; }
/// <summary>
/// Reason if not accepted.
/// </summary>
public string? RejectionReason { get; init; }
/// <summary>
/// Rejection code.
/// </summary>
public ExportTriggerRejection? RejectionCode { get; init; }
public static ExportTriggerResult Success(Guid runId)
=> new() { Accepted = true, RunId = runId };
public static ExportTriggerResult Rejected(ExportTriggerRejection code, string reason)
=> new() { Accepted = false, RejectionCode = code, RejectionReason = reason };
}
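// Illustrative trigger flow, not part of this commit: a manual API trigger and one way a
// caller might react to rejection. profileId is a placeholder.
//
// var result = await scheduler.TriggerAsync(new ExportTriggerRequest
// {
//     ProfileId = profileId,
//     Source = ExportTriggerSource.Manual,
//     InitiatedBy = "api:user@example.com"
// });
// if (!result.Accepted && result.RejectionCode == ExportTriggerRejection.PausedDueToFailures)
// {
//     // Investigate the failures, or re-issue the request with Force = true.
// }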
/// <summary>
/// Reasons for rejecting a trigger.
/// </summary>
public enum ExportTriggerRejection
{
/// <summary>
/// Profile not found.
/// </summary>
ProfileNotFound = 1,
/// <summary>
/// Profile is not active.
/// </summary>
ProfileNotActive = 2,
/// <summary>
/// Maximum concurrent runs reached.
/// </summary>
ConcurrencyLimitReached = 3,
/// <summary>
/// Profile is paused due to failures.
/// </summary>
PausedDueToFailures = 4,
/// <summary>
/// Event trigger not enabled.
/// </summary>
TriggerNotEnabled = 5,
/// <summary>
/// Debounce window active.
/// </summary>
DebouncePending = 6,
/// <summary>
/// Rate limit exceeded.
/// </summary>
RateLimited = 7,
/// <summary>
/// Invalid configuration.
/// </summary>
InvalidConfiguration = 8
}
/// <summary>
/// Request for retention pruning.
/// </summary>
public sealed record RetentionPruneRequest
{
/// <summary>
/// Tenant ID to prune.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Optional profile ID to restrict pruning.
/// </summary>
public Guid? ProfileId { get; init; }
/// <summary>
/// Whether to actually delete (false = dry run).
/// </summary>
public bool Execute { get; init; } = true;
/// <summary>
/// Override retention config.
/// </summary>
public ExportRetentionConfig? OverrideRetention { get; init; }
}
/// <summary>
/// Result of retention pruning.
/// </summary>
public sealed record RetentionPruneResult
{
/// <summary>
/// Whether pruning was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Number of runs pruned.
/// </summary>
public int RunsPruned { get; init; }
/// <summary>
/// Number of artifacts deleted.
/// </summary>
public int ArtifactsDeleted { get; init; }
/// <summary>
/// Bytes freed.
/// </summary>
public long BytesFreed { get; init; }
/// <summary>
/// Runs that were skipped due to legal hold.
/// </summary>
public int RunsSkippedLegalHold { get; init; }
/// <summary>
/// Errors encountered during pruning.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
/// <summary>
/// Details of pruned runs.
/// </summary>
public IReadOnlyList<PrunedRunInfo> PrunedRuns { get; init; } = [];
}
/// <summary>
/// Information about a pruned run.
/// </summary>
public sealed record PrunedRunInfo
{
/// <summary>
/// Run ID.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// When the run completed.
/// </summary>
public required DateTimeOffset CompletedAt { get; init; }
/// <summary>
/// Number of artifacts deleted.
/// </summary>
public int ArtifactsDeleted { get; init; }
/// <summary>
/// Bytes freed from this run.
/// </summary>
public long BytesFreed { get; init; }
}

View File

@@ -0,0 +1,44 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Extension methods for registering export scheduling services.
/// </summary>
public static class ExportSchedulingServiceCollectionExtensions
{
/// <summary>
/// Registers export scheduling services with in-memory stores.
/// </summary>
public static IServiceCollection AddExportScheduling(this IServiceCollection services)
{
// Register stores (in-memory by default)
services.AddSingleton<IExportScheduleStore, InMemoryExportScheduleStore>();
services.AddSingleton<IExportRetentionStore, InMemoryExportRetentionStore>();
// Register services
services.AddSingleton<IExportSchedulerService, ExportSchedulerService>();
services.AddSingleton<IExportRetentionService, ExportRetentionService>();
return services;
}
/// <summary>
/// Registers export scheduling services with custom stores.
/// </summary>
public static IServiceCollection AddExportScheduling<TScheduleStore, TRetentionStore>(
this IServiceCollection services)
where TScheduleStore : class, IExportScheduleStore
where TRetentionStore : class, IExportRetentionStore
{
// Register custom stores
services.AddSingleton<IExportScheduleStore, TScheduleStore>();
services.AddSingleton<IExportRetentionStore, TRetentionStore>();
// Register services
services.AddSingleton<IExportSchedulerService, ExportSchedulerService>();
services.AddSingleton<IExportRetentionService, ExportRetentionService>();
return services;
}
}
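// Illustrative registration, not part of this commit. MongoScheduleStore and
// MongoRetentionStore are hypothetical names, not types in this repository:
//
// var services = new ServiceCollection();
// services.AddLogging();
// services.AddExportScheduling();                                          // in-memory stores (tests)
// services.AddExportScheduling<MongoScheduleStore, MongoRetentionStore>(); // durable stores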

View File

@@ -0,0 +1,145 @@
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// Service for managing export scheduling.
/// </summary>
public interface IExportSchedulerService
{
/// <summary>
/// Gets the next scheduled run time for a profile.
/// </summary>
/// <param name="profileId">Profile ID.</param>
/// <param name="cronExpression">Cron expression.</param>
/// <param name="timezone">Timezone name.</param>
/// <param name="from">Start time for calculation.</param>
/// <returns>Next run time, or null if no next occurrence.</returns>
DateTimeOffset? GetNextScheduledTime(
Guid profileId,
string cronExpression,
string timezone,
DateTimeOffset from);
/// <summary>
/// Triggers an export run.
/// </summary>
/// <param name="request">Trigger request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Trigger result.</returns>
Task<ExportTriggerResult> TriggerAsync(
ExportTriggerRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status of a scheduled export.
/// </summary>
/// <param name="profileId">Profile ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Schedule status.</returns>
Task<ScheduledExportStatus?> GetStatusAsync(
Guid profileId,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the status after a run completes.
/// </summary>
/// <param name="runId">Run ID.</param>
/// <param name="success">Whether the run succeeded.</param>
/// <param name="failure">Failure info if failed.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UpdateRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates a cron expression.
/// </summary>
/// <param name="cronExpression">Cron expression to validate.</param>
/// <returns>Validation result with error message if invalid.</returns>
(bool IsValid, string? ErrorMessage) ValidateCronExpression(string cronExpression);
/// <summary>
/// Gets profiles due for scheduled execution.
/// </summary>
/// <param name="tenantId">Tenant ID.</param>
/// <param name="asOf">Time to check against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of profile IDs due for execution.</returns>
Task<IReadOnlyList<Guid>> GetProfilesDueForExecutionAsync(
Guid tenantId,
DateTimeOffset asOf,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes retry delay based on policy and failure count.
/// </summary>
/// <param name="policy">Retry policy.</param>
/// <param name="failureCount">Number of consecutive failures.</param>
/// <returns>Delay before next retry, or null if no more retries.</returns>
TimeSpan? ComputeRetryDelay(ExportRetryPolicy policy, int failureCount);
/// <summary>
/// Classifies an exception into a failure class.
/// </summary>
/// <param name="exception">The exception.</param>
/// <returns>Failure classification.</returns>
ExportFailureClass ClassifyFailure(Exception exception);
}
/// <summary>
/// Service for managing export retention.
/// </summary>
public interface IExportRetentionService
{
/// <summary>
/// Prunes expired runs and artifacts.
/// </summary>
/// <param name="request">Prune request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Prune result.</returns>
Task<RetentionPruneResult> PruneAsync(
RetentionPruneRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets runs eligible for pruning.
/// </summary>
/// <param name="tenantId">Tenant ID.</param>
/// <param name="profileId">Optional profile ID.</param>
/// <param name="retention">Retention config.</param>
/// <param name="asOf">Time to check against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of run IDs eligible for pruning.</returns>
Task<IReadOnlyList<Guid>> GetRunsEligibleForPruningAsync(
Guid tenantId,
Guid? profileId,
ExportRetentionConfig retention,
DateTimeOffset asOf,
CancellationToken cancellationToken = default);
/// <summary>
/// Sets or removes legal hold on a run.
/// </summary>
/// <param name="runId">Run ID.</param>
/// <param name="hold">Whether to hold or release.</param>
/// <param name="reason">Reason for the hold.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SetLegalHoldAsync(
Guid runId,
bool hold,
string? reason = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes expiration time for a new run.
/// </summary>
/// <param name="retention">Retention config.</param>
/// <param name="completedAt">When the run completed.</param>
/// <param name="success">Whether the run succeeded.</param>
/// <returns>Expiration timestamp.</returns>
DateTimeOffset ComputeExpiration(
ExportRetentionConfig retention,
DateTimeOffset completedAt,
bool success);
}

View File

@@ -0,0 +1,308 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Scheduling;
/// <summary>
/// In-memory implementation of the schedule store for testing.
/// </summary>
public sealed class InMemoryExportScheduleStore : IExportScheduleStore
{
private readonly ConcurrentDictionary<Guid, ScheduledExportStatus> _statusByProfile = new();
private readonly ConcurrentDictionary<Guid, Guid> _runToProfile = new();
private readonly ConcurrentDictionary<Guid, List<ScheduledProfileInfo>> _profilesByTenant = new();
private readonly object _lock = new();
/// <summary>
/// Adds a profile for testing.
/// </summary>
public void AddProfile(ScheduledProfileInfo profile)
{
lock (_lock)
{
if (!_profilesByTenant.TryGetValue(profile.TenantId, out var profiles))
{
profiles = [];
_profilesByTenant[profile.TenantId] = profiles;
}
profiles.Add(profile);
// Initialize status
_statusByProfile[profile.ProfileId] = new ScheduledExportStatus
{
ProfileId = profile.ProfileId
};
}
}
/// <summary>
/// Sets status for testing.
/// </summary>
public void SetStatus(ScheduledExportStatus status)
{
_statusByProfile[status.ProfileId] = status;
// Also update run-to-profile mapping if a current run is set
if (status.CurrentRunId.HasValue)
{
_runToProfile[status.CurrentRunId.Value] = status.ProfileId;
}
}
public Task<ScheduledExportStatus?> GetStatusAsync(Guid profileId, CancellationToken cancellationToken = default)
{
_statusByProfile.TryGetValue(profileId, out var status);
return Task.FromResult(status);
}
public Task<ScheduledExportStatus?> GetStatusByRunAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runToProfile.TryGetValue(runId, out var profileId))
{
_statusByProfile.TryGetValue(profileId, out var status);
return Task.FromResult(status);
}
return Task.FromResult<ScheduledExportStatus?>(null);
}
public Task RecordTriggerAsync(
Guid profileId,
Guid runId,
ExportTriggerSource source,
string? correlationId,
string? initiatedBy,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
_runToProfile[runId] = profileId;
_statusByProfile.AddOrUpdate(
profileId,
_ => new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
CurrentRunId = runId
},
(_, existing) => existing with
{
IsRunning = true,
CurrentRunId = runId
});
}
return Task.CompletedTask;
}
public Task RecordRunCompletionAsync(
Guid runId,
bool success,
ExportFailureInfo? failure,
CancellationToken cancellationToken = default)
{
if (!_runToProfile.TryGetValue(runId, out var profileId))
return Task.CompletedTask;
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
var now = DateTimeOffset.UtcNow;
var newFailureCount = success ? 0 : existing.ConsecutiveFailures + 1;
_statusByProfile[profileId] = existing with
{
IsRunning = false,
CurrentRunId = null,
LastSuccessfulRun = success ? now : existing.LastSuccessfulRun,
LastFailedRun = success ? existing.LastFailedRun : now,
ConsecutiveFailures = newFailureCount,
LastFailure = failure
};
}
}
return Task.CompletedTask;
}
public Task SetPausedAsync(Guid profileId, bool paused, CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
_statusByProfile[profileId] = existing with
{
IsPausedDueToFailures = paused
};
}
}
return Task.CompletedTask;
}
public Task<IReadOnlyList<ScheduledProfileInfo>> GetScheduledProfilesAsync(
Guid tenantId,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
// Snapshot under the lock so callers never enumerate a list AddProfile may be mutating.
_profilesByTenant.TryGetValue(tenantId, out var profiles);
return Task.FromResult<IReadOnlyList<ScheduledProfileInfo>>(profiles?.ToArray() ?? []);
}
}
public Task UpdateNextScheduledRunAsync(
Guid profileId,
DateTimeOffset? nextRun,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_statusByProfile.TryGetValue(profileId, out var existing))
{
_statusByProfile[profileId] = existing with
{
NextScheduledRun = nextRun
};
}
}
return Task.CompletedTask;
}
/// <summary>
/// Clears all state.
/// </summary>
public void Clear()
{
_statusByProfile.Clear();
_runToProfile.Clear();
_profilesByTenant.Clear();
}
}
/// <summary>
/// In-memory implementation of the retention store for testing.
/// </summary>
public sealed class InMemoryExportRetentionStore : IExportRetentionStore
{
private readonly ConcurrentDictionary<Guid, DetailedRunInfo> _runs = new();
private readonly ConcurrentDictionary<Guid, List<Guid>> _runsByProfile = new();
private readonly ConcurrentDictionary<Guid, List<Guid>> _profilesByTenant = new();
private readonly ConcurrentDictionary<Guid, (bool Hold, string? Reason)> _legalHolds = new();
private readonly object _lock = new();
/// <summary>
/// Adds a run for testing.
/// </summary>
public void AddRun(DetailedRunInfo run, Guid tenantId)
{
lock (_lock)
{
_runs[run.RunId] = run;
if (!_runsByProfile.TryGetValue(run.ProfileId, out var runs))
{
runs = [];
_runsByProfile[run.ProfileId] = runs;
}
runs.Add(run.RunId);
if (!_profilesByTenant.TryGetValue(tenantId, out var profiles))
{
profiles = [];
_profilesByTenant[tenantId] = profiles;
}
if (!profiles.Contains(run.ProfileId))
{
profiles.Add(run.ProfileId);
}
}
}
public Task<IReadOnlyList<Guid>> GetProfileIdsAsync(Guid tenantId, CancellationToken cancellationToken = default)
{
lock (_lock)
{
// Snapshot under the lock so callers never enumerate a list AddRun may be mutating.
_profilesByTenant.TryGetValue(tenantId, out var profiles);
return Task.FromResult<IReadOnlyList<Guid>>(profiles?.ToArray() ?? []);
}
}
public Task<IReadOnlyList<RetentionRunInfo>> GetRunsForProfileAsync(Guid profileId, CancellationToken cancellationToken = default)
{
var result = new List<RetentionRunInfo>();
if (_runsByProfile.TryGetValue(profileId, out var runIds))
{
foreach (var runId in runIds)
{
if (_runs.TryGetValue(runId, out var run))
{
_legalHolds.TryGetValue(runId, out var hold);
result.Add(new RetentionRunInfo
{
RunId = run.RunId,
ProfileId = run.ProfileId,
CompletedAt = run.CompletedAt,
ExpiresAt = run.CompletedAt.AddDays(30), // Fixed 30-day expiry; this test store does not track per-run policy
HasLegalHold = hold.Hold
});
}
}
}
return Task.FromResult<IReadOnlyList<RetentionRunInfo>>(result);
}
public Task<DetailedRunInfo?> GetRunInfoAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
_legalHolds.TryGetValue(runId, out var hold);
return Task.FromResult<DetailedRunInfo?>(run with
{
HasLegalHold = hold.Hold,
LegalHoldReason = hold.Reason
});
}
return Task.FromResult<DetailedRunInfo?>(null);
}
public Task<ArtifactDeleteResult> DeleteRunArtifactsAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
return Task.FromResult(new ArtifactDeleteResult
{
ArtifactsDeleted = run.ArtifactCount,
BytesFreed = run.TotalSizeBytes
});
}
return Task.FromResult(new ArtifactDeleteResult());
}
public Task DeleteRunAsync(Guid runId, CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (_runs.TryRemove(runId, out var run))
{
if (_runsByProfile.TryGetValue(run.ProfileId, out var runs))
{
runs.Remove(runId);
}
}
_legalHolds.TryRemove(runId, out _);
}
return Task.CompletedTask;
}
public Task SetLegalHoldAsync(Guid runId, bool hold, string? reason, CancellationToken cancellationToken = default)
{
_legalHolds[runId] = (hold, reason);
return Task.CompletedTask;
}
/// <summary>
/// Clears all state.
/// </summary>
public void Clear()
{
_runs.Clear();
_runsByProfile.Clear();
_profilesByTenant.Clear();
_legalHolds.Clear();
}
}

View File

@@ -12,6 +12,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Cronos" Version="0.9.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>

View File

@@ -0,0 +1,134 @@
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Service for enforcing tenant scope in export operations.
/// </summary>
public interface ITenantScopeEnforcer
{
/// <summary>
/// Checks whether an export operation is allowed under tenant scope rules.
/// </summary>
Task<TenantScopeCheckResult> CheckScopeAsync(
TenantScopeCheckRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates a tenant-scoped path for an artifact.
/// </summary>
TenantScopedPath CreateScopedPath(
string tenantId,
string? projectId,
string originalPath);
/// <summary>
/// Parses a scoped path back into tenant/project/relative components.
/// </summary>
TenantScopedPath? ParseScopedPath(string scopedPath);
/// <summary>
/// Validates tenant and project IDs.
/// </summary>
TenantScopeValidationResult ValidateIds(string tenantId, string? projectId = null);
/// <summary>
/// Creates provenance context for a tenant-scoped export.
/// </summary>
TenantProvenanceContext CreateProvenanceContext(
string tenantId,
string? projectId,
string exportRunId,
IReadOnlyList<TenantScopedManifestEntry> entries,
IReadOnlyList<CrossTenantRef>? crossTenantRefs = null);
/// <summary>
/// Generates the scope prefix for a tenant/project combination.
/// </summary>
string GetScopePrefix(string tenantId, string? projectId = null);
/// <summary>
/// Checks if a path belongs to a specific tenant.
/// </summary>
bool IsPathOwnedByTenant(string path, string tenantId);
/// <summary>
/// Gets the configuration for a tenant (may have overrides).
/// </summary>
TenantScopeConfig GetConfigForTenant(string tenantId);
}
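// Illustrative sketch, not part of this commit: gating an export on a scope check and
// prefixing its artifact path. Tenant and resource IDs are placeholders.
internal static class TenantScopeUsageExample
{
public static async Task<string> ScopeArtifactAsync(ITenantScopeEnforcer enforcer)
{
var check = await enforcer.CheckScopeAsync(new TenantScopeCheckRequest
{
RequestingTenantId = "tenant-a",
TargetTenantId = "tenant-a",
ResourceIds = ["sbom-123"]
});
if (!check.Allowed)
{
throw new UnauthorizedAccessException(check.Message);
}
// Under the default config this yields "tenants/tenant-a/projects/default/exports/run-42/manifest.json".
return enforcer.CreateScopedPath("tenant-a", projectId: null, "exports/run-42/manifest.json").ScopedPath;
}
}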
/// <summary>
/// Store for tenant scope configurations.
/// </summary>
public interface ITenantScopeConfigStore
{
/// <summary>
/// Gets the global default configuration.
/// </summary>
TenantScopeConfig GetDefaultConfig();
/// <summary>
/// Gets configuration for a specific tenant (with any overrides applied).
/// </summary>
Task<TenantScopeConfig?> GetTenantConfigAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves configuration for a specific tenant.
/// </summary>
Task SaveTenantConfigAsync(
string tenantId,
TenantScopeConfig config,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a tenant is in the global cross-tenant whitelist.
/// </summary>
Task<bool> IsInGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Adds a tenant to the global cross-tenant whitelist.
/// </summary>
Task AddToGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Removes a tenant from the global cross-tenant whitelist.
/// </summary>
Task RemoveFromGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for tenant resource ownership tracking.
/// </summary>
public interface ITenantResourceStore
{
/// <summary>
/// Gets the tenant ID that owns a resource.
/// </summary>
Task<string?> GetResourceTenantAsync(
string resourceId,
CancellationToken cancellationToken = default);
/// <summary>
/// Registers resource ownership for a tenant.
/// </summary>
Task RegisterResourceAsync(
string tenantId,
string resourceId,
string resourceType,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if all resources belong to the specified tenant.
/// </summary>
Task<(bool AllBelong, IReadOnlyList<string> ViolatingResources)> CheckResourceOwnershipAsync(
string tenantId,
IReadOnlyList<string> resourceIds,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,144 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// In-memory implementation of tenant scope config store for testing.
/// </summary>
public sealed class InMemoryTenantScopeConfigStore : ITenantScopeConfigStore
{
private readonly ConcurrentDictionary<string, TenantScopeConfig> _configs = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, bool> _globalWhitelist = new(StringComparer.OrdinalIgnoreCase);
private TenantScopeConfig _defaultConfig = new();
/// <summary>
/// Sets the default configuration.
/// </summary>
public void SetDefaultConfig(TenantScopeConfig config)
{
_defaultConfig = config;
}
/// <inheritdoc />
public TenantScopeConfig GetDefaultConfig() => _defaultConfig;
/// <inheritdoc />
public Task<TenantScopeConfig?> GetTenantConfigAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_configs.TryGetValue(tenantId, out var config);
return Task.FromResult(config);
}
/// <inheritdoc />
public Task SaveTenantConfigAsync(
string tenantId,
TenantScopeConfig config,
CancellationToken cancellationToken = default)
{
_configs[tenantId] = config;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<bool> IsInGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
return Task.FromResult(_globalWhitelist.ContainsKey(tenantId));
}
/// <inheritdoc />
public Task AddToGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_globalWhitelist[tenantId] = true;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task RemoveFromGlobalWhitelistAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
_globalWhitelist.TryRemove(tenantId, out _);
return Task.CompletedTask;
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_configs.Clear();
_globalWhitelist.Clear();
_defaultConfig = new TenantScopeConfig();
}
}
/// <summary>
/// In-memory implementation of tenant resource store for testing.
/// </summary>
public sealed class InMemoryTenantResourceStore : ITenantResourceStore
{
private readonly ConcurrentDictionary<string, ResourceInfo> _resources = new(StringComparer.OrdinalIgnoreCase);
/// <inheritdoc />
public Task<string?> GetResourceTenantAsync(
string resourceId,
CancellationToken cancellationToken = default)
{
_resources.TryGetValue(resourceId, out var info);
return Task.FromResult(info?.TenantId);
}
/// <inheritdoc />
public Task RegisterResourceAsync(
string tenantId,
string resourceId,
string resourceType,
CancellationToken cancellationToken = default)
{
_resources[resourceId] = new ResourceInfo(tenantId, resourceType);
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<(bool AllBelong, IReadOnlyList<string> ViolatingResources)> CheckResourceOwnershipAsync(
string tenantId,
IReadOnlyList<string> resourceIds,
CancellationToken cancellationToken = default)
{
var violating = new List<string>();
foreach (var resourceId in resourceIds)
{
if (_resources.TryGetValue(resourceId, out var info) &&
!string.Equals(info.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
{
violating.Add(resourceId);
}
// Unregistered resources are allowed for now (assumed to belong to the tenant);
// a stricter policy could treat them as violations.
}
return Task.FromResult<(bool, IReadOnlyList<string>)>((violating.Count == 0, violating));
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_resources.Clear();
}
private sealed record ResourceInfo(string TenantId, string ResourceType);
}
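// Illustrative test sketch, not part of this commit: seeding the in-memory resource store
// and checking ownership from another tenant's perspective. IDs are placeholders.
internal static class TenantResourceStoreExample
{
public static async Task DemoAsync()
{
var store = new InMemoryTenantResourceStore();
await store.RegisterResourceAsync("tenant-a", "sbom-123", "sbom");
var (allBelong, violating) = await store.CheckResourceOwnershipAsync("tenant-b", ["sbom-123"]);
// allBelong == false and violating == ["sbom-123"], since tenant-b does not own the resource.
}
}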

View File

@@ -0,0 +1,324 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Default implementation of tenant scope enforcer.
/// </summary>
public sealed class TenantScopeEnforcer : ITenantScopeEnforcer
{
private readonly ITenantScopeConfigStore _configStore;
private readonly ITenantResourceStore _resourceStore;
private readonly ILogger<TenantScopeEnforcer> _logger;
private readonly TimeProvider _timeProvider;
public TenantScopeEnforcer(
ITenantScopeConfigStore configStore,
ITenantResourceStore resourceStore,
ILogger<TenantScopeEnforcer> logger,
TimeProvider? timeProvider = null)
{
_configStore = configStore ?? throw new ArgumentNullException(nameof(configStore));
_resourceStore = resourceStore ?? throw new ArgumentNullException(nameof(resourceStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<TenantScopeCheckResult> CheckScopeAsync(
TenantScopeCheckRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
// Validate tenant IDs
var requestingValidation = ValidateIds(request.RequestingTenantId, request.RequestingProjectId);
if (!requestingValidation.IsValid)
{
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.InvalidTenantId,
requestingValidation.Errors[0].Message);
}
var targetValidation = ValidateIds(request.TargetTenantId, request.TargetProjectId);
if (!targetValidation.IsValid)
{
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.InvalidTenantId,
targetValidation.Errors[0].Message);
}
// Get config for requesting tenant
var config = await GetConfigOrDefaultAsync(request.RequestingTenantId, cancellationToken);
if (!config.Enabled)
{
// Scope enforcement disabled - allow everything
_logger.LogDebug(
"Tenant scope enforcement disabled for tenant {TenantId}",
request.RequestingTenantId);
return TenantScopeCheckResult.Allow();
}
// Check if this is a same-tenant operation
var isCrossTenant = !string.Equals(
request.RequestingTenantId,
request.TargetTenantId,
StringComparison.OrdinalIgnoreCase);
if (!isCrossTenant)
{
// Same tenant - check project scope if applicable
if (request.RequestingProjectId is not null && request.TargetProjectId is not null &&
!string.Equals(request.RequestingProjectId, request.TargetProjectId, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Project scope mismatch: requesting={Requesting}, target={Target}",
request.RequestingProjectId,
request.TargetProjectId);
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.ProjectScopeViolation,
$"Cannot export from project {request.RequestingProjectId} to project {request.TargetProjectId}");
}
// Check resource ownership if resources specified
if (request.ResourceIds.Count > 0)
{
var (allBelong, violating) = await _resourceStore.CheckResourceOwnershipAsync(
request.RequestingTenantId,
request.ResourceIds,
cancellationToken);
if (!allBelong)
{
_logger.LogWarning(
"Resource scope violation for tenant {TenantId}: {ViolatingCount} resources",
request.RequestingTenantId,
violating.Count);
return TenantScopeCheckResult.DenyResources(
violating,
$"Resources do not belong to tenant {request.RequestingTenantId}");
}
}
return TenantScopeCheckResult.Allow();
}
// Cross-tenant operation
_logger.LogInformation(
"Cross-tenant operation: {RequestingTenant} -> {TargetTenant} ({Operation})",
request.RequestingTenantId,
request.TargetTenantId,
request.Operation);
// Check strict isolation
if (config.StrictIsolation)
{
// Check if target is in allowed targets list
if (!config.AllowedTargetTenants.Contains(request.TargetTenantId, StringComparer.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Cross-tenant denied by strict isolation: {Requesting} -> {Target}",
request.RequestingTenantId,
request.TargetTenantId);
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.StrictIsolationViolation,
$"Strict isolation prevents export from tenant {request.RequestingTenantId} to {request.TargetTenantId}");
}
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: false);
}
// Check whitelist
if (config.CrossTenantWhitelist.Contains(request.TargetTenantId, StringComparer.OrdinalIgnoreCase))
{
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: true);
}
// Check global whitelist
var inGlobalWhitelist = await _configStore.IsInGlobalWhitelistAsync(
request.TargetTenantId,
cancellationToken);
if (inGlobalWhitelist)
{
return TenantScopeCheckResult.AllowCrossTenant(viaWhitelist: true);
}
// Not in any whitelist
return TenantScopeCheckResult.Deny(
TenantScopeDenialReason.TargetTenantNotWhitelisted,
$"Target tenant {request.TargetTenantId} is not whitelisted for cross-tenant exports");
}
public TenantScopedPath CreateScopedPath(
string tenantId,
string? projectId,
string originalPath)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(originalPath);
var config = GetConfigForTenant(tenantId);
var prefix = GetScopePrefix(tenantId, projectId, config);
// Normalize and combine paths
var normalizedOriginal = originalPath.TrimStart('/');
var scopedPath = $"{prefix}/{normalizedOriginal}";
return new TenantScopedPath
{
OriginalPath = originalPath,
ScopedPath = scopedPath,
TenantId = tenantId,
ProjectId = projectId ?? config.DefaultProjectId,
RelativePath = normalizedOriginal
};
}
public TenantScopedPath? ParseScopedPath(string scopedPath)
{
if (string.IsNullOrWhiteSpace(scopedPath))
return null;
var config = _configStore.GetDefaultConfig();
// Try to extract tenant and project from path
// Expected format: tenants/{tenantId}/projects/{projectId}/... or tenants/{tenantId}/...
var tenantMatch = Regex.Match(scopedPath, @"^tenants/([^/]+)(?:/projects/([^/]+))?/(.+)$");
if (tenantMatch.Success)
{
var tenantId = tenantMatch.Groups[1].Value;
var projectId = tenantMatch.Groups[2].Success ? tenantMatch.Groups[2].Value : null;
var relativePath = tenantMatch.Groups[3].Value;
return new TenantScopedPath
{
OriginalPath = relativePath,
ScopedPath = scopedPath,
TenantId = tenantId,
ProjectId = projectId,
RelativePath = relativePath
};
}
// Try simpler format: {tenantId}/...
var simpleMatch = Regex.Match(scopedPath, @"^([^/]+)/(.+)$");
if (simpleMatch.Success)
{
var potentialTenantId = simpleMatch.Groups[1].Value;
if (TenantIdValidator.IsValid(potentialTenantId))
{
return new TenantScopedPath
{
OriginalPath = simpleMatch.Groups[2].Value,
ScopedPath = scopedPath,
TenantId = potentialTenantId,
ProjectId = null,
RelativePath = simpleMatch.Groups[2].Value
};
}
}
return null;
}
public TenantScopeValidationResult ValidateIds(string tenantId, string? projectId = null)
{
var tenantValidation = TenantIdValidator.Validate(tenantId);
if (!tenantValidation.IsValid)
{
return tenantValidation;
}
// Project ID validation (same rules, but optional)
if (projectId is not null && !TenantIdValidator.IsValid(projectId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidProjectId,
Message = "Project ID must be 3-64 alphanumeric characters (hyphens/underscores allowed) or a valid GUID",
Field = "projectId"
});
}
return TenantScopeValidationResult.Valid();
}
public TenantProvenanceContext CreateProvenanceContext(
string tenantId,
string? projectId,
string exportRunId,
IReadOnlyList<TenantScopedManifestEntry> entries,
IReadOnlyList<CrossTenantRef>? crossTenantRefs = null)
{
var scopePrefix = GetScopePrefix(tenantId, projectId);
return new TenantProvenanceContext
{
TenantId = tenantId,
ProjectId = projectId,
ExportRunId = exportRunId,
ExportedAt = _timeProvider.GetUtcNow(),
ScopePrefix = scopePrefix,
ArtifactCount = entries.Count,
TotalSizeBytes = entries.Sum(e => e.SizeBytes),
CrossTenantRefs = crossTenantRefs
};
}
public string GetScopePrefix(string tenantId, string? projectId = null)
{
var config = GetConfigForTenant(tenantId);
return GetScopePrefix(tenantId, projectId, config);
}
private static string GetScopePrefix(string tenantId, string? projectId, TenantScopeConfig config)
{
var prefix = config.PathPrefixPattern.Replace("{tenantId}", tenantId);
if (config.IncludeProjectInPath && !string.IsNullOrEmpty(projectId))
{
var projectPrefix = config.ProjectPrefixPattern.Replace("{projectId}", projectId);
prefix = $"{prefix}/{projectPrefix}";
}
else if (config.IncludeProjectInPath)
{
var projectPrefix = config.ProjectPrefixPattern.Replace("{projectId}", config.DefaultProjectId);
prefix = $"{prefix}/{projectPrefix}";
}
return prefix.TrimEnd('/');
}
public bool IsPathOwnedByTenant(string path, string tenantId)
{
if (string.IsNullOrWhiteSpace(path) || string.IsNullOrWhiteSpace(tenantId))
return false;
var parsed = ParseScopedPath(path);
if (parsed is null)
return false;
return string.Equals(parsed.TenantId, tenantId, StringComparison.OrdinalIgnoreCase);
}
public TenantScopeConfig GetConfigForTenant(string tenantId)
{
// Synchronous fallback - in production would cache
var config = _configStore.GetTenantConfigAsync(tenantId, default).GetAwaiter().GetResult();
return config ?? _configStore.GetDefaultConfig();
}
private async Task<TenantScopeConfig> GetConfigOrDefaultAsync(
string tenantId,
CancellationToken cancellationToken)
{
var config = await _configStore.GetTenantConfigAsync(tenantId, cancellationToken);
return config ?? _configStore.GetDefaultConfig();
}
}
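// Round-trip sketch, not part of this commit: ParseScopedPath recognises both the
// "tenants/{tenantId}/projects/{projectId}/..." form produced by CreateScopedPath and a
// bare "{tenantId}/..." prefix, e.g.:
//
//   var parsed = enforcer.ParseScopedPath("tenants/acme/projects/default/exports/manifest.json");
//   // parsed.TenantId == "acme", parsed.ProjectId == "default",
//   // parsed.RelativePath == "exports/manifest.json"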

View File

@@ -0,0 +1,395 @@
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Configuration for tenant scope enforcement in exports.
/// </summary>
public sealed record TenantScopeConfig
{
/// <summary>
/// Whether tenant scope enforcement is enabled.
/// </summary>
public bool Enabled { get; init; } = true;
/// <summary>
/// Pattern for tenant prefix in paths (e.g., "tenants/{tenantId}" or "{tenantId}").
/// </summary>
public string PathPrefixPattern { get; init; } = "tenants/{tenantId}";
/// <summary>
/// Pattern for project prefix in paths (appended after tenant).
/// </summary>
public string ProjectPrefixPattern { get; init; } = "projects/{projectId}";
/// <summary>
/// Whether to include project in path prefix.
/// </summary>
public bool IncludeProjectInPath { get; init; } = true;
/// <summary>
/// Whether to enforce strict tenant isolation (no cross-tenant refs).
/// </summary>
public bool StrictIsolation { get; init; } = true;
/// <summary>
/// List of tenant IDs allowed for cross-tenant exports.
/// </summary>
public IReadOnlyList<string> CrossTenantWhitelist { get; init; } = [];
/// <summary>
/// List of target tenant IDs this tenant can export to.
/// </summary>
public IReadOnlyList<string> AllowedTargetTenants { get; init; } = [];
/// <summary>
/// Default project ID when none is specified.
/// </summary>
public string DefaultProjectId { get; init; } = "default";
}
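// Behaviour note under these defaults (not normative): with StrictIsolation = true,
// cross-tenant exports succeed only when the target appears in AllowedTargetTenants;
// CrossTenantWhitelist and the global whitelist are consulted only when StrictIsolation = false.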
/// <summary>
/// Tenant-scoped artifact path information.
/// </summary>
public sealed record TenantScopedPath
{
/// <summary>
/// The original path before tenant scoping.
/// </summary>
public required string OriginalPath { get; init; }
/// <summary>
/// The tenant-scoped path (prefixed with tenant/project).
/// </summary>
public required string ScopedPath { get; init; }
/// <summary>
/// The tenant ID.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// The project ID.
/// </summary>
public string? ProjectId { get; init; }
/// <summary>
/// Path relative to tenant/project prefix.
/// </summary>
public required string RelativePath { get; init; }
}
/// <summary>
/// Request to check tenant scope for an export operation.
/// </summary>
public sealed record TenantScopeCheckRequest
{
/// <summary>
/// The requesting tenant ID.
/// </summary>
public required string RequestingTenantId { get; init; }
/// <summary>
/// The requesting project ID (optional).
/// </summary>
public string? RequestingProjectId { get; init; }
/// <summary>
/// The target tenant ID for the export.
/// </summary>
public required string TargetTenantId { get; init; }
/// <summary>
/// The target project ID (optional).
/// </summary>
public string? TargetProjectId { get; init; }
/// <summary>
/// Resource IDs being accessed.
/// </summary>
public IReadOnlyList<string> ResourceIds { get; init; } = [];
/// <summary>
/// The operation being performed.
/// </summary>
public TenantScopeOperation Operation { get; init; } = TenantScopeOperation.Export;
}
/// <summary>
/// Result of a tenant scope check.
/// </summary>
public sealed record TenantScopeCheckResult
{
/// <summary>
/// Whether the operation is allowed.
/// </summary>
public bool Allowed { get; init; }
/// <summary>
/// Denial reason if not allowed.
/// </summary>
public TenantScopeDenialReason? DenialReason { get; init; }
/// <summary>
/// Detailed message explaining the decision.
/// </summary>
public string? Message { get; init; }
/// <summary>
/// Whether this is a cross-tenant operation.
/// </summary>
public bool IsCrossTenant { get; init; }
/// <summary>
/// Whether the operation was allowed via whitelist.
/// </summary>
public bool AllowedViaWhitelist { get; init; }
/// <summary>
/// Resources that failed scope check.
/// </summary>
public IReadOnlyList<string> DeniedResources { get; init; } = [];
public static TenantScopeCheckResult Allow() => new() { Allowed = true };
public static TenantScopeCheckResult AllowCrossTenant(bool viaWhitelist) => new()
{
Allowed = true,
IsCrossTenant = true,
AllowedViaWhitelist = viaWhitelist
};
public static TenantScopeCheckResult Deny(TenantScopeDenialReason reason, string message) => new()
{
Allowed = false,
DenialReason = reason,
Message = message
};
public static TenantScopeCheckResult DenyResources(IReadOnlyList<string> resources, string message) => new()
{
Allowed = false,
DenialReason = TenantScopeDenialReason.ResourceScopeViolation,
Message = message,
DeniedResources = resources
};
}
/// <summary>
/// Reason for denying a tenant scope check.
/// </summary>
public enum TenantScopeDenialReason
{
/// <summary>Cross-tenant access not allowed.</summary>
CrossTenantNotAllowed,
/// <summary>Target tenant not in whitelist.</summary>
TargetTenantNotWhitelisted,
/// <summary>Resource belongs to different tenant.</summary>
ResourceScopeViolation,
/// <summary>Project scope violation.</summary>
ProjectScopeViolation,
/// <summary>Strict isolation prevents operation.</summary>
StrictIsolationViolation,
/// <summary>Invalid tenant ID format.</summary>
InvalidTenantId,
/// <summary>Tenant scope enforcement is disabled but operation requires it.</summary>
EnforcementDisabled
}
/// <summary>
/// Types of tenant-scoped operations.
/// </summary>
public enum TenantScopeOperation
{
/// <summary>Export data from tenant.</summary>
Export,
/// <summary>Read/access data within tenant.</summary>
Read,
/// <summary>Share data with another tenant.</summary>
Share,
/// <summary>Verify data from tenant.</summary>
Verify,
/// <summary>Delete data within tenant.</summary>
Delete
}
/// <summary>
/// Tenant-scoped manifest entry with prefix information.
/// </summary>
public sealed record TenantScopedManifestEntry
{
[JsonPropertyName("path")]
public required string Path { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("projectId")]
public string? ProjectId { get; init; }
[JsonPropertyName("relativePath")]
public required string RelativePath { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("sizeBytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("mediaType")]
public string? MediaType { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Provenance context for tenant-scoped exports.
/// </summary>
public sealed record TenantProvenanceContext
{
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("projectId")]
public string? ProjectId { get; init; }
[JsonPropertyName("exportRunId")]
public required string ExportRunId { get; init; }
[JsonPropertyName("exportedAt")]
public required DateTimeOffset ExportedAt { get; init; }
[JsonPropertyName("scopePrefix")]
public required string ScopePrefix { get; init; }
[JsonPropertyName("artifactCount")]
public int ArtifactCount { get; init; }
[JsonPropertyName("totalSizeBytes")]
public long TotalSizeBytes { get; init; }
[JsonPropertyName("crossTenantRefs")]
public IReadOnlyList<CrossTenantRef>? CrossTenantRefs { get; init; }
}
/// <summary>
/// Reference to a cross-tenant resource in an export.
/// </summary>
public sealed record CrossTenantRef
{
[JsonPropertyName("sourceTenantId")]
public required string SourceTenantId { get; init; }
[JsonPropertyName("resourceId")]
public required string ResourceId { get; init; }
[JsonPropertyName("resourceType")]
public required string ResourceType { get; init; }
[JsonPropertyName("allowedVia")]
public required string AllowedVia { get; init; }
}
/// <summary>
/// Tenant scope validation result.
/// </summary>
public sealed record TenantScopeValidationResult
{
public bool IsValid { get; init; }
public IReadOnlyList<TenantScopeValidationError> Errors { get; init; } = [];
public static TenantScopeValidationResult Valid() => new() { IsValid = true };
public static TenantScopeValidationResult Invalid(params TenantScopeValidationError[] errors) => new()
{
IsValid = false,
Errors = errors
};
}
/// <summary>
/// Validation error for tenant scope.
/// </summary>
public sealed record TenantScopeValidationError
{
public required string Code { get; init; }
public required string Message { get; init; }
public string? Field { get; init; }
}
/// <summary>
/// Error codes for tenant scope enforcement.
/// </summary>
public static class TenantScopeErrorCodes
{
public const string InvalidTenantId = "TENANT_INVALID_ID";
public const string InvalidProjectId = "TENANT_INVALID_PROJECT_ID";
public const string CrossTenantDenied = "TENANT_CROSS_TENANT_DENIED";
public const string NotWhitelisted = "TENANT_NOT_WHITELISTED";
public const string ResourceScopeViolation = "TENANT_RESOURCE_SCOPE_VIOLATION";
public const string ProjectScopeViolation = "TENANT_PROJECT_SCOPE_VIOLATION";
public const string StrictIsolation = "TENANT_STRICT_ISOLATION";
public const string InvalidPathPrefix = "TENANT_INVALID_PATH_PREFIX";
public const string MissingTenantContext = "TENANT_MISSING_CONTEXT";
}
/// <summary>
/// Helper for tenant ID validation.
/// </summary>
public static partial class TenantIdValidator
{
// Pattern: alphanumeric with hyphens and underscores, 3-64 chars, or valid GUID
private static readonly Regex TenantIdPattern = TenantIdRegex();
[GeneratedRegex(@"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,63}$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", RegexOptions.Compiled)]
private static partial Regex TenantIdRegex();
/// <summary>
/// Validates a tenant ID format.
/// </summary>
public static bool IsValid(string? tenantId)
{
if (string.IsNullOrWhiteSpace(tenantId)) return false;
return TenantIdPattern.IsMatch(tenantId);
}
/// <summary>
/// Validates a tenant ID and returns errors if invalid.
/// </summary>
public static TenantScopeValidationResult Validate(string? tenantId)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidTenantId,
Message = "Tenant ID is required",
Field = "tenantId"
});
}
if (!IsValid(tenantId))
{
return TenantScopeValidationResult.Invalid(new TenantScopeValidationError
{
Code = TenantScopeErrorCodes.InvalidTenantId,
Message = "Tenant ID must be 3-64 alphanumeric characters (hyphens/underscores allowed) or a valid GUID",
Field = "tenantId"
});
}
return TenantScopeValidationResult.Valid();
}
}
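
A minimal usage sketch for the validator; the inputs are illustrative and the expected results follow directly from the regex above.

TenantIdValidator.IsValid("acme-prod");  // true: 3-64 chars, starts alphanumeric
TenantIdValidator.IsValid("ab");         // false: below the 3-character minimum
TenantIdValidator.IsValid("-acme");      // false: must start with an alphanumeric
TenantIdValidator.IsValid("6f9619ff-8b86-d011-b42d-00cf4fc964ff"); // true: GUID form

var result = TenantIdValidator.Validate(" ");
// result.IsValid == false; result.Errors[0].Code == TenantScopeErrorCodes.InvalidTenantId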


@@ -0,0 +1,57 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Tenancy;
/// <summary>
/// Extension methods for registering tenant scope services.
/// </summary>
public static class TenantScopeServiceCollectionExtensions
{
/// <summary>
/// Registers tenant scope services with in-memory stores.
/// </summary>
public static IServiceCollection AddTenantScopeEnforcement(this IServiceCollection services)
{
services.AddSingleton<InMemoryTenantScopeConfigStore>();
services.AddSingleton<ITenantScopeConfigStore>(sp => sp.GetRequiredService<InMemoryTenantScopeConfigStore>());
services.AddSingleton<InMemoryTenantResourceStore>();
services.AddSingleton<ITenantResourceStore>(sp => sp.GetRequiredService<InMemoryTenantResourceStore>());
services.AddSingleton<ITenantScopeEnforcer, TenantScopeEnforcer>();
return services;
}
/// <summary>
/// Registers tenant scope services with custom stores.
/// </summary>
public static IServiceCollection AddTenantScopeEnforcement<TConfigStore, TResourceStore>(
this IServiceCollection services)
where TConfigStore : class, ITenantScopeConfigStore
where TResourceStore : class, ITenantResourceStore
{
services.AddSingleton<ITenantScopeConfigStore, TConfigStore>();
services.AddSingleton<ITenantResourceStore, TResourceStore>();
services.AddSingleton<ITenantScopeEnforcer, TenantScopeEnforcer>();
return services;
}
/// <summary>
/// Configures the default tenant scope configuration.
/// </summary>
    public static IServiceCollection ConfigureTenantScope(
        this IServiceCollection services,
        Func<TenantScopeConfig, TenantScopeConfig> configure)
    {
        // TenantScopeConfig is an immutable record (init-only setters), so an Action
        // callback could not actually change it; instead the callback receives the
        // defaults and returns an adjusted copy, typically via a `with` expression.
        services.AddSingleton(_ => configure(new TenantScopeConfig()));
        return services;
    }
}
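
A registration sketch assuming the in-memory stores are adequate (tests, demos). `MyConfigStore` and `MyResourceStore` are hypothetical placeholders, and the `with` expression relies on the `Func`-based `ConfigureTenantScope` overload above.

var services = new ServiceCollection();

services.AddTenantScopeEnforcement();                                   // in-memory stores
// services.AddTenantScopeEnforcement<MyConfigStore, MyResourceStore>(); // custom stores

services.ConfigureTenantScope(cfg => cfg with
{
    StrictIsolation = false,    // illustrative overrides
    DefaultProjectId = "shared"
});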


@@ -0,0 +1,859 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Request to verify an export bundle or artifact.
/// </summary>
public sealed record ExportVerificationRequest
{
/// <summary>
/// Run ID to verify.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID for scope validation.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Path to the manifest file.
/// </summary>
public string? ManifestPath { get; init; }
/// <summary>
/// Manifest content (if not reading from path).
/// </summary>
public string? ManifestContent { get; init; }
/// <summary>
/// Path to signature file.
/// </summary>
public string? SignaturePath { get; init; }
/// <summary>
/// Signature content (if not reading from path).
/// </summary>
public string? SignatureContent { get; init; }
/// <summary>
/// Verification options.
/// </summary>
public ExportVerificationOptions Options { get; init; } = new();
}
/// <summary>
/// Options for verification.
/// </summary>
public sealed record ExportVerificationOptions
{
/// <summary>
/// Whether to verify content hashes.
/// </summary>
[JsonPropertyName("verifyHashes")]
public bool VerifyHashes { get; init; } = true;
/// <summary>
/// Whether to verify signatures.
/// </summary>
[JsonPropertyName("verifySignatures")]
public bool VerifySignatures { get; init; } = true;
/// <summary>
/// Whether to check signature against Rekor transparency log.
/// </summary>
[JsonPropertyName("checkRekor")]
public bool CheckRekor { get; init; } = false;
/// <summary>
/// Whether to verify manifest integrity (internal consistency).
/// </summary>
[JsonPropertyName("verifyManifestIntegrity")]
public bool VerifyManifestIntegrity { get; init; } = true;
/// <summary>
/// Whether to verify encryption metadata.
/// </summary>
[JsonPropertyName("verifyEncryption")]
public bool VerifyEncryption { get; init; } = true;
/// <summary>
/// Trusted public keys for signature verification (PEM or base64).
/// </summary>
[JsonPropertyName("trustedKeys")]
public IReadOnlyList<string> TrustedKeys { get; init; } = [];
/// <summary>
/// Trusted certificate roots for signature verification.
/// </summary>
[JsonPropertyName("trustedRoots")]
public IReadOnlyList<string> TrustedRoots { get; init; } = [];
}
/// <summary>
/// Result of export verification.
/// </summary>
public sealed record ExportVerificationResult
{
/// <summary>
/// Overall verification status.
/// </summary>
public required VerificationStatus Status { get; init; }
/// <summary>
/// Whether verification passed.
/// </summary>
public bool IsValid => Status == VerificationStatus.Valid;
/// <summary>
/// Run ID that was verified.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Manifest verification result.
/// </summary>
public ManifestVerificationResult? Manifest { get; init; }
/// <summary>
/// Signature verification result.
/// </summary>
public SignatureVerificationResult? Signature { get; init; }
/// <summary>
/// Hash verification results for individual files.
/// </summary>
public IReadOnlyList<HashVerificationResult> FileHashes { get; init; } = [];
/// <summary>
/// Encryption verification result.
/// </summary>
public EncryptionVerificationResult? Encryption { get; init; }
/// <summary>
/// Attestation status.
/// </summary>
public AttestationStatus? Attestation { get; init; }
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<VerificationError> Errors { get; init; } = [];
/// <summary>
/// Verification warnings.
/// </summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
/// <summary>
/// When verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
public static ExportVerificationResult Failed(Guid runId, params VerificationError[] errors)
=> new()
{
Status = VerificationStatus.Invalid,
RunId = runId,
Errors = errors
};
}
/// <summary>
/// Overall verification status.
/// </summary>
public enum VerificationStatus
{
/// <summary>
/// All checks passed.
/// </summary>
Valid = 1,
/// <summary>
/// Some checks failed.
/// </summary>
Invalid = 2,
/// <summary>
/// Verification was partial (some checks skipped).
/// </summary>
Partial = 3,
/// <summary>
/// Verification could not be performed.
/// </summary>
Error = 4,
/// <summary>
/// Verification is still in progress.
/// </summary>
Pending = 5
}
/// <summary>
/// Result of manifest verification.
/// </summary>
public sealed record ManifestVerificationResult
{
/// <summary>
/// Whether the manifest is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Manifest format version.
/// </summary>
public string? FormatVersion { get; init; }
/// <summary>
/// Number of entries in manifest.
/// </summary>
public int EntryCount { get; init; }
/// <summary>
/// Manifest digest.
/// </summary>
public string? ManifestDigest { get; init; }
/// <summary>
/// Expected manifest digest (if provided).
/// </summary>
public string? ExpectedDigest { get; init; }
/// <summary>
/// Whether manifest digest matches expected.
/// </summary>
public bool DigestMatch { get; init; }
/// <summary>
/// Validation errors.
/// </summary>
public IReadOnlyList<string> ValidationErrors { get; init; } = [];
}
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record SignatureVerificationResult
{
/// <summary>
/// Whether the signature is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Signature algorithm used.
/// </summary>
public string? Algorithm { get; init; }
/// <summary>
/// Key ID that signed.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Signer identity (certificate subject, key fingerprint).
/// </summary>
public string? SignerIdentity { get; init; }
/// <summary>
/// When the signature was created.
/// </summary>
public DateTimeOffset? SignedAt { get; init; }
/// <summary>
/// Whether the signature was found in Rekor.
/// </summary>
public bool? RekorVerified { get; init; }
/// <summary>
/// Rekor log index if found.
/// </summary>
public long? RekorLogIndex { get; init; }
/// <summary>
/// Certificate chain if available.
/// </summary>
public IReadOnlyList<string> CertificateChain { get; init; } = [];
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Result of hash verification for a single file.
/// </summary>
public sealed record HashVerificationResult
{
/// <summary>
/// File path.
/// </summary>
public required string Path { get; init; }
/// <summary>
/// Whether the hash matches.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Expected hash from manifest.
/// </summary>
public string? ExpectedHash { get; init; }
/// <summary>
/// Computed hash.
/// </summary>
public string? ComputedHash { get; init; }
/// <summary>
/// Hash algorithm used.
/// </summary>
public string? Algorithm { get; init; }
/// <summary>
/// File size in bytes.
/// </summary>
public long? SizeBytes { get; init; }
/// <summary>
/// Error message if verification failed.
/// </summary>
public string? Error { get; init; }
}
/// <summary>
/// Result of encryption verification.
/// </summary>
public sealed record EncryptionVerificationResult
{
/// <summary>
/// Whether encryption metadata is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Encryption mode.
/// </summary>
public string? Mode { get; init; }
/// <summary>
/// Number of recipients.
/// </summary>
public int RecipientCount { get; init; }
/// <summary>
/// AAD format.
/// </summary>
public string? AadFormat { get; init; }
/// <summary>
/// Whether all encrypted files have valid nonces.
/// </summary>
public bool NonceFormatValid { get; init; }
/// <summary>
/// Validation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Attestation status for a verified export.
/// </summary>
public sealed record AttestationStatus
{
/// <summary>
/// Whether attestation is present.
/// </summary>
public bool HasAttestation { get; init; }
/// <summary>
/// Attestation type (in-toto, DSSE, etc.).
/// </summary>
public string? Type { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Whether attestation signature is valid.
/// </summary>
public bool? SignatureValid { get; init; }
/// <summary>
/// Subject digests from attestation.
/// </summary>
public IReadOnlyList<string> SubjectDigests { get; init; } = [];
/// <summary>
/// Attestation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Verification error.
/// </summary>
public sealed record VerificationError
{
/// <summary>
/// Error code.
/// </summary>
public required string Code { get; init; }
/// <summary>
/// Error message.
/// </summary>
public required string Message { get; init; }
/// <summary>
/// Path or component that failed.
/// </summary>
public string? Path { get; init; }
/// <summary>
/// Additional details.
/// </summary>
public string? Details { get; init; }
}
/// <summary>
/// Common verification error codes.
/// </summary>
public static class VerificationErrorCodes
{
public const string ManifestNotFound = "MANIFEST_NOT_FOUND";
public const string ManifestParseError = "MANIFEST_PARSE_ERROR";
public const string ManifestDigestMismatch = "MANIFEST_DIGEST_MISMATCH";
public const string SignatureNotFound = "SIGNATURE_NOT_FOUND";
public const string SignatureInvalid = "SIGNATURE_INVALID";
public const string SignatureExpired = "SIGNATURE_EXPIRED";
public const string KeyNotTrusted = "KEY_NOT_TRUSTED";
public const string HashMismatch = "HASH_MISMATCH";
public const string FileNotFound = "FILE_NOT_FOUND";
public const string EncryptionInvalid = "ENCRYPTION_INVALID";
public const string AttestationInvalid = "ATTESTATION_INVALID";
public const string RekorVerificationFailed = "REKOR_VERIFICATION_FAILED";
public const string TenantMismatch = "TENANT_MISMATCH";
public const string PackRunNotFound = "PACK_RUN_NOT_FOUND";
public const string PackRunAttestationInvalid = "PACK_RUN_ATTESTATION_INVALID";
public const string SubjectDigestMismatch = "SUBJECT_DIGEST_MISMATCH";
public const string ProvenanceChainBroken = "PROVENANCE_CHAIN_BROKEN";
}
// ========================================================================
// Pack Run Integration Models
// ========================================================================
/// <summary>
/// Request to verify pack run integration with an export.
/// </summary>
public sealed record PackRunVerificationRequest
{
/// <summary>
/// Export run ID.
/// </summary>
public required Guid ExportRunId { get; init; }
/// <summary>
/// Tenant ID for scope validation.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Pack run ID to verify integration with.
/// </summary>
public Guid? PackRunId { get; init; }
/// <summary>
/// Pack run attestation ID (if different from pack run).
/// </summary>
public string? AttestationId { get; init; }
/// <summary>
/// Whether to verify subject digests match.
/// </summary>
public bool VerifySubjectAlignment { get; init; } = true;
/// <summary>
/// Whether to verify the provenance chain is complete.
/// </summary>
public bool VerifyProvenanceChain { get; init; } = true;
}
/// <summary>
/// Result of pack run integration verification.
/// </summary>
public sealed record PackRunVerificationResult
{
/// <summary>
/// Whether the pack run integration is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Export run ID.
/// </summary>
public required Guid ExportRunId { get; init; }
/// <summary>
/// Pack run ID (if found).
/// </summary>
public Guid? PackRunId { get; init; }
/// <summary>
/// Pack run attestation verification result.
/// </summary>
public PackRunAttestationResult? Attestation { get; init; }
/// <summary>
/// Subject alignment verification result.
/// </summary>
public SubjectAlignmentResult? SubjectAlignment { get; init; }
/// <summary>
/// Provenance chain verification result.
/// </summary>
public ProvenanceChainResult? ProvenanceChain { get; init; }
/// <summary>
/// Provenance links extracted from the integration.
/// </summary>
public IReadOnlyList<ProvenanceLink> ProvenanceLinks { get; init; } = [];
/// <summary>
/// Verification errors.
/// </summary>
public IReadOnlyList<VerificationError> Errors { get; init; } = [];
/// <summary>
/// When verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Result of pack run attestation verification.
/// </summary>
public sealed record PackRunAttestationResult
{
/// <summary>
/// Whether the attestation is valid.
/// </summary>
public bool IsValid { get; init; }
/// <summary>
/// Attestation ID.
/// </summary>
public string? AttestationId { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Whether the attestation signature is valid.
/// </summary>
public bool SignatureValid { get; init; }
/// <summary>
/// Key ID that signed the attestation.
/// </summary>
public string? SignerKeyId { get; init; }
/// <summary>
/// Subject artifacts in the attestation.
/// </summary>
public IReadOnlyList<AttestationSubject> Subjects { get; init; } = [];
/// <summary>
/// Builder information from provenance.
/// </summary>
public BuilderInfo? Builder { get; init; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
/// <summary>
/// Attestation errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// Subject artifact in an attestation.
/// </summary>
public sealed record AttestationSubject
{
/// <summary>
/// Subject name (typically artifact path).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Digest algorithm and value pairs.
/// </summary>
public IReadOnlyDictionary<string, string> Digest { get; init; } = new Dictionary<string, string>();
}
/// <summary>
/// Builder information from provenance.
/// </summary>
public sealed record BuilderInfo
{
/// <summary>
/// Builder name/identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Builder version.
/// </summary>
public string? Version { get; init; }
/// <summary>
/// Build timestamp.
/// </summary>
public DateTimeOffset? BuildTimestamp { get; init; }
}
/// <summary>
/// Result of subject alignment verification.
/// </summary>
public sealed record SubjectAlignmentResult
{
/// <summary>
/// Whether all subjects align correctly.
/// </summary>
public bool IsAligned { get; init; }
/// <summary>
/// Total subjects in export.
/// </summary>
public int ExportSubjectCount { get; init; }
/// <summary>
/// Total subjects in pack run attestation.
/// </summary>
public int PackRunSubjectCount { get; init; }
/// <summary>
/// Number of matching subjects.
/// </summary>
public int MatchedCount { get; init; }
/// <summary>
/// Subjects only in export.
/// </summary>
public IReadOnlyList<string> ExportOnlySubjects { get; init; } = [];
/// <summary>
/// Subjects only in pack run.
/// </summary>
public IReadOnlyList<string> PackRunOnlySubjects { get; init; } = [];
/// <summary>
/// Subjects with digest mismatches.
/// </summary>
public IReadOnlyList<DigestMismatch> DigestMismatches { get; init; } = [];
}
/// <summary>
/// Digest mismatch between export and pack run subjects.
/// </summary>
public sealed record DigestMismatch
{
/// <summary>
/// Subject name.
/// </summary>
public required string SubjectName { get; init; }
/// <summary>
/// Digest in export.
/// </summary>
public string? ExportDigest { get; init; }
/// <summary>
/// Digest in pack run attestation.
/// </summary>
public string? PackRunDigest { get; init; }
/// <summary>
/// Algorithm used.
/// </summary>
public string Algorithm { get; init; } = "sha256";
}
/// <summary>
/// Result of provenance chain verification.
/// </summary>
public sealed record ProvenanceChainResult
{
/// <summary>
/// Whether the provenance chain is complete.
/// </summary>
public bool IsComplete { get; init; }
/// <summary>
/// Chain depth (number of links).
/// </summary>
public int ChainDepth { get; init; }
/// <summary>
/// Links in the chain.
/// </summary>
public IReadOnlyList<ProvenanceLink> Links { get; init; } = [];
/// <summary>
/// Missing links in the chain.
/// </summary>
public IReadOnlyList<string> MissingLinks { get; init; } = [];
/// <summary>
/// Chain errors.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
}
/// <summary>
/// A link in the provenance chain.
/// </summary>
public sealed record ProvenanceLink
{
/// <summary>
/// Link type.
/// </summary>
public required ProvenanceLinkType Type { get; init; }
/// <summary>
/// Source identifier (e.g., pack run ID, attestation ID).
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Target identifier (e.g., export run ID, artifact path).
/// </summary>
public required string TargetId { get; init; }
/// <summary>
/// Digest of the linked artifact.
/// </summary>
public string? Digest { get; init; }
/// <summary>
/// Link metadata.
/// </summary>
public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>();
/// <summary>
/// When the link was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
}
/// <summary>
/// Types of provenance links.
/// </summary>
public enum ProvenanceLinkType
{
/// <summary>
/// Pack run produces artifact.
/// </summary>
PackRunToArtifact = 1,
/// <summary>
/// Artifact included in export.
/// </summary>
ArtifactToExport = 2,
/// <summary>
/// Attestation references subject.
/// </summary>
AttestationToSubject = 3,
/// <summary>
/// Export references attestation.
/// </summary>
ExportToAttestation = 4,
/// <summary>
/// Signature covers artifact.
/// </summary>
SignatureToArtifact = 5
}
/// <summary>
/// Streaming verification event.
/// </summary>
public sealed record VerificationProgressEvent
{
/// <summary>
/// Event type.
/// </summary>
public required VerificationProgressType Type { get; init; }
/// <summary>
/// Current item being verified.
/// </summary>
public string? CurrentItem { get; init; }
/// <summary>
/// Progress percentage (0-100).
/// </summary>
public int ProgressPercent { get; init; }
/// <summary>
/// Total items to verify.
/// </summary>
public int TotalItems { get; init; }
/// <summary>
/// Items verified so far.
/// </summary>
public int VerifiedItems { get; init; }
/// <summary>
/// Items that passed.
/// </summary>
public int PassedItems { get; init; }
/// <summary>
/// Items that failed.
/// </summary>
public int FailedItems { get; init; }
/// <summary>
/// Message for this event.
/// </summary>
public string? Message { get; init; }
/// <summary>
/// Timestamp.
/// </summary>
public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Types of verification progress events.
/// </summary>
public enum VerificationProgressType
{
Started = 1,
ManifestVerified = 2,
SignatureVerified = 3,
HashVerificationStarted = 4,
HashVerificationProgress = 5,
HashVerificationComplete = 6,
EncryptionVerified = 7,
AttestationVerified = 8,
Completed = 9,
Error = 10
}
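
To tie the request and option models together, a construction sketch; the GUIDs are illustrative and Rekor is skipped on the assumption of offline verification.

var request = new ExportVerificationRequest
{
    RunId = Guid.Parse("6f9619ff-8b86-d011-b42d-00cf4fc964ff"),   // illustrative
    TenantId = Guid.Parse("0b926e0f-9fd5-4cb4-8ac8-ef1c0e1e8d3a"), // illustrative
    Options = new ExportVerificationOptions
    {
        VerifyHashes = true,
        VerifySignatures = true,
        CheckRekor = false,        // offline verification
        TrustedKeys = ["key-1"]    // matched against the DSSE keyid by the service below
    }
};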


@@ -0,0 +1,828 @@
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Default implementation of the export verification service.
/// </summary>
public sealed class ExportVerificationService : IExportVerificationService
{
private readonly IExportArtifactStore _artifactStore;
private readonly IPackRunAttestationStore? _packRunStore;
private readonly ILogger<ExportVerificationService> _logger;
public ExportVerificationService(
IExportArtifactStore artifactStore,
ILogger<ExportVerificationService> logger)
: this(artifactStore, null, logger)
{
}
public ExportVerificationService(
IExportArtifactStore artifactStore,
IPackRunAttestationStore? packRunStore,
ILogger<ExportVerificationService> logger)
{
_artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore));
_packRunStore = packRunStore;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<ExportVerificationResult> VerifyAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogInformation(
"Starting verification for run {RunId}",
request.RunId);
var errors = new List<VerificationError>();
var warnings = new List<string>();
// Get run metadata
var metadata = await _artifactStore.GetRunMetadataAsync(request.RunId, cancellationToken);
if (metadata is null)
{
return ExportVerificationResult.Failed(
request.RunId,
new VerificationError
{
Code = VerificationErrorCodes.ManifestNotFound,
Message = $"Run {request.RunId} not found"
});
}
// Verify tenant
if (metadata.TenantId != request.TenantId)
{
return ExportVerificationResult.Failed(
request.RunId,
new VerificationError
{
Code = VerificationErrorCodes.TenantMismatch,
Message = "Tenant ID does not match run"
});
}
ManifestVerificationResult? manifestResult = null;
SignatureVerificationResult? signatureResult = null;
EncryptionVerificationResult? encryptionResult = null;
AttestationStatus? attestationStatus = null;
var hashResults = new List<HashVerificationResult>();
// Get manifest content
var manifestContent = request.ManifestContent
?? await _artifactStore.GetManifestAsync(request.RunId, cancellationToken);
// Verify manifest
if (request.Options.VerifyManifestIntegrity && !string.IsNullOrEmpty(manifestContent))
{
manifestResult = await VerifyManifestAsync(manifestContent, cancellationToken);
if (!manifestResult.IsValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ManifestParseError,
Message = "Manifest validation failed",
Details = string.Join("; ", manifestResult.ValidationErrors)
});
}
}
else if (request.Options.VerifyManifestIntegrity)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ManifestNotFound,
Message = "No manifest available for verification"
});
}
// Verify signature
if (request.Options.VerifySignatures)
{
var signatureContent = request.SignatureContent
?? await _artifactStore.GetSignatureAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(signatureContent) && !string.IsNullOrEmpty(manifestContent))
{
var payload = Encoding.UTF8.GetBytes(manifestContent);
signatureResult = await VerifySignatureAsync(
signatureContent,
payload,
request.Options,
cancellationToken);
if (!signatureResult.IsValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.SignatureInvalid,
Message = "Signature verification failed",
Details = string.Join("; ", signatureResult.Errors)
});
}
}
            else
{
warnings.Add("No signature available for verification");
}
}
// Verify hashes
if (request.Options.VerifyHashes)
{
var artifacts = await _artifactStore.GetArtifactsAsync(request.RunId, cancellationToken);
foreach (var artifact in artifacts)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
using var stream = await _artifactStore.OpenArtifactAsync(
request.RunId,
artifact.RelativePath,
cancellationToken);
if (stream is null)
{
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = false,
Error = "Artifact not found"
});
continue;
}
var algorithm = artifact.HashAlgorithm ?? "sha256";
var hash = await ComputeStreamHashAsync(stream, algorithm, cancellationToken);
var isValid = string.Equals(
hash,
artifact.ExpectedHash,
StringComparison.OrdinalIgnoreCase);
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = isValid,
ExpectedHash = artifact.ExpectedHash,
ComputedHash = hash,
Algorithm = algorithm,
SizeBytes = stream.Length
});
if (!isValid)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.HashMismatch,
Message = "Hash mismatch",
Path = artifact.RelativePath,
Details = $"Expected: {artifact.ExpectedHash}, Got: {hash}"
});
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to verify artifact {Path}", artifact.RelativePath);
hashResults.Add(new HashVerificationResult
{
Path = artifact.RelativePath,
IsValid = false,
Error = ex.Message
});
}
}
}
// Verify encryption metadata
if (request.Options.VerifyEncryption && metadata.EncryptionMode is not null)
{
encryptionResult = VerifyEncryptionMetadata(metadata.EncryptionMode);
if (!encryptionResult.IsValid)
{
errors.AddRange(encryptionResult.Errors.Select(e => new VerificationError
{
Code = VerificationErrorCodes.EncryptionInvalid,
Message = e
}));
}
}
// Determine overall status
var status = DetermineStatus(errors, warnings);
_logger.LogInformation(
"Verification completed for run {RunId}: {Status} with {ErrorCount} errors",
request.RunId, status, errors.Count);
return new ExportVerificationResult
{
Status = status,
RunId = request.RunId,
Manifest = manifestResult,
Signature = signatureResult,
FileHashes = hashResults,
Encryption = encryptionResult,
Attestation = attestationStatus,
Errors = errors,
Warnings = warnings
};
}
/// <inheritdoc />
public async IAsyncEnumerable<VerificationProgressEvent> VerifyStreamingAsync(
ExportVerificationRequest request,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.Started,
Message = "Verification started"
};
// Get artifacts for progress tracking
var artifacts = await _artifactStore.GetArtifactsAsync(request.RunId, cancellationToken);
var totalItems = artifacts.Count + 2; // +2 for manifest and signature
var verified = 0;
var passed = 0;
var failed = 0;
// Verify manifest
var manifestContent = request.ManifestContent
?? await _artifactStore.GetManifestAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(manifestContent) && request.Options.VerifyManifestIntegrity)
{
var manifestResult = await VerifyManifestAsync(manifestContent, cancellationToken);
verified++;
if (manifestResult.IsValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.ManifestVerified,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = manifestResult.IsValid ? "Manifest valid" : "Manifest invalid"
};
}
// Verify signature
if (request.Options.VerifySignatures)
{
var signatureContent = request.SignatureContent
?? await _artifactStore.GetSignatureAsync(request.RunId, cancellationToken);
if (!string.IsNullOrEmpty(signatureContent) && !string.IsNullOrEmpty(manifestContent))
{
var payload = Encoding.UTF8.GetBytes(manifestContent);
var sigResult = await VerifySignatureAsync(
signatureContent,
payload,
request.Options,
cancellationToken);
verified++;
if (sigResult.IsValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.SignatureVerified,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = sigResult.IsValid ? "Signature valid" : "Signature invalid"
};
}
}
// Verify hashes
if (request.Options.VerifyHashes && artifacts.Count > 0)
{
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationStarted,
TotalItems = artifacts.Count,
Message = $"Verifying {artifacts.Count} files"
};
foreach (var artifact in artifacts)
{
cancellationToken.ThrowIfCancellationRequested();
bool isValid = false;
try
{
using var stream = await _artifactStore.OpenArtifactAsync(
request.RunId,
artifact.RelativePath,
cancellationToken);
if (stream is not null)
{
var hash = await ComputeStreamHashAsync(
stream,
artifact.HashAlgorithm ?? "sha256",
cancellationToken);
isValid = string.Equals(hash, artifact.ExpectedHash, StringComparison.OrdinalIgnoreCase);
}
}
                catch (OperationCanceledException)
                {
                    throw; // don't mask cancellation as a hash failure
                }
                catch
                {
                    // Ignore per-file read/hash failures - isValid stays false
                }
verified++;
if (isValid) passed++;
else failed++;
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationProgress,
CurrentItem = artifact.RelativePath,
ProgressPercent = (int)(verified * 100.0 / totalItems),
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed
};
}
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.HashVerificationComplete,
TotalItems = artifacts.Count,
VerifiedItems = artifacts.Count,
PassedItems = passed,
FailedItems = failed,
Message = $"Hash verification complete: {passed} passed, {failed} failed"
};
}
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.Completed,
ProgressPercent = 100,
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = failed == 0 ? "Verification successful" : $"Verification completed with {failed} failures"
};
}
/// <inheritdoc />
public Task<ManifestVerificationResult> VerifyManifestAsync(
string manifestContent,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
var entryCount = 0;
string? formatVersion = null;
string? manifestDigest = null;
try
{
// Compute manifest digest
manifestDigest = ComputeHash(Encoding.UTF8.GetBytes(manifestContent), "sha256");
// Try to parse as JSON
using var doc = JsonDocument.Parse(manifestContent);
// Check for version
if (doc.RootElement.TryGetProperty("version", out var versionElem))
{
formatVersion = versionElem.GetString();
}
// Check for entries array
if (doc.RootElement.TryGetProperty("files", out var filesElem) && filesElem.ValueKind == JsonValueKind.Array)
{
entryCount = filesElem.GetArrayLength();
}
else if (doc.RootElement.TryGetProperty("entries", out var entriesElem) && entriesElem.ValueKind == JsonValueKind.Array)
{
entryCount = entriesElem.GetArrayLength();
}
else if (doc.RootElement.ValueKind == JsonValueKind.Array)
{
// Manifest is just an array of entries
entryCount = doc.RootElement.GetArrayLength();
}
}
catch (JsonException ex)
{
            // Fall back to NDJSON (one JSON object per line); TrimEntries also
            // strips the trailing '\r' left by CRLF line endings.
            try
            {
                var lines = manifestContent.Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
                entryCount = 0;
                foreach (var line in lines)
                {
                    using var lineDoc = JsonDocument.Parse(line); // parsed for validation only
                    entryCount++;
                }
            }
catch
{
errors.Add($"Invalid manifest format: {ex.Message}");
}
}
return Task.FromResult(new ManifestVerificationResult
{
IsValid = errors.Count == 0,
FormatVersion = formatVersion,
EntryCount = entryCount,
ManifestDigest = manifestDigest,
DigestMatch = true, // No expected digest provided
ValidationErrors = errors
});
}
/// <inheritdoc />
public Task<SignatureVerificationResult> VerifySignatureAsync(
string signatureContent,
byte[] payload,
ExportVerificationOptions options,
CancellationToken cancellationToken = default)
{
var errors = new List<string>();
string? algorithm = null;
string? keyId = null;
string? signerIdentity = null;
DateTimeOffset? signedAt = null;
try
{
// Try to parse as DSSE envelope
using var doc = JsonDocument.Parse(signatureContent);
if (doc.RootElement.TryGetProperty("payloadType", out var payloadType))
{
// DSSE format
if (doc.RootElement.TryGetProperty("signatures", out var signatures) &&
signatures.ValueKind == JsonValueKind.Array &&
signatures.GetArrayLength() > 0)
{
var firstSig = signatures[0];
keyId = firstSig.TryGetProperty("keyid", out var kid) ? kid.GetString() : null;
// In a real implementation, we would verify the signature here
// For now, we just validate structure
algorithm = "DSSE";
}
else
{
errors.Add("DSSE envelope has no signatures");
}
}
else
{
// Unknown signature format
errors.Add("Unknown signature format");
}
            // Simplified trust check: TrustedKeys entries are matched against the DSSE
            // keyid rather than parsed as PEM/base64 key material; full verification
            // would load each key and check the signature cryptographically.
if (options.TrustedKeys.Count > 0 && keyId is not null)
{
if (!options.TrustedKeys.Contains(keyId))
{
errors.Add($"Signer key {keyId} is not in trusted keys list");
}
}
}
catch (JsonException ex)
{
errors.Add($"Failed to parse signature: {ex.Message}");
}
return Task.FromResult(new SignatureVerificationResult
{
IsValid = errors.Count == 0,
Algorithm = algorithm,
KeyId = keyId,
SignerIdentity = signerIdentity,
SignedAt = signedAt,
Errors = errors
});
}
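    // For reference, a minimal DSSE envelope shape that VerifySignatureAsync accepts;
    // values are illustrative, and only payloadType and signatures[0].keyid are read:
    //
    //   {
    //     "payloadType": "application/vnd.in-toto+json",
    //     "payload": "<base64>",
    //     "signatures": [ { "keyid": "key-1", "sig": "<base64>" } ]
    //   }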
/// <inheritdoc />
public async Task<string> ComputeHashAsync(
string filePath,
string algorithm = "sha256",
CancellationToken cancellationToken = default)
{
using var stream = File.OpenRead(filePath);
return await ComputeStreamHashAsync(stream, algorithm, cancellationToken);
}
/// <inheritdoc />
    public string ComputeHash(ReadOnlySpan<byte> content, string algorithm = "sha256")
    {
        using var hasher = CreateHashAlgorithm(algorithm);
        Span<byte> hash = stackalloc byte[hasher.HashSize / 8];
        if (!hasher.TryComputeHash(content, hash, out _))
        {
            throw new InvalidOperationException($"Failed to compute {algorithm} hash.");
        }
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
private async Task<string> ComputeStreamHashAsync(
Stream stream,
string algorithm,
CancellationToken cancellationToken)
{
using var hasher = CreateHashAlgorithm(algorithm);
var hash = await hasher.ComputeHashAsync(stream, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static HashAlgorithm CreateHashAlgorithm(string algorithm)
{
return algorithm.ToLowerInvariant() switch
{
"sha256" => SHA256.Create(),
"sha384" => SHA384.Create(),
"sha512" => SHA512.Create(),
_ => throw new ArgumentException($"Unsupported hash algorithm: {algorithm}", nameof(algorithm))
};
}
private static VerificationStatus DetermineStatus(List<VerificationError> errors, List<string> warnings)
{
if (errors.Count == 0)
{
return warnings.Count > 0 ? VerificationStatus.Partial : VerificationStatus.Valid;
}
return errors.Any(e => e.Code == VerificationErrorCodes.TenantMismatch ||
e.Code == VerificationErrorCodes.ManifestNotFound)
? VerificationStatus.Error
: VerificationStatus.Invalid;
}
private static EncryptionVerificationResult VerifyEncryptionMetadata(string encryptionMode)
{
var errors = new List<string>();
var validModes = new[] { "aes-gcm+age", "aes-gcm+kms", "none" };
if (!validModes.Contains(encryptionMode, StringComparer.OrdinalIgnoreCase))
{
errors.Add($"Unknown encryption mode: {encryptionMode}");
}
return new EncryptionVerificationResult
{
IsValid = errors.Count == 0,
Mode = encryptionMode,
RecipientCount = 0, // Would need to parse metadata to get this
AadFormat = "{runId}:{relativePath}",
NonceFormatValid = true,
Errors = errors
};
}
/// <inheritdoc />
public async Task<PackRunVerificationResult> VerifyPackRunIntegrationAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var errors = new List<VerificationError>();
PackRunAttestationResult? attestationResult = null;
SubjectAlignmentResult? alignmentResult = null;
ProvenanceChainResult? chainResult = null;
var provenanceLinks = new List<ProvenanceLink>();
// Get pack run attestation if store is available
if (_packRunStore is not null && request.PackRunId.HasValue)
{
var attestationData = await _packRunStore.GetAttestationAsync(
request.PackRunId.Value,
cancellationToken);
if (attestationData is not null)
{
attestationResult = new PackRunAttestationResult
{
IsValid = attestationData.Status == "Signed",
AttestationId = attestationData.AttestationId,
PredicateType = attestationData.PredicateType,
SignatureValid = attestationData.Status == "Signed",
Subjects = attestationData.Subjects,
Builder = attestationData.Builder,
CreatedAt = attestationData.CreatedAt
};
// Extract provenance links
provenanceLinks.AddRange(await ExtractProvenanceLinksAsync(
request.ExportRunId,
request.PackRunId.Value,
cancellationToken));
}
else
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.PackRunNotFound,
Message = $"Pack run {request.PackRunId} attestation not found"
});
}
}
// Verify subject alignment
if (request.VerifySubjectAlignment && attestationResult is not null)
{
            // Simplified: export subjects are not parsed from the manifest yet, so the
            // list is empty and alignment trivially passes. A full implementation would
            // read subject digests from the export manifest before comparing.
            var exportSubjects = new List<AttestationSubject>();
alignmentResult = VerifySubjectAlignment(exportSubjects, attestationResult.Subjects);
if (!alignmentResult.IsAligned)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.SubjectDigestMismatch,
Message = "Subject digests do not align between export and pack run"
});
}
}
        // Verify provenance chain. Completeness is approximated by the presence of any
        // extracted links; a stricter check would require each expected link type.
if (request.VerifyProvenanceChain)
{
chainResult = new ProvenanceChainResult
{
IsComplete = provenanceLinks.Count > 0,
ChainDepth = provenanceLinks.Count,
Links = provenanceLinks,
MissingLinks = [],
Errors = []
};
if (!chainResult.IsComplete)
{
errors.Add(new VerificationError
{
Code = VerificationErrorCodes.ProvenanceChainBroken,
Message = "Provenance chain is incomplete or broken"
});
}
}
return new PackRunVerificationResult
{
IsValid = errors.Count == 0,
ExportRunId = request.ExportRunId,
PackRunId = request.PackRunId,
Attestation = attestationResult,
SubjectAlignment = alignmentResult,
ProvenanceChain = chainResult,
ProvenanceLinks = provenanceLinks,
Errors = errors
};
}
/// <inheritdoc />
public SubjectAlignmentResult VerifySubjectAlignment(
IReadOnlyList<AttestationSubject> exportSubjects,
IReadOnlyList<AttestationSubject> packRunSubjects)
{
var exportMap = exportSubjects.ToDictionary(
s => s.Name,
s => s.Digest.TryGetValue("sha256", out var d) ? d : null,
StringComparer.OrdinalIgnoreCase);
var packRunMap = packRunSubjects.ToDictionary(
s => s.Name,
s => s.Digest.TryGetValue("sha256", out var d) ? d : null,
StringComparer.OrdinalIgnoreCase);
var matched = 0;
var exportOnly = new List<string>();
var packRunOnly = new List<string>();
var mismatches = new List<DigestMismatch>();
// Check all export subjects
foreach (var (name, digest) in exportMap)
{
if (packRunMap.TryGetValue(name, out var packRunDigest))
{
if (string.Equals(digest, packRunDigest, StringComparison.OrdinalIgnoreCase))
{
matched++;
}
else
{
mismatches.Add(new DigestMismatch
{
SubjectName = name,
ExportDigest = digest,
PackRunDigest = packRunDigest
});
}
}
else
{
exportOnly.Add(name);
}
}
// Check for pack run subjects not in export
foreach (var name in packRunMap.Keys)
{
if (!exportMap.ContainsKey(name))
{
packRunOnly.Add(name);
}
}
return new SubjectAlignmentResult
{
IsAligned = mismatches.Count == 0 && exportOnly.Count == 0,
ExportSubjectCount = exportSubjects.Count,
PackRunSubjectCount = packRunSubjects.Count,
MatchedCount = matched,
ExportOnlySubjects = exportOnly,
PackRunOnlySubjects = packRunOnly,
DigestMismatches = mismatches
};
}
/// <inheritdoc />
public async Task<IReadOnlyList<ProvenanceLink>> ExtractProvenanceLinksAsync(
Guid exportRunId,
Guid packRunId,
CancellationToken cancellationToken = default)
{
var links = new List<ProvenanceLink>();
if (_packRunStore is null)
{
return links;
}
var attestation = await _packRunStore.GetAttestationAsync(packRunId, cancellationToken);
if (attestation is not null)
{
            // Link from export run to its attestation
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.ExportToAttestation,
SourceId = exportRunId.ToString(),
TargetId = attestation.AttestationId,
CreatedAt = attestation.CreatedAt
});
// Links from attestation to subjects
foreach (var subject in attestation.Subjects)
{
var digest = subject.Digest.TryGetValue("sha256", out var d) ? d : null;
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.AttestationToSubject,
SourceId = attestation.AttestationId,
TargetId = subject.Name,
Digest = digest
});
// Link from pack run to artifact
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.PackRunToArtifact,
SourceId = packRunId.ToString(),
TargetId = subject.Name,
Digest = digest
});
// Link from artifact to export
links.Add(new ProvenanceLink
{
Type = ProvenanceLinkType.ArtifactToExport,
SourceId = subject.Name,
TargetId = exportRunId.ToString(),
Digest = digest
});
}
}
return links;
}
}
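
A consumption sketch for the streaming path, assuming a resolved `IExportVerificationService` named `verifier` and a `request` built as in the earlier models sketch.

await foreach (var evt in verifier.VerifyStreamingAsync(request, cancellationToken))
{
    Console.WriteLine($"[{evt.Type}] {evt.ProgressPercent}% {evt.Message}");

    if (evt.Type == VerificationProgressType.Completed && evt.FailedItems > 0)
    {
        // e.g. surface a non-zero exit code or raise an alert
    }
}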


@@ -0,0 +1,32 @@
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Extension methods for registering export verification services.
/// </summary>
public static class ExportVerificationServiceCollectionExtensions
{
/// <summary>
/// Registers export verification services with in-memory artifact store.
/// </summary>
public static IServiceCollection AddExportVerification(this IServiceCollection services)
{
services.AddSingleton<IExportArtifactStore, InMemoryExportArtifactStore>();
services.AddSingleton<IExportVerificationService, ExportVerificationService>();
return services;
}
/// <summary>
/// Registers export verification services with custom artifact store.
/// </summary>
public static IServiceCollection AddExportVerification<TArtifactStore>(this IServiceCollection services)
where TArtifactStore : class, IExportArtifactStore
{
services.AddSingleton<IExportArtifactStore, TArtifactStore>();
services.AddSingleton<IExportVerificationService, ExportVerificationService>();
return services;
}
}
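
A registration sketch; `PostgresExportArtifactStore` is a hypothetical custom store named only for illustration.

services.AddExportVerification();                                 // in-memory store (tests)
// services.AddExportVerification<PostgresExportArtifactStore>(); // custom store

var verifier = services.BuildServiceProvider()
                       .GetRequiredService<IExportVerificationService>();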


@@ -0,0 +1,278 @@
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// Service for verifying export bundles and artifacts.
/// </summary>
public interface IExportVerificationService
{
/// <summary>
/// Verifies an export bundle.
/// </summary>
/// <param name="request">Verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result.</returns>
Task<ExportVerificationResult> VerifyAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies an export bundle with progress streaming.
/// </summary>
/// <param name="request">Verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Async enumerable of progress events, ending with final result.</returns>
IAsyncEnumerable<VerificationProgressEvent> VerifyStreamingAsync(
ExportVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a manifest's internal consistency.
/// </summary>
/// <param name="manifestContent">Manifest JSON content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Manifest verification result.</returns>
Task<ManifestVerificationResult> VerifyManifestAsync(
string manifestContent,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE signature.
/// </summary>
/// <param name="signatureContent">Signature content (DSSE envelope).</param>
/// <param name="payload">Payload that was signed.</param>
/// <param name="options">Verification options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Signature verification result.</returns>
Task<SignatureVerificationResult> VerifySignatureAsync(
string signatureContent,
byte[] payload,
ExportVerificationOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes hash for a file.
/// </summary>
/// <param name="filePath">Path to file.</param>
/// <param name="algorithm">Hash algorithm (sha256, sha384, sha512).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Hex-encoded hash.</returns>
Task<string> ComputeHashAsync(
string filePath,
string algorithm = "sha256",
CancellationToken cancellationToken = default);
/// <summary>
/// Computes hash for content.
/// </summary>
/// <param name="content">Content to hash.</param>
/// <param name="algorithm">Hash algorithm.</param>
/// <returns>Hex-encoded hash.</returns>
string ComputeHash(ReadOnlySpan<byte> content, string algorithm = "sha256");
/// <summary>
/// Verifies pack run integration with an export.
/// </summary>
/// <param name="request">Pack run verification request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Pack run verification result.</returns>
Task<PackRunVerificationResult> VerifyPackRunIntegrationAsync(
PackRunVerificationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies subject digest alignment between export and pack run.
/// </summary>
/// <param name="exportSubjects">Subjects from export manifest.</param>
/// <param name="packRunSubjects">Subjects from pack run attestation.</param>
/// <returns>Subject alignment result.</returns>
SubjectAlignmentResult VerifySubjectAlignment(
IReadOnlyList<AttestationSubject> exportSubjects,
IReadOnlyList<AttestationSubject> packRunSubjects);
/// <summary>
/// Extracts provenance links from an export and its pack run.
/// </summary>
/// <param name="exportRunId">Export run ID.</param>
/// <param name="packRunId">Pack run ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Provenance links.</returns>
Task<IReadOnlyList<ProvenanceLink>> ExtractProvenanceLinksAsync(
Guid exportRunId,
Guid packRunId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Store for retrieving pack run attestations.
/// </summary>
public interface IPackRunAttestationStore
{
/// <summary>
/// Gets the attestation for a pack run.
/// </summary>
Task<PackRunAttestationData?> GetAttestationAsync(Guid packRunId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets attestation by ID.
/// </summary>
Task<PackRunAttestationData?> GetAttestationByIdAsync(string attestationId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets pack run IDs linked to an export run.
/// </summary>
Task<IReadOnlyList<Guid>> GetLinkedPackRunsAsync(Guid exportRunId, CancellationToken cancellationToken = default);
}
/// <summary>
/// Pack run attestation data.
/// </summary>
public sealed record PackRunAttestationData
{
/// <summary>
/// Pack run ID.
/// </summary>
public required Guid PackRunId { get; init; }
/// <summary>
/// Attestation ID.
/// </summary>
public required string AttestationId { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// DSSE envelope content.
/// </summary>
public string? DsseEnvelope { get; init; }
/// <summary>
/// Predicate type.
/// </summary>
public string? PredicateType { get; init; }
/// <summary>
/// Subjects in the attestation.
/// </summary>
public IReadOnlyList<AttestationSubject> Subjects { get; init; } = [];
/// <summary>
/// Builder information.
/// </summary>
public BuilderInfo? Builder { get; init; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
/// <summary>
/// Attestation status.
/// </summary>
public string? Status { get; init; }
}
/// <summary>
/// Store for retrieving export artifacts for verification.
/// </summary>
public interface IExportArtifactStore
{
/// <summary>
/// Gets the manifest for a run.
/// </summary>
Task<string?> GetManifestAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the signature for a run.
/// </summary>
Task<string?> GetSignatureAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets artifact paths for a run.
/// </summary>
Task<IReadOnlyList<ArtifactInfo>> GetArtifactsAsync(Guid runId, CancellationToken cancellationToken = default);
/// <summary>
/// Opens a stream to read an artifact.
/// </summary>
Task<Stream?> OpenArtifactAsync(Guid runId, string relativePath, CancellationToken cancellationToken = default);
/// <summary>
/// Gets run metadata.
/// </summary>
Task<RunMetadata?> GetRunMetadataAsync(Guid runId, CancellationToken cancellationToken = default);
}
/// <summary>
/// Information about an artifact.
/// </summary>
public sealed record ArtifactInfo
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// Expected hash from manifest.
/// </summary>
public string? ExpectedHash { get; init; }
/// <summary>
/// Hash algorithm.
/// </summary>
public string? HashAlgorithm { get; init; }
/// <summary>
/// Expected size in bytes.
/// </summary>
public long? ExpectedSize { get; init; }
/// <summary>
/// Content type.
/// </summary>
public string? ContentType { get; init; }
/// <summary>
/// Whether the artifact is encrypted.
/// </summary>
public bool IsEncrypted { get; init; }
}
/// <summary>
/// Run metadata for verification.
/// </summary>
public sealed record RunMetadata
{
/// <summary>
/// Run ID.
/// </summary>
public required Guid RunId { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public required Guid TenantId { get; init; }
/// <summary>
/// Profile ID.
/// </summary>
public required Guid ProfileId { get; init; }
/// <summary>
/// When the run completed.
/// </summary>
public DateTimeOffset? CompletedAt { get; init; }
/// <summary>
/// Encryption mode used.
/// </summary>
public string? EncryptionMode { get; init; }
/// <summary>
/// Manifest digest.
/// </summary>
public string? ManifestDigest { get; init; }
}
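/// <summary>
/// Illustrative sketch: re-hashes a stored artifact and compares it with the
/// manifest expectation. SHA-256 is computed inline here; production code
/// presumably routes through the hashing service instead.
/// </summary>
internal static class ArtifactHashCheckSketch
{
    public static async Task<bool> MatchesManifestAsync(
        IExportArtifactStore store,
        Guid runId,
        ArtifactInfo info,
        CancellationToken cancellationToken = default)
    {
        if (info.ExpectedHash is null)
        {
            return false; // nothing to verify against
        }
        await using var stream = await store.OpenArtifactAsync(runId, info.RelativePath, cancellationToken);
        if (stream is null)
        {
            return false; // artifact missing from the store
        }
        using var sha = System.Security.Cryptography.SHA256.Create();
        var digest = await sha.ComputeHashAsync(stream, cancellationToken);
        var hex = Convert.ToHexString(digest).ToLowerInvariant();
        return string.Equals(hex, info.ExpectedHash, StringComparison.OrdinalIgnoreCase);
    }
}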

View File

@@ -0,0 +1,136 @@
using System.Collections.Concurrent;
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// In-memory implementation of the export artifact store for testing.
/// </summary>
public sealed class InMemoryExportArtifactStore : IExportArtifactStore
{
private readonly ConcurrentDictionary<Guid, RunData> _runs = new();
/// <summary>
/// Adds a run for testing.
/// </summary>
public void AddRun(RunMetadata metadata)
{
_runs[metadata.RunId] = new RunData
{
Metadata = metadata,
Artifacts = new ConcurrentDictionary<string, ArtifactData>()
};
}
/// <summary>
/// Sets the manifest for a run.
/// </summary>
public void SetManifest(Guid runId, string manifest)
{
if (_runs.TryGetValue(runId, out var run))
{
run.Manifest = manifest;
}
}
/// <summary>
/// Sets the signature for a run.
/// </summary>
public void SetSignature(Guid runId, string signature)
{
if (_runs.TryGetValue(runId, out var run))
{
run.Signature = signature;
}
}
/// <summary>
/// Adds an artifact for a run.
/// </summary>
public void AddArtifact(
Guid runId,
string relativePath,
byte[] content,
string? expectedHash = null,
string? hashAlgorithm = "sha256")
{
if (_runs.TryGetValue(runId, out var run))
{
run.Artifacts[relativePath] = new ArtifactData
{
Content = content,
Info = new ArtifactInfo
{
RelativePath = relativePath,
ExpectedHash = expectedHash,
HashAlgorithm = hashAlgorithm,
ExpectedSize = content.Length
}
};
}
}
/// <inheritdoc />
public Task<string?> GetManifestAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Manifest);
}
/// <inheritdoc />
public Task<string?> GetSignatureAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Signature);
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactInfo>> GetArtifactsAsync(Guid runId, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run))
{
var infos = run.Artifacts.Values.Select(a => a.Info).ToList();
return Task.FromResult<IReadOnlyList<ArtifactInfo>>(infos);
}
return Task.FromResult<IReadOnlyList<ArtifactInfo>>([]);
}
/// <inheritdoc />
public Task<Stream?> OpenArtifactAsync(Guid runId, string relativePath, CancellationToken cancellationToken = default)
{
if (_runs.TryGetValue(runId, out var run) &&
run.Artifacts.TryGetValue(relativePath, out var artifact))
{
return Task.FromResult<Stream?>(new MemoryStream(artifact.Content));
}
return Task.FromResult<Stream?>(null);
}
/// <inheritdoc />
public Task<RunMetadata?> GetRunMetadataAsync(Guid runId, CancellationToken cancellationToken = default)
{
_runs.TryGetValue(runId, out var run);
return Task.FromResult(run?.Metadata);
}
/// <summary>
/// Clears all data.
/// </summary>
public void Clear()
{
_runs.Clear();
}
private sealed class RunData
{
public required RunMetadata Metadata { get; init; }
public required ConcurrentDictionary<string, ArtifactData> Artifacts { get; init; }
public string? Manifest { get; set; }
public string? Signature { get; set; }
}
private sealed class ArtifactData
{
public required byte[] Content { get; init; }
public required ArtifactInfo Info { get; init; }
}
}
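/// <summary>
/// Example seeding for tests (all values are placeholders): one run, a
/// manifest, and a single artifact.
/// </summary>
internal static class InMemoryExportArtifactStoreSeedingExample
{
    public static InMemoryExportArtifactStore CreateSeeded(Guid runId)
    {
        var store = new InMemoryExportArtifactStore();
        store.AddRun(new RunMetadata
        {
            RunId = runId,
            TenantId = Guid.NewGuid(),
            ProfileId = Guid.NewGuid()
        });
        store.SetManifest(runId, """{"artifacts":["sboms/app.json"]}""");
        store.AddArtifact(runId, "sboms/app.json", "{}"u8.ToArray());
        return store;
    }
}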

View File

@@ -0,0 +1,77 @@
namespace StellaOps.ExportCenter.Core.Verification;
/// <summary>
/// In-memory implementation of pack run attestation store for testing.
/// </summary>
public sealed class InMemoryPackRunAttestationStore : IPackRunAttestationStore
{
private readonly Dictionary<Guid, PackRunAttestationData> _attestations = new();
private readonly Dictionary<string, PackRunAttestationData> _attestationsById = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<Guid, List<Guid>> _exportToPackRunLinks = new();
/// <summary>
/// Adds an attestation to the store.
/// </summary>
public void AddAttestation(PackRunAttestationData attestation)
{
ArgumentNullException.ThrowIfNull(attestation);
_attestations[attestation.PackRunId] = attestation;
_attestationsById[attestation.AttestationId] = attestation;
}
/// <summary>
/// Links a pack run to an export run.
/// </summary>
public void LinkToExport(Guid exportRunId, Guid packRunId)
{
if (!_exportToPackRunLinks.TryGetValue(exportRunId, out var links))
{
links = [];
_exportToPackRunLinks[exportRunId] = links;
}
if (!links.Contains(packRunId))
{
links.Add(packRunId);
}
}
/// <summary>
/// Clears all data from the store.
/// </summary>
public void Clear()
{
_attestations.Clear();
_attestationsById.Clear();
_exportToPackRunLinks.Clear();
}
/// <inheritdoc />
public Task<PackRunAttestationData?> GetAttestationAsync(
Guid packRunId,
CancellationToken cancellationToken = default)
{
_attestations.TryGetValue(packRunId, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<PackRunAttestationData?> GetAttestationByIdAsync(
string attestationId,
CancellationToken cancellationToken = default)
{
_attestationsById.TryGetValue(attestationId, out var attestation);
return Task.FromResult(attestation);
}
/// <inheritdoc />
public Task<IReadOnlyList<Guid>> GetLinkedPackRunsAsync(
Guid exportRunId,
CancellationToken cancellationToken = default)
{
if (_exportToPackRunLinks.TryGetValue(exportRunId, out var links))
{
return Task.FromResult<IReadOnlyList<Guid>>(links);
}
return Task.FromResult<IReadOnlyList<Guid>>([]);
}
}
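/// <summary>
/// Example wiring for tests (IDs are placeholders): registers an attestation
/// and links its pack run to an export run so GetLinkedPackRunsAsync resolves.
/// </summary>
internal static class InMemoryPackRunAttestationStoreSeedingExample
{
    public static InMemoryPackRunAttestationStore CreateLinked(Guid exportRunId, Guid packRunId)
    {
        var store = new InMemoryPackRunAttestationStore();
        store.AddAttestation(new PackRunAttestationData
        {
            PackRunId = packRunId,
            AttestationId = $"attest-{packRunId:N}",
            TenantId = Guid.NewGuid()
        });
        store.LinkToExport(exportRunId, packRunId);
        return store;
    }
}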

View File

@@ -0,0 +1,264 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class ExportAdapterRegistryTests
{
[Fact]
public void GetAdapter_ExistingAdapter_ReturnsAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("json:raw");
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapter_CaseInsensitive_ReturnsAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("JSON:RAW");
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapter_NonExistent_ReturnsNull()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapter("nonexistent:adapter");
// Assert
Assert.Null(adapter);
}
[Fact]
public void GetAdapterForFormat_JsonRaw_ReturnsJsonRawAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonRaw);
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:raw", adapter.AdapterId);
}
[Fact]
public void GetAdapterForFormat_JsonPolicy_ReturnsJsonPolicyAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonPolicy);
// Assert
Assert.NotNull(adapter);
Assert.Equal("json:policy", adapter.AdapterId);
}
[Fact]
public void GetAdapterForFormat_Ndjson_ReturnsFirstRegisteredAdapter()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.Ndjson);
// Assert
Assert.NotNull(adapter);
// Both adapters support Ndjson, first one wins
Assert.Contains(ExportFormat.Ndjson, adapter.SupportedFormats);
}
[Fact]
public void GetAdapterForFormat_Unsupported_ReturnsNull()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapter = registry.GetAdapterForFormat(ExportFormat.Csv);
// Assert
Assert.Null(adapter);
}
[Fact]
public void GetAllAdapters_ReturnsAllRegisteredAdapters()
{
// Arrange
var registry = CreateRegistry();
// Act
var adapters = registry.GetAllAdapters();
// Assert
Assert.Equal(2, adapters.Count);
Assert.Contains(adapters, a => a.AdapterId == "json:raw");
Assert.Contains(adapters, a => a.AdapterId == "json:policy");
}
[Fact]
public void GetAdapterIds_ReturnsAllAdapterIds()
{
// Arrange
var registry = CreateRegistry();
// Act
var ids = registry.GetAdapterIds();
// Assert
Assert.Equal(2, ids.Count);
Assert.Contains("json:raw", ids);
Assert.Contains("json:policy", ids);
}
[Fact]
public void Registry_EmptyAdapters_HandlesGracefully()
{
// Arrange
var registry = new ExportAdapterRegistry([]);
// Act & Assert
Assert.Null(registry.GetAdapter("json:raw"));
Assert.Null(registry.GetAdapterForFormat(ExportFormat.JsonRaw));
Assert.Empty(registry.GetAllAdapters());
Assert.Empty(registry.GetAdapterIds());
}
[Fact]
public void AddExportAdapters_Extension_RegistersAdapters()
{
// Arrange
var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton<ICryptoHash>(new FakeCryptoHash());
// Act
services.AddExportAdapters();
var provider = services.BuildServiceProvider();
// Assert
var registry = provider.GetRequiredService<IExportAdapterRegistry>();
Assert.NotNull(registry);
// At least 2 base adapters (JsonRaw, JsonPolicy) plus additional adapters (Mirror, TrivyDb, TrivyJavaDb)
Assert.True(registry.GetAllAdapters().Count >= 2);
Assert.Contains(registry.GetAllAdapters(), a => a.AdapterId == "json:raw");
Assert.Contains(registry.GetAllAdapters(), a => a.AdapterId == "json:policy");
}
[Fact]
public void AddExportAdapters_WithOptions_RegistersAdaptersWithOptions()
{
// Arrange
var services = new ServiceCollection();
services.AddLogging();
var normalizationOptions = new JsonNormalizationOptions { SortKeys = true };
var redactionOptions = new JsonRedactionOptions { RedactFields = ["password"] };
// Act
services.AddExportAdapters(normalizationOptions, redactionOptions);
var provider = services.BuildServiceProvider();
// Assert
var registry = provider.GetRequiredService<IExportAdapterRegistry>();
Assert.NotNull(registry);
Assert.Equal(2, registry.GetAllAdapters().Count);
}
[Fact]
public void DuplicateAdapterIds_LastOneWins()
{
// Arrange
var adapter1 = new TestAdapter("test:id", "First");
var adapter2 = new TestAdapter("test:id", "Second");
// Act
var registry = new ExportAdapterRegistry([adapter1, adapter2]);
// Assert
var adapter = registry.GetAdapter("test:id");
Assert.NotNull(adapter);
Assert.Equal("Second", adapter.DisplayName);
}
[Fact]
public void FormatMapping_FirstAdapterForFormatWins()
{
// Arrange
var adapter1 = new TestAdapter("adapter:1", "First", [ExportFormat.JsonRaw]);
var adapter2 = new TestAdapter("adapter:2", "Second", [ExportFormat.JsonRaw]);
// Act
var registry = new ExportAdapterRegistry([adapter1, adapter2]);
// Assert
var adapter = registry.GetAdapterForFormat(ExportFormat.JsonRaw);
Assert.NotNull(adapter);
Assert.Equal("adapter:1", adapter.AdapterId);
}
private static ExportAdapterRegistry CreateRegistry()
{
var jsonRaw = new JsonRawAdapter(NullLogger<JsonRawAdapter>.Instance);
var jsonPolicy = new JsonPolicyAdapter(NullLogger<JsonPolicyAdapter>.Instance);
return new ExportAdapterRegistry([jsonRaw, jsonPolicy]);
}
private sealed class TestAdapter : IExportAdapter
{
public string AdapterId { get; }
public string DisplayName { get; }
public IReadOnlyList<ExportFormat> SupportedFormats { get; }
public bool SupportsStreaming => true;
public TestAdapter(string adapterId, string displayName, IReadOnlyList<ExportFormat>? formats = null)
{
AdapterId = adapterId;
DisplayName = displayName;
SupportedFormats = formats ?? [ExportFormat.JsonRaw];
}
public Task<ExportAdapterResult> ProcessAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
=> Task.FromResult(new ExportAdapterResult { Success = true });
public async IAsyncEnumerable<AdapterItemResult> ProcessStreamAsync(
ExportAdapterContext context,
CancellationToken cancellationToken = default)
{
await Task.CompletedTask;
yield break;
}
public Task<IReadOnlyList<string>> ValidateConfigAsync(
ExportAdapterConfig config,
CancellationToken cancellationToken = default)
=> Task.FromResult<IReadOnlyList<string>>([]);
}
}

View File

@@ -0,0 +1,301 @@
using System.IO.Compression;
using System.Text;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class ExportCompressorTests
{
private readonly ExportCompressor _compressor = new();
[Fact]
public void Compress_WithNone_ReturnsUnmodifiedContent()
{
// Arrange
var content = """{"name":"test","version":"1.0.0"}""";
// Act
var result = _compressor.Compress(content, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(Encoding.UTF8.GetBytes(content), result.CompressedData);
Assert.Equal(result.OriginalSizeBytes, result.CompressedSizeBytes);
Assert.Equal(1.0, result.CompressionRatio);
Assert.Equal(CompressionFormat.None, result.Format);
}
[Fact]
public void CompressBytes_WithNone_ReturnsUnmodifiedBytes()
{
// Arrange
var bytes = new byte[] { 1, 2, 3, 4, 5 };
// Act
var result = _compressor.CompressBytes(bytes, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(bytes, result.CompressedData);
}
[Fact]
public void Compress_WithGzip_CompressesContent()
{
// Arrange
var content = new string('a', 1000); // Compressible content
// Act
var result = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
Assert.True(result.CompressionRatio < 1.0);
Assert.Equal(CompressionFormat.Gzip, result.Format);
}
[Fact]
public void Compress_WithBrotli_CompressesContent()
{
// Arrange
var content = new string('a', 1000);
// Act
var result = _compressor.Compress(content, CompressionFormat.Brotli);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
Assert.Equal(CompressionFormat.Brotli, result.Format);
}
[Fact]
public void Compress_WithZstd_FallsBackToGzip()
{
// Arrange (Zstd falls back to Gzip in current implementation)
var content = new string('b', 1000);
// Act
var result = _compressor.Compress(content, CompressionFormat.Zstd);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
Assert.Equal(CompressionFormat.Zstd, result.Format);
}
[Fact]
public void Compress_CalculatesSha256Hash()
{
// Arrange
var content = """{"test":"data"}""";
// Act
var result = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Sha256);
Assert.Equal(64, result.Sha256.Length); // SHA256 hex string length
Assert.Matches("^[a-f0-9]+$", result.Sha256); // Lowercase hex
}
[Fact]
public void Compress_DeterministicHash_SameContentSameHash()
{
// Arrange
var content = """{"test":"deterministic"}""";
// Act
var result1 = _compressor.Compress(content, CompressionFormat.Gzip);
var result2 = _compressor.Compress(content, CompressionFormat.Gzip);
// Assert
Assert.Equal(result1.Sha256, result2.Sha256);
Assert.Equal(result1.CompressedData, result2.CompressedData);
}
[Fact]
public void Decompress_Gzip_RestoresOriginalContent()
{
// Arrange
var original = """{"name":"test","value":42}""";
var compressed = _compressor.Compress(original, CompressionFormat.Gzip);
Assert.True(compressed.Success);
// Act
var decompressed = _compressor.Decompress(compressed.CompressedData!, CompressionFormat.Gzip);
// Assert
Assert.True(decompressed.Success);
Assert.Equal(original, Encoding.UTF8.GetString(decompressed.DecompressedData!));
}
[Fact]
public void Decompress_Brotli_RestoresOriginalContent()
{
// Arrange
var original = """{"name":"brotli-test"}""";
var compressed = _compressor.Compress(original, CompressionFormat.Brotli);
Assert.True(compressed.Success);
// Act
var decompressed = _compressor.Decompress(compressed.CompressedData!, CompressionFormat.Brotli);
// Assert
Assert.True(decompressed.Success);
Assert.Equal(original, Encoding.UTF8.GetString(decompressed.DecompressedData!));
}
[Fact]
public void Decompress_None_ReturnsUnmodifiedData()
{
// Arrange
var data = new byte[] { 1, 2, 3, 4, 5 };
// Act
var result = _compressor.Decompress(data, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(data, result.DecompressedData);
}
[Fact]
public void Decompress_InvalidData_ReturnsFailed()
{
// Arrange
var invalidData = new byte[] { 1, 2, 3, 4, 5 }; // Not valid gzip
// Act
var result = _compressor.Decompress(invalidData, CompressionFormat.Gzip);
// Assert
Assert.False(result.Success);
Assert.NotNull(result.ErrorMessage);
}
[Fact]
public async Task CompressToStreamAsync_Gzip_WritesToStream()
{
// Arrange
var content = new string('x', 500);
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressToStreamAsync(content, outputStream, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(outputStream.Length > 0);
Assert.True(result.CompressedSizeBytes < result.OriginalSizeBytes);
// Verify by decompressing
outputStream.Position = 0;
using var decompressStream = new GZipStream(outputStream, CompressionMode.Decompress);
using var reader = new StreamReader(decompressStream);
var decompressed = await reader.ReadToEndAsync();
Assert.Equal(content, decompressed);
}
[Fact]
public async Task CompressToStreamAsync_None_WritesBytesDirectly()
{
// Arrange
var content = "test content";
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressToStreamAsync(content, outputStream, CompressionFormat.None);
// Assert
Assert.True(result.Success);
Assert.Equal(Encoding.UTF8.GetByteCount(content), outputStream.Length);
}
[Fact]
public async Task CompressBytesToStreamAsync_WritesCompressedData()
{
// Arrange
var data = Encoding.UTF8.GetBytes(new string('y', 500));
using var outputStream = new MemoryStream();
// Act
var result = await _compressor.CompressBytesToStreamAsync(data, outputStream, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(outputStream.Length > 0);
Assert.True(outputStream.Length < data.Length);
}
[Theory]
[InlineData(CompressionFormat.Gzip, ".gz")]
[InlineData(CompressionFormat.Brotli, ".br")]
[InlineData(CompressionFormat.Zstd, ".zst")]
[InlineData(CompressionFormat.None, "")]
public void GetFileExtension_ReturnsCorrectExtension(CompressionFormat format, string expected)
{
Assert.Equal(expected, ExportCompressor.GetFileExtension(format));
}
[Theory]
[InlineData(CompressionFormat.Gzip, "application/gzip")]
[InlineData(CompressionFormat.Brotli, "application/br")]
[InlineData(CompressionFormat.Zstd, "application/zstd")]
[InlineData(CompressionFormat.None, "application/octet-stream")]
public void GetContentType_ReturnsCorrectContentType(CompressionFormat format, string expected)
{
Assert.Equal(expected, ExportCompressor.GetContentType(format));
}
[Fact]
public void CompressBytes_EmptyArray_Succeeds()
{
// Arrange
var empty = Array.Empty<byte>();
// Act
var result = _compressor.CompressBytes(empty, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.CompressedData);
}
[Fact]
public void Compress_LargeContent_CompressesEfficiently()
{
// Arrange
var largeContent = new string('a', 100_000);
// Act
var result = _compressor.Compress(largeContent, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
Assert.True(result.CompressionRatio < 0.1); // Highly compressible content
}
[Fact]
public void Compress_RandomContent_HandlesUncompressibleData()
{
// Arrange - random data doesn't compress well
var random = new byte[1000];
new Random(42).NextBytes(random);
var randomString = Convert.ToBase64String(random);
// Act
var result = _compressor.Compress(randomString, CompressionFormat.Gzip);
// Assert
Assert.True(result.Success);
// Random data may actually be larger after compression due to gzip overhead
Assert.NotNull(result.CompressedData);
}
}
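// Reference sketch: composing an artifact's file name and HTTP content type
// from the static helpers exercised above; "export.json" is a placeholder.
internal static class CompressorNamingSketch
{
    public static (string FileName, string ContentType) Describe(CompressionFormat format)
        => ("export.json" + ExportCompressor.GetFileExtension(format),
            ExportCompressor.GetContentType(format));
}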

View File

@@ -15,8 +15,8 @@ public sealed class JsonNormalizerTests
Assert.True(result.Success);
Assert.StartsWith("""{"alpha":""", result.NormalizedJson);
-Assert.Contains(""""beta":""", result.NormalizedJson);
-Assert.EndsWith(""""zebra":"z"}""", result.NormalizedJson);
+Assert.Contains("\"beta\":", result.NormalizedJson);
+Assert.EndsWith("\"zebra\":\"z\"}", result.NormalizedJson);
}
[Fact]

View File

@@ -0,0 +1,600 @@
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class JsonPolicyAdapterTests : IDisposable
{
private readonly string _tempDir;
private readonly JsonPolicyAdapter _adapter;
private readonly InMemoryExportDataFetcher _dataFetcher;
private readonly InMemoryExportPolicyEvaluator _policyEvaluator;
public JsonPolicyAdapterTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"export-policy-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_adapter = new JsonPolicyAdapter(NullLogger<JsonPolicyAdapter>.Instance);
_dataFetcher = new InMemoryExportDataFetcher();
_policyEvaluator = new InMemoryExportPolicyEvaluator();
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void AdapterId_IsJsonPolicy()
{
Assert.Equal("json:policy", _adapter.AdapterId);
}
[Fact]
public void DisplayName_IsSet()
{
Assert.Equal("JSON with Policy", _adapter.DisplayName);
}
[Fact]
public void SupportedFormats_IncludesJsonPolicyAndNdjson()
{
Assert.Contains(ExportFormat.JsonPolicy, _adapter.SupportedFormats);
Assert.Contains(ExportFormat.Ndjson, _adapter.SupportedFormats);
}
[Fact]
public void SupportsStreaming_IsTrue()
{
Assert.True(_adapter.SupportsStreaming);
}
[Fact]
public async Task ProcessAsync_SingleItem_CreatesWrappedJsonFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test-component");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Single(result.ItemResults);
Assert.True(result.ItemResults[0].Success);
Assert.True(File.Exists(result.ItemResults[0].OutputPath));
Assert.Equal("sbom-test-component.policy.json", Path.GetFileName(result.ItemResults[0].OutputPath));
}
[Fact]
public async Task ProcessAsync_WrapsDataWithMetadata()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test", ["tag1", "tag2"]);
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
// Verify wrapper structure
Assert.True(root.TryGetProperty("metadata", out var metadata));
Assert.True(root.TryGetProperty("data", out var data));
// Verify metadata fields
Assert.Equal(itemId.ToString(), metadata.GetProperty("itemId").GetString());
Assert.Equal("sbom", metadata.GetProperty("kind").GetString());
Assert.Equal("test", metadata.GetProperty("name").GetString());
Assert.NotNull(metadata.GetProperty("sha256").GetString());
// Verify data content preserved
Assert.Equal("test", data.GetProperty("name").GetString());
}
[Fact]
public async Task ProcessAsync_WithPolicyEvaluator_IncludesPolicyMetadata()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
_policyEvaluator.AddPolicy(itemId, new PolicyMetadata
{
PolicyId = "policy-001",
PolicyName = "Security Policy",
PolicyVersion = "1.0",
Decision = "allow",
EvaluatedAt = DateTimeOffset.UtcNow,
Violations = []
});
var context = CreateContextWithPolicy([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
Assert.True(root.TryGetProperty("policy", out var policy));
Assert.Equal("policy-001", policy.GetProperty("policyId").GetString());
Assert.Equal("Security Policy", policy.GetProperty("policyName").GetString());
Assert.Equal("allow", policy.GetProperty("decision").GetString());
}
[Fact]
public async Task ProcessAsync_WithPolicyViolations_IncludesViolations()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
_policyEvaluator.AddPolicy(itemId, new PolicyMetadata
{
PolicyId = "policy-001",
Decision = "deny",
Violations =
[
new PolicyViolation
{
RuleId = "CVE-001",
Severity = "critical",
Message = "Critical vulnerability found",
Path = "$.components[0]"
}
]
});
var context = CreateContextWithPolicy([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var violations = doc.RootElement.GetProperty("policy").GetProperty("violations");
Assert.Equal(1, violations.GetArrayLength());
Assert.Equal("CVE-001", violations[0].GetProperty("ruleId").GetString());
Assert.Equal("critical", violations[0].GetProperty("severity").GetString());
}
[Fact]
public async Task ProcessAsync_WithoutPolicyEvaluator_PolicyIsNull()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var root = doc.RootElement;
// Policy should be absent or null when no evaluator is configured
Assert.False(root.TryGetProperty("policy", out var policy) &&
policy.ValueKind != JsonValueKind.Null);
}
[Fact]
public async Task ProcessAsync_NdjsonFormat_CreatesWrappedNdjsonFile()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.Ndjson);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.EndsWith("-policy.ndjson", result.Artifacts[0].Path);
Assert.Equal(3, result.Artifacts[0].ItemCount);
// Verify NDJSON content - each line should be a wrapped item
var content = await File.ReadAllTextAsync(result.Artifacts[0].Path);
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(3, lines.Length);
// Each line should have metadata and data
foreach (var line in lines)
{
using var doc = JsonDocument.Parse(line);
Assert.True(doc.RootElement.TryGetProperty("metadata", out _));
Assert.True(doc.RootElement.TryGetProperty("data", out _));
}
}
[Fact]
public async Task ProcessAsync_WithGzipCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy, CompressionFormat.Gzip);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".policy.json.gz", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
// Verify decompression works
var compressedBytes = await File.ReadAllBytesAsync(result.Artifacts[0].Path);
using var ms = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(ms, CompressionMode.Decompress);
using var reader = new StreamReader(gzip);
var decompressed = await reader.ReadToEndAsync();
Assert.Contains("metadata", decompressed);
}
[Fact]
public async Task ProcessAsync_IncludesChecksums_CreatesChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy, includeChecksums: true);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.True(File.Exists(checksumPath));
}
[Fact]
public async Task ProcessAsync_ManifestCounts_TracksCorrectly()
{
// Arrange
var items = new List<ResolvedExportItem>();
// Add 2 successful sbom items
for (var i = 0; i < 2; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"sbom-{i}"));
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
}
// Add 1 successful vex item
var vexItemId = Guid.NewGuid();
items.Add(CreateItem(vexItemId, "vex", "vex-1"));
_dataFetcher.AddContent(vexItemId, """{"name":"vex"}""");
// Add 1 failing item
var failingItemId = Guid.NewGuid();
items.Add(CreateItem(failingItemId, "attestation", "fail"));
var context = CreateContext(items, ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(4, result.ManifestCounts.TotalItems);
Assert.Equal(3, result.ManifestCounts.SuccessfulItems);
Assert.Equal(1, result.ManifestCounts.FailedItems);
Assert.Equal(2, result.ManifestCounts.ByKind["sbom"]);
Assert.Equal(1, result.ManifestCounts.ByKind["vex"]);
Assert.Equal(1, result.ManifestCounts.ByKind["attestation"]);
}
[Fact]
public async Task ProcessAsync_FetchFailure_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
// Don't add content - will cause fetch failure
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success); // Overall success, individual failure
Assert.Single(result.ItemResults);
Assert.False(result.ItemResults[0].Success);
}
[Fact]
public async Task ProcessStreamAsync_YieldsResultsProgressively()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 5; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.JsonPolicy);
// Act
var results = new List<AdapterItemResult>();
await foreach (var result in _adapter.ProcessStreamAsync(context))
{
results.Add(result);
}
// Assert
Assert.Equal(5, results.Count);
Assert.All(results, r => Assert.True(r.Success));
}
[Fact]
public async Task ValidateConfigAsync_MissingOutputDirectory_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = "",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("Output directory", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_UnsupportedFormat_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Mirror }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("not supported", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_ValidConfig_ReturnsNoErrors()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.Empty(errors);
}
[Fact]
public async Task ProcessAsync_NormalizesJson_SortsKeys()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"zebra":"z","alpha":"a"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
// The data object inside should be sorted
using var doc = JsonDocument.Parse(content);
var dataJson = doc.RootElement.GetProperty("data").GetRawText();
Assert.StartsWith("""{"alpha":""", dataJson);
}
[Fact]
public async Task ProcessAsync_WithRedaction_RedactsSensitiveFields()
{
// Arrange
var adapter = new JsonPolicyAdapter(
NullLogger<JsonPolicyAdapter>.Instance,
new JsonNormalizationOptions { SortKeys = true },
new JsonRedactionOptions { RedactFields = ["secretKey"] });
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","secretKey":"hidden123"}""");
var context = CreateContext([item], ExportFormat.JsonPolicy);
// Act
var result = await adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.DoesNotContain("hidden123", content);
Assert.Contains("[REDACTED]", content);
}
[Fact]
public async Task ProcessAsync_MetadataIncludesExportTimestamp()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var fixedTime = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero);
var timeProvider = new FakeTimeProvider(fixedTime);
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonPolicy },
IncludeChecksums = false
};
var context = new ExportAdapterContext
{
Config = config,
Items = [item],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid(),
TimeProvider = timeProvider
};
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
using var doc = JsonDocument.Parse(content);
var exportedAt = doc.RootElement.GetProperty("metadata").GetProperty("exportedAt").GetString();
Assert.Contains("2025-01-15", exportedAt);
}
private ResolvedExportItem CreateItem(Guid itemId, string kind, string name, IReadOnlyList<string>? tags = null)
{
return new ResolvedExportItem
{
ItemId = itemId,
Kind = kind,
Name = name,
SourceRef = $"test://{name}",
Tags = tags ?? [],
CreatedAt = DateTimeOffset.UtcNow
};
}
private ExportAdapterContext CreateContext(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format,
CompressionFormat compression = CompressionFormat.None,
bool includeChecksums = true)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = format,
Compression = compression
},
IncludeChecksums = includeChecksums
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
}
private ExportAdapterContext CreateContextWithPolicy(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:policy",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = format },
IncludeChecksums = false
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
PolicyEvaluator = _policyEvaluator,
TenantId = Guid.NewGuid()
};
}
private sealed class FakeTimeProvider : TimeProvider
{
private readonly DateTimeOffset _fixedTime;
public FakeTimeProvider(DateTimeOffset fixedTime)
{
_fixedTime = fixedTime;
}
public override DateTimeOffset GetUtcNow() => _fixedTime;
}
}
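// Reference sketch of the wrapper shape the assertions above imply; the exact
// property set is inferred from these tests, not from a published schema.
internal static class PolicyWrapperShapeSketch
{
    public static void Demonstrate()
    {
        const string wrapped = """
        {
          "metadata": { "itemId": "00000000-0000-0000-0000-000000000000", "kind": "sbom", "name": "test", "sha256": "..." },
          "policy": { "policyId": "policy-001", "policyName": "Security Policy", "decision": "allow", "violations": [] },
          "data": { "name": "test" }
        }
        """;
        using var doc = JsonDocument.Parse(wrapped);
        // "policy" is null or absent when no evaluator is configured.
        _ = doc.RootElement.GetProperty("policy").GetProperty("decision").GetString(); // "allow"
    }
}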

View File

@@ -0,0 +1,598 @@
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Adapters;
public sealed class JsonRawAdapterTests : IDisposable
{
private readonly string _tempDir;
private readonly JsonRawAdapter _adapter;
private readonly InMemoryExportDataFetcher _dataFetcher;
public JsonRawAdapterTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"export-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_adapter = new JsonRawAdapter(NullLogger<JsonRawAdapter>.Instance);
_dataFetcher = new InMemoryExportDataFetcher();
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void AdapterId_IsJsonRaw()
{
Assert.Equal("json:raw", _adapter.AdapterId);
}
[Fact]
public void DisplayName_IsSet()
{
Assert.Equal("JSON Raw", _adapter.DisplayName);
}
[Fact]
public void SupportedFormats_IncludesJsonRawAndNdjson()
{
Assert.Contains(ExportFormat.JsonRaw, _adapter.SupportedFormats);
Assert.Contains(ExportFormat.Ndjson, _adapter.SupportedFormats);
}
[Fact]
public void SupportsStreaming_IsTrue()
{
Assert.True(_adapter.SupportsStreaming);
}
[Fact]
public async Task ProcessAsync_SingleItem_CreatesJsonFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test-component");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Single(result.ItemResults);
Assert.True(result.ItemResults[0].Success);
Assert.True(File.Exists(result.ItemResults[0].OutputPath));
Assert.Equal("sbom-test-component.json", Path.GetFileName(result.ItemResults[0].OutputPath));
}
[Fact]
public async Task ProcessAsync_MultipleItems_CreatesMultipleFiles()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "name": "component-{{i}}", "version": "1.0.0" }""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(3, result.Artifacts.Count);
Assert.Equal(3, result.ItemResults.Count);
Assert.All(result.ItemResults, r => Assert.True(r.Success));
Assert.All(result.Artifacts, a => Assert.True(File.Exists(a.Path)));
}
[Fact]
public async Task ProcessAsync_NdjsonFormat_CreatesSingleFile()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 3; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"component-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "name": "component-{{i}}" }""");
}
var context = CreateContext(items, ExportFormat.Ndjson);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Single(result.Artifacts);
Assert.Equal(3, result.ItemResults.Count);
Assert.EndsWith(".ndjson", result.Artifacts[0].Path);
Assert.Equal(3, result.Artifacts[0].ItemCount);
// Verify NDJSON format
var content = await File.ReadAllTextAsync(result.Artifacts[0].Path);
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(3, lines.Length);
}
[Fact]
public async Task ProcessAsync_WithGzipCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, CompressionFormat.Gzip);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".json.gz", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
Assert.Equal(CompressionFormat.Gzip, result.Artifacts[0].Compression);
// Verify it's actually gzip compressed
var compressedBytes = await File.ReadAllBytesAsync(result.Artifacts[0].Path);
using var ms = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(ms, CompressionMode.Decompress);
using var reader = new StreamReader(gzip);
var decompressed = await reader.ReadToEndAsync();
Assert.Contains("test", decompressed);
}
[Fact]
public async Task ProcessAsync_WithBrotliCompression_CreatesCompressedFile()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, CompressionFormat.Brotli);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.EndsWith(".json.br", result.Artifacts[0].Path);
Assert.True(result.Artifacts[0].IsCompressed);
Assert.Equal(CompressionFormat.Brotli, result.Artifacts[0].Compression);
}
[Fact]
public async Task ProcessAsync_IncludesChecksums_CreatesChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, includeChecksums: true);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.True(File.Exists(checksumPath));
var checksumContent = await File.ReadAllTextAsync(checksumPath);
Assert.Contains("sbom-test.json", checksumContent);
Assert.Equal(64 + 2 + "sbom-test.json".Length + 1, checksumContent.Length); // 64 hex chars + two spaces + filename + newline
}
[Fact]
public async Task ProcessAsync_DisabledChecksums_NoChecksumFiles()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
var context = CreateContext([item], ExportFormat.JsonRaw, includeChecksums: false);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var checksumPath = result.ItemResults[0].OutputPath + ".sha256";
Assert.False(File.Exists(checksumPath));
}
[Fact]
public async Task ProcessAsync_NormalizesJson_SortsKeys()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"zebra":"z","alpha":"a"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
// Keys should be sorted alphabetically
Assert.StartsWith("""{"alpha":""", content);
}
[Fact]
public async Task ProcessAsync_ManifestCounts_TracksCorrectly()
{
// Arrange
var items = new List<ResolvedExportItem>();
// Add 2 successful items
for (var i = 0; i < 2; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"success-{i}"));
_dataFetcher.AddContent(itemId, """{"name":"test"}""");
}
// Add 1 item that will fail (no content)
var failingItemId = Guid.NewGuid();
items.Add(CreateItem(failingItemId, "vex", "fail"));
// Don't add content - will cause fetch failure
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.Equal(3, result.ManifestCounts.TotalItems);
Assert.Equal(2, result.ManifestCounts.SuccessfulItems);
Assert.Equal(1, result.ManifestCounts.FailedItems);
Assert.Equal(2, result.ManifestCounts.ByKind["sbom"]);
Assert.Equal(1, result.ManifestCounts.ByKind["vex"]);
}
[Fact]
public async Task ProcessAsync_FetchFailure_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
// Don't add content - will cause fetch failure
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success); // Overall success, individual failure
Assert.Single(result.ItemResults);
Assert.False(result.ItemResults[0].Success);
Assert.Contains("not found", result.ItemResults[0].ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task ProcessAsync_EmptyContent_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, "");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.False(result.ItemResults[0].Success);
Assert.Contains("empty", result.ItemResults[0].ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task ProcessAsync_InvalidJson_RecordsItemError()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, "{invalid json}");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
Assert.False(result.ItemResults[0].Success);
}
[Fact]
public async Task ProcessStreamAsync_YieldsResultsProgressively()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 5; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, $$"""{ "index": {{i}} }""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
// Act
var results = new List<AdapterItemResult>();
await foreach (var result in _adapter.ProcessStreamAsync(context))
{
results.Add(result);
}
// Assert
Assert.Equal(5, results.Count);
Assert.All(results, r => Assert.True(r.Success));
}
[Fact]
public async Task ProcessStreamAsync_CancellationStopsProcessing()
{
// Arrange
var items = new List<ResolvedExportItem>();
for (var i = 0; i < 10; i++)
{
var itemId = Guid.NewGuid();
items.Add(CreateItem(itemId, "sbom", $"item-{i}"));
_dataFetcher.AddContent(itemId, """{"test":true}""");
}
var context = CreateContext(items, ExportFormat.JsonRaw);
using var cts = new CancellationTokenSource();
// Act
var count = 0;
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
{
await foreach (var result in _adapter.ProcessStreamAsync(context, cts.Token))
{
count++;
if (count >= 3)
{
cts.Cancel();
}
}
});
// Assert
Assert.True(count >= 3);
Assert.True(count < 10);
}
[Fact]
public async Task ValidateConfigAsync_MissingOutputDirectory_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = "",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("Output directory", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_UnsupportedFormat_ReturnsError()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Csv }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.NotEmpty(errors);
Assert.Contains("not supported", errors[0]);
}
[Fact]
public async Task ValidateConfigAsync_ValidConfig_ReturnsNoErrors()
{
// Arrange
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
// Act
var errors = await _adapter.ValidateConfigAsync(config);
// Assert
Assert.Empty(errors);
}
[Fact]
public async Task ProcessAsync_PrettyPrint_FormatsOutput()
{
// Arrange
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","version":"1.0.0"}""");
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = ExportFormat.JsonRaw,
PrettyPrint = true
},
IncludeChecksums = false
};
var context = new ExportAdapterContext
{
Config = config,
Items = [item],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
// Act
var result = await _adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.Contains("\n", content); // Pretty printed has newlines
}
[Fact]
public async Task ProcessAsync_WithRedaction_RedactsSensitiveFields()
{
// Arrange
var adapter = new JsonRawAdapter(
NullLogger<JsonRawAdapter>.Instance,
new JsonNormalizationOptions { SortKeys = true },
new JsonRedactionOptions { RedactFields = ["apiKey"] });
var itemId = Guid.NewGuid();
var item = CreateItem(itemId, "sbom", "test");
_dataFetcher.AddContent(itemId, """{"name":"test","apiKey":"secret123"}""");
var context = CreateContext([item], ExportFormat.JsonRaw);
// Act
var result = await adapter.ProcessAsync(context);
// Assert
Assert.True(result.Success);
var content = await File.ReadAllTextAsync(result.ItemResults[0].OutputPath!);
Assert.DoesNotContain("secret123", content);
Assert.Contains("[REDACTED]", content);
}
[Fact]
public async Task ProcessAsync_DeterministicOutput_SameInputSameHash()
{
// Arrange
var itemId1 = Guid.NewGuid();
var item1 = CreateItem(itemId1, "sbom", "test");
_dataFetcher.AddContent(itemId1, """{"z":"2","a":"1"}""");
var context1 = CreateContext([item1], ExportFormat.JsonRaw);
var result1 = await _adapter.ProcessAsync(context1);
// Reset for second run
var dir2 = Path.Combine(Path.GetTempPath(), $"export-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(dir2);
try
{
var itemId2 = Guid.NewGuid();
var item2 = CreateItem(itemId2, "sbom", "test");
_dataFetcher.AddContent(itemId2, """{"a":"1","z":"2"}"""); // Same data, different order
var config2 = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = dir2,
FormatOptions = new ExportFormatOptions { Format = ExportFormat.JsonRaw }
};
var context2 = new ExportAdapterContext
{
Config = config2,
Items = [item2],
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
var result2 = await _adapter.ProcessAsync(context2);
// Assert - both should have same content hash after normalization
var content1 = await File.ReadAllTextAsync(result1.ItemResults[0].OutputPath!);
var content2 = await File.ReadAllTextAsync(result2.ItemResults[0].OutputPath!);
Assert.Equal(content1, content2);
}
finally
{
Directory.Delete(dir2, recursive: true);
}
}
private ResolvedExportItem CreateItem(Guid itemId, string kind, string name)
{
return new ResolvedExportItem
{
ItemId = itemId,
Kind = kind,
Name = name,
SourceRef = $"test://{name}",
CreatedAt = DateTimeOffset.UtcNow
};
}
private ExportAdapterContext CreateContext(
IReadOnlyList<ResolvedExportItem> items,
ExportFormat format,
CompressionFormat compression = CompressionFormat.None,
bool includeChecksums = true)
{
var config = new ExportAdapterConfig
{
AdapterId = "json:raw",
OutputDirectory = _tempDir,
FormatOptions = new ExportFormatOptions
{
Format = format,
Compression = compression
},
IncludeChecksums = includeChecksums
};
return new ExportAdapterContext
{
Config = config,
Items = items,
DataFetcher = _dataFetcher,
TenantId = Guid.NewGuid()
};
}
}
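// The ".sha256" sidecars asserted above appear to follow the coreutils
// sha256sum line format "<64 hex chars>  <filename>\n" (inferred from the
// 64 + 2 + filename + newline length check). A verification sketch:
internal static class ChecksumSidecarSketch
{
    public static bool Matches(string sidecarLine, string expectedHex, string fileName)
    {
        var parts = sidecarLine.TrimEnd('\n').Split("  ", 2);
        return parts.Length == 2
            && string.Equals(parts[0], expectedHex, StringComparison.OrdinalIgnoreCase)
            && parts[1] == fileName;
    }
}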

View File

@@ -0,0 +1,394 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyDbAdapterTests
{
private readonly TrivyAdapterOptions _defaultOptions;
private readonly TrivyDbAdapter _adapter;
public TrivyDbAdapterTests()
{
_defaultOptions = new TrivyAdapterOptions();
var options = Options.Create(_defaultOptions);
_adapter = new TrivyDbAdapter(options, NullLogger<TrivyDbAdapter>.Instance);
}
[Fact]
public void Name_ReturnsTrivyDb()
{
Assert.Equal("trivy:db", _adapter.Name);
}
[Fact]
public void AdapterId_ReturnsExpected()
{
Assert.Equal("adapter:trivy:db", _adapter.AdapterId);
}
[Fact]
public void SchemaVersion_ReturnsV2()
{
Assert.Equal(TrivySchemaVersion.V2, _adapter.SchemaVersion);
}
[Fact]
public void ValidateConfiguration_WithV2_Succeeds()
{
// Should not throw
_adapter.ValidateConfiguration();
}
[Fact]
public void ValidateConfiguration_WithV3_Throws()
{
var options = new TrivyAdapterOptions { SchemaVersion = 3 };
var adapter = new TrivyDbAdapter(Options.Create(options), NullLogger<TrivyDbAdapter>.Instance);
var exception = Assert.Throws<TrivyAdapterException>(() => adapter.ValidateConfiguration());
Assert.Equal(TrivyAdapterErrors.UnsupportedSchemaVersion, exception.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithValidAdvisory_ReturnsValid()
{
var advisory = CreateValidAdvisory();
var result = _adapter.ValidateAdvisory(advisory);
Assert.True(result.IsValid);
}
[Fact]
public void ValidateAdvisory_WithNoIdentifiers_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers()
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.InvalidAdvisory, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithNoVendor_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.InvalidAdvisory, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithUnsupportedNamespace_ReturnsInvalid()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "UnsupportedVendor" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.False(result.IsValid);
Assert.Equal(TrivyAdapterErrors.UnsupportedNamespace, result.ErrorCode);
}
[Fact]
public void ValidateAdvisory_WithMissingSeverity_ReturnsValidWithWarning()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
// No severity or CVSS
};
var result = _adapter.ValidateAdvisory(advisory);
Assert.True(result.IsValid);
Assert.NotNull(result.Warnings);
Assert.Contains(result.Warnings, w => w.Contains("UNKNOWN severity"));
}
[Fact]
public void TransformAdvisory_WithValidAdvisory_ReturnsRecords()
{
var advisory = CreateValidAdvisory();
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
var record = records[0];
Assert.Equal("ubuntu:22.04", record.Namespace);
Assert.Equal("openssl", record.Package.Name);
Assert.Equal("CVE-2024-12345", record.Vulnerability.Id);
Assert.Equal("HIGH", record.Vulnerability.Severity);
}
[Fact]
public void TransformAdvisory_WithUnsupportedNamespace_ReturnsEmptyList()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "UnsupportedVendor" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage { Name = "some-package" }
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.Empty(records);
}
[Fact]
public void TransformAdvisory_MapsSeverityCorrectly()
{
var testCases = new (string input, string expected)[]
{
("critical", "CRITICAL"),
("high", "HIGH"),
("medium", "MEDIUM"),
("low", "LOW"),
("none", "UNKNOWN"),
("info", "UNKNOWN")
};
foreach (var (input, expected) in testCases)
{
var advisory = CreateValidAdvisory();
advisory = advisory with
{
Severity = new TrivyAdapterSeverity { Normalized = input }
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal(expected, records[0].Vulnerability.Severity);
}
}
[Fact]
public void TransformAdvisory_TruncatesTitleToMaxLength()
{
var longTitle = new string('A', 300);
var advisory = CreateValidAdvisory();
advisory = advisory with { Summary = longTitle };
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.NotNull(records[0].Vulnerability.Title);
Assert.Equal(256, records[0].Vulnerability.Title!.Length);
Assert.NotNull(records[0].Vulnerability.Description);
Assert.Contains("A", records[0].Vulnerability.Description!);
}
[Fact]
public void TransformAdvisory_WithCvssButNoSeverity_DerivesSeverityFromScore()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Severity = null,
Cvss =
[
new TrivyAdapterCvss { Vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", Score = 9.8 }
],
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage { Name = "openssl" }
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal("CRITICAL", records[0].Vulnerability.Severity);
}
[Fact]
public async Task TransformAsync_WithMultipleAdvisories_ProducesUniqueRecords()
{
var advisories = AsyncEnumerable([
CreateValidAdvisory(),
CreateValidAdvisory(), // Duplicate
CreateValidAdvisory() with
{
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-67890"] }
}
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Records.Count); // Only 2 unique records
Assert.Equal(1, result.DuplicatesRemoved);
Assert.Equal(3, result.TotalInputRecords);
}
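// Dedup appears to be keyed on the transformed record identity (namespace,
// package, vulnerability id), so the byte-identical advisories collapse into
// one record while the distinct CVE above survives.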
[Fact]
public async Task TransformAsync_WithEmptyInput_ThrowsWhenAllowEmptyIsFalse()
{
var advisories = AsyncEnumerable(Array.Empty<TrivyAdapterInputAdvisory>());
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
await Assert.ThrowsAsync<TrivyAdapterException>(
() => _adapter.TransformAsync(advisories, context));
}
[Fact]
public async Task TransformAsync_WithEmptyInput_SucceedsWhenAllowEmptyIsTrue()
{
var options = new TrivyAdapterOptions { AllowEmpty = true };
var adapter = new TrivyDbAdapter(Options.Create(options), NullLogger<TrivyDbAdapter>.Instance);
var advisories = AsyncEnumerable(Array.Empty<TrivyAdapterInputAdvisory>());
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await adapter.TransformAsync(advisories, context);
Assert.Empty(result.Records);
}
[Fact]
public async Task TransformAsync_ProducesCorrectMetadata()
{
var advisories = AsyncEnumerable([CreateValidAdvisory()]);
var generatedAt = new DateTimeOffset(2025, 12, 11, 12, 0, 0, TimeSpan.Zero);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant",
PolicySnapshotId = "policy-snap-42",
GeneratedAt = generatedAt
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Metadata.SchemaVersion);
Assert.Equal(generatedAt, result.Metadata.UpdatedAt);
Assert.NotNull(result.Metadata.Stella);
Assert.Equal("test-run-1", result.Metadata.Stella.RunId);
Assert.Equal("test-profile-1", result.Metadata.Stella.ProfileId);
Assert.Equal("test-tenant", result.Metadata.Stella.Tenant);
Assert.Equal("policy-snap-42", result.Metadata.Stella.PolicySnapshotId);
}
[Fact]
public void IsNamespaceSupported_WithKnownNamespaces_ReturnsTrue()
{
var supported = new[] { "Ubuntu", "Debian", "Alpine", "Red Hat" };
foreach (var vendor in supported)
{
Assert.True(_adapter.IsNamespaceSupported(vendor, null), $"{vendor} should be supported");
}
}
[Fact]
public void IsNamespaceSupported_WithUnknownNamespace_ReturnsFalse()
{
Assert.False(_adapter.IsNamespaceSupported("UnknownVendor", null));
}
[Fact]
public void IsEcosystemSupported_WithKnownEcosystems_ReturnsTrue()
{
var supported = new[] { "npm", "pip", "nuget", "go", "cargo" };
foreach (var ecosystem in supported)
{
Assert.True(_adapter.IsEcosystemSupported(ecosystem), $"{ecosystem} should be supported");
}
}
[Fact]
public void IsEcosystemSupported_WithJavaEcosystem_ReturnsTrue()
{
// Java ecosystems are supported for routing but handled by the Java DB adapter
Assert.True(_adapter.IsEcosystemSupported("maven"));
}
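// A minimal sketch of that routing, assuming a dispatcher that can see both
// adapters (the wiring here is illustrative, not the production API; only
// HasJavaPackages and TransformAdvisory are taken from these tests):
//
// var records = javaDbAdapter.HasJavaPackages(advisory)
// ? javaDbAdapter.TransformAdvisory(advisory)
// : dbAdapter.TransformAdvisory(advisory);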
private static TrivyAdapterInputAdvisory CreateValidAdvisory()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "Ubuntu", Product = "22.04" },
Identifiers = new TrivyAdapterIdentifiers
{
Cve = ["CVE-2024-12345"]
},
Summary = "Test vulnerability",
Description = "A test vulnerability description.",
Severity = new TrivyAdapterSeverity { Normalized = "high" },
Published = DateTimeOffset.UtcNow.AddDays(-30),
Modified = DateTimeOffset.UtcNow.AddDays(-1),
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "openssl",
Ecosystem = "ubuntu",
Nevra = "1.1.1f-1ubuntu2.12"
},
VulnerableRange = "< 1.1.1f-1ubuntu2.13",
Remediations =
[
new TrivyAdapterRemediation { FixedVersion = "1.1.1f-1ubuntu2.13" }
]
}
]
};
}
private static async IAsyncEnumerable<T> AsyncEnumerable<T>(T[] items)
{
foreach (var item in items)
{
yield return item;
}
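// Satisfies the compiler: an async iterator with no await would trigger warning CS1998.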
await Task.CompletedTask;
}
}

View File

@@ -0,0 +1,453 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyJavaDbAdapterTests
{
private readonly TrivyAdapterOptions _defaultOptions;
private readonly TrivyJavaDbAdapter _adapter;
public TrivyJavaDbAdapterTests()
{
_defaultOptions = new TrivyAdapterOptions { IncludeJavaDb = true };
var options = Options.Create(_defaultOptions);
_adapter = new TrivyJavaDbAdapter(options, NullLogger<TrivyJavaDbAdapter>.Instance);
}
[Fact]
public void Name_ReturnsTrivyJavaDb()
{
Assert.Equal("trivy:java-db", _adapter.Name);
}
[Fact]
public void AdapterId_ReturnsExpected()
{
Assert.Equal("adapter:trivy:java-db", _adapter.AdapterId);
}
[Fact]
public void SupportedEcosystems_ContainsMavenGradleSbt()
{
Assert.Contains("maven", _adapter.SupportedEcosystems);
Assert.Contains("gradle", _adapter.SupportedEcosystems);
Assert.Contains("sbt", _adapter.SupportedEcosystems);
}
[Fact]
public void ValidateConfiguration_WithV2_Succeeds()
{
_adapter.ValidateConfiguration();
}
[Fact]
public void ValidateConfiguration_WithV3_Throws()
{
var options = new TrivyAdapterOptions { SchemaVersion = 3 };
var adapter = new TrivyJavaDbAdapter(Options.Create(options), NullLogger<TrivyJavaDbAdapter>.Instance);
var exception = Assert.Throws<TrivyAdapterException>(() => adapter.ValidateConfiguration());
Assert.Equal(TrivyAdapterErrors.UnsupportedSchemaVersion, exception.ErrorCode);
}
[Fact]
public void HasJavaPackages_WithMavenPackage_ReturnsTrue()
{
var advisory = CreateMavenAdvisory();
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithGradlePackage_ReturnsTrue()
{
var advisory = CreateAdvisoryWithEcosystem("gradle");
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithSbtPackage_ReturnsTrue()
{
var advisory = CreateAdvisoryWithEcosystem("sbt");
Assert.True(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithNpmPackage_ReturnsFalse()
{
var advisory = CreateAdvisoryWithEcosystem("npm");
Assert.False(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void HasJavaPackages_WithNoAffects_ReturnsFalse()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] }
};
Assert.False(_adapter.HasJavaPackages(advisory));
}
[Fact]
public void TransformAdvisory_WithMavenPackage_ReturnsRecords()
{
var advisory = CreateMavenAdvisory();
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
var record = records[0];
Assert.Equal("maven", record.Namespace);
Assert.Equal("org.apache.logging.log4j", record.Package.GroupId);
Assert.Equal("log4j-core", record.Package.ArtifactId);
Assert.Equal("org.apache.logging.log4j:log4j-core", record.Package.Name);
Assert.Equal("CVE-2021-44228", record.Vulnerability.Id);
}
[Fact]
public void TransformAdvisory_WithNonJavaPackage_ReturnsEmptyList()
{
var advisory = CreateAdvisoryWithEcosystem("npm");
var records = _adapter.TransformAdvisory(advisory);
Assert.Empty(records);
}
[Fact]
public void ParseMavenCoordinates_WithPurl_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
null,
"pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1");
Assert.NotNull(coords);
Assert.Equal("org.apache.logging.log4j", coords.GroupId);
Assert.Equal("log4j-core", coords.ArtifactId);
Assert.Equal("2.14.1", coords.Version);
}
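// package-url coordinates follow pkg:maven/<group>/<artifact>@<version>, so the
// group and artifact fall out of the path segments and the version (when
// present) out of the @ suffix.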
[Fact]
public void ParseMavenCoordinates_WithPurlNoVersion_ReturnsCoordinatesWithoutVersion()
{
var coords = _adapter.ParseMavenCoordinates(
null,
"pkg:maven/com.example/my-artifact");
Assert.NotNull(coords);
Assert.Equal("com.example", coords.GroupId);
Assert.Equal("my-artifact", coords.ArtifactId);
Assert.Null(coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithColonFormat_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"org.springframework:spring-core:5.3.0",
null);
Assert.NotNull(coords);
Assert.Equal("org.springframework", coords.GroupId);
Assert.Equal("spring-core", coords.ArtifactId);
Assert.Equal("5.3.0", coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithColonFormatNoVersion_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"com.google.guava:guava",
null);
Assert.NotNull(coords);
Assert.Equal("com.google.guava", coords.GroupId);
Assert.Equal("guava", coords.ArtifactId);
Assert.Null(coords.Version);
}
[Fact]
public void ParseMavenCoordinates_WithSlashFormat_ReturnsCoordinates()
{
var coords = _adapter.ParseMavenCoordinates(
"org.example/artifact-name",
null);
Assert.NotNull(coords);
Assert.Equal("org.example", coords.GroupId);
Assert.Equal("artifact-name", coords.ArtifactId);
}
[Fact]
public void ParseMavenCoordinates_WithInvalidFormat_ReturnsNull()
{
var coords = _adapter.ParseMavenCoordinates(
"single-name-no-separator",
null);
Assert.Null(coords);
}
[Theory]
[InlineData("< 2.15.0", "(,2.15.0)")]
[InlineData("<= 2.15.0", "(,2.15.0]")]
[InlineData("> 1.0.0", "(1.0.0,)")]
[InlineData(">= 1.0.0", "[1.0.0,)")]
[InlineData("= 2.0.0", "[2.0.0]")]
[InlineData("[1.0.0,2.0.0)", "[1.0.0,2.0.0)")]
public void TransformAdvisory_ConvertsVersionRangeToMavenFormat(string input, string expected)
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.example:test-artifact",
Ecosystem = "maven"
},
VulnerableRange = input
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.NotNull(records[0].Package.VulnerableVersions);
Assert.Contains(expected, records[0].Package.VulnerableVersions!);
}
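// Maven range notation used above: round brackets are exclusive, square
// brackets inclusive, and an omitted bound is open-ended; e.g. "(,2.15.0)"
// means every version strictly below 2.15.0.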
[Fact]
public async Task TransformAsync_WithMultipleAdvisories_DeduplicatesRecords()
{
var advisories = AsyncEnumerable([
CreateMavenAdvisory(),
CreateMavenAdvisory(), // Duplicate
CreateAdvisoryWithDifferentCve()
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Records.Count);
Assert.Equal(1, result.DuplicatesRemoved);
}
[Fact]
public async Task TransformAsync_WithMixedEcosystems_FiltersToJavaOnly()
{
var advisories = AsyncEnumerable([
CreateMavenAdvisory(),
CreateAdvisoryWithEcosystem("npm"), // Should be skipped
CreateAdvisoryWithEcosystem("pip") // Should be skipped
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Single(result.Records);
Assert.Equal(2, result.SkippedNonJavaEcosystem);
}
[Fact]
public async Task TransformAsync_ProducesCorrectMetadata()
{
var advisories = AsyncEnumerable([CreateMavenAdvisory()]);
var generatedAt = new DateTimeOffset(2025, 12, 11, 12, 0, 0, TimeSpan.Zero);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant",
GeneratedAt = generatedAt
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(2, result.Metadata.SchemaVersion);
Assert.Contains("maven", result.Metadata.Ecosystems);
Assert.Contains("gradle", result.Metadata.Ecosystems);
Assert.Contains("sbt", result.Metadata.Ecosystems);
Assert.Equal(generatedAt, result.Metadata.UpdatedAt);
Assert.NotNull(result.Metadata.Stella);
}
[Fact]
public async Task TransformAsync_RecordsAreSortedDeterministically()
{
var advisories = AsyncEnumerable([
CreateAdvisoryWithGroupArtifact("z.group", "z-artifact", "CVE-2024-00003"),
CreateAdvisoryWithGroupArtifact("a.group", "a-artifact", "CVE-2024-00001"),
CreateAdvisoryWithGroupArtifact("a.group", "b-artifact", "CVE-2024-00002")
]);
var context = new TrivyAdapterContext
{
RunId = "test-run-1",
ProfileId = "test-profile-1",
TenantId = "test-tenant"
};
var result = await _adapter.TransformAsync(advisories, context);
Assert.Equal(3, result.Records.Count);
Assert.Equal("a.group", result.Records[0].Package.GroupId);
Assert.Equal("a-artifact", result.Records[0].Package.ArtifactId);
Assert.Equal("a.group", result.Records[1].Package.GroupId);
Assert.Equal("b-artifact", result.Records[1].Package.ArtifactId);
Assert.Equal("z.group", result.Records[2].Package.GroupId);
}
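// Stable (group, artifact, CVE) ordering keeps repeated exports byte-identical,
// which downstream determinism checks depend on.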
[Fact]
public void TransformAdvisory_WithGroupAndArtifactInPackage_UsesDirectCoordinates()
{
var advisory = new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "some-name",
Ecosystem = "maven",
Group = "direct.group",
Artifact = "direct-artifact"
}
}
]
};
var records = _adapter.TransformAdvisory(advisory);
Assert.NotEmpty(records);
Assert.Equal("direct.group", records[0].Package.GroupId);
Assert.Equal("direct-artifact", records[0].Package.ArtifactId);
}
private static TrivyAdapterInputAdvisory CreateMavenAdvisory()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers
{
Cve = ["CVE-2021-44228"]
},
Summary = "Log4j RCE vulnerability",
Severity = new TrivyAdapterSeverity { Normalized = "critical" },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.apache.logging.log4j:log4j-core",
Ecosystem = "maven",
Purl = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1"
},
VulnerableRange = "< 2.15.0",
Remediations =
[
new TrivyAdapterRemediation { FixedVersion = "2.15.0" }
]
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithEcosystem(string ecosystem)
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2024-12345"] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = ecosystem == "maven" ? "org.example:test" : "test-package",
Ecosystem = ecosystem
}
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithDifferentCve()
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = ["CVE-2021-45046"] },
Summary = "Log4j second vulnerability",
Severity = new TrivyAdapterSeverity { Normalized = "critical" },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = "org.apache.logging.log4j:log4j-core",
Ecosystem = "maven"
},
VulnerableRange = "< 2.16.0"
}
]
};
}
private static TrivyAdapterInputAdvisory CreateAdvisoryWithGroupArtifact(
string groupId, string artifactId, string cve)
{
return new TrivyAdapterInputAdvisory
{
Source = new TrivyAdapterSource { Vendor = "NVD" },
Identifiers = new TrivyAdapterIdentifiers { Cve = [cve] },
Affects =
[
new TrivyAdapterAffected
{
Package = new TrivyAdapterPackage
{
Name = $"{groupId}:{artifactId}",
Ecosystem = "maven"
}
}
]
};
}
private static async IAsyncEnumerable<T> AsyncEnumerable<T>(T[] items)
{
foreach (var item in items)
{
yield return item;
}
await Task.CompletedTask;
}
}

View File

@@ -0,0 +1,172 @@
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivyNamespaceMapperTests
{
private readonly TrivyNamespaceMapper _mapper;
public TrivyNamespaceMapperTests()
{
_mapper = new TrivyNamespaceMapper(new TrivyAdapterOptions());
}
[Theory]
[InlineData("Ubuntu", "22.04", "ubuntu:22.04")]
[InlineData("Ubuntu", "20.04", "ubuntu:20.04")]
[InlineData("Ubuntu", "18.04", "ubuntu:18.04")]
[InlineData("Ubuntu", "24.04", "ubuntu:24.04")]
[InlineData("Debian", "11", "debian:11")]
[InlineData("Debian", "12", "debian:12")]
[InlineData("Alpine", "3.18", "alpine:3.18")]
[InlineData("Alpine", "3.19", "alpine:3.19")]
public void MapNamespace_WithKnownDistribution_ReturnsCorrectMapping(
string vendor, string product, string expected)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
Assert.Equal(NamespaceKind.Distribution, result.Kind);
}
[Theory]
[InlineData("Red Hat Enterprise Linux 8", null, "redhat:8")]
[InlineData("RHEL 9", null, "redhat:9")]
[InlineData("Amazon Linux 2", null, "amazon:2")]
[InlineData("AL2023", null, "amazon:2023")]
[InlineData("Rocky Linux 9", null, "rocky:9")]
[InlineData("Oracle Linux 8", null, "oracle:8")]
public void MapNamespace_WithVendorAliases_ReturnsCorrectMapping(
string vendor, string? product, string expected)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
[Fact]
public void MapNamespace_WithDebianCodenames_ReturnsCorrectMapping()
{
var testCases = new (string vendor, string? product, string expected)[]
{
("Debian Bookworm", null, "debian:12"),
("Debian Bullseye", null, "debian:11"),
("Debian Buster", null, "debian:10")
};
foreach (var (vendor, product, expected) in testCases)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}
[Fact]
public void MapNamespace_WithUbuntuCodenames_ReturnsCorrectMapping()
{
var testCases = new (string vendor, string? product, string expected)[]
{
("Ubuntu Jammy", null, "ubuntu:22.04"),
("Ubuntu Focal", null, "ubuntu:20.04"),
("Ubuntu Bionic", null, "ubuntu:18.04")
};
foreach (var (vendor, product, expected) in testCases)
{
var result = _mapper.MapNamespace(vendor, product);
Assert.NotNull(result);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void MapNamespace_WithNullOrEmptyVendor_ReturnsNull(string? vendor)
{
var result = _mapper.MapNamespace(vendor, null);
Assert.Null(result);
}
[Fact]
public void MapNamespace_WithUnsupportedVendor_ReturnsNull()
{
var result = _mapper.MapNamespace("UnsupportedVendor", null);
Assert.Null(result);
}
[Theory]
[InlineData("npm", "npm", NamespaceKind.OssEcosystem)]
[InlineData("pip", "pip", NamespaceKind.OssEcosystem)]
[InlineData("nuget", "nuget", NamespaceKind.OssEcosystem)]
[InlineData("go", "go", NamespaceKind.OssEcosystem)]
[InlineData("cargo", "cargo", NamespaceKind.OssEcosystem)]
[InlineData("composer", "composer", NamespaceKind.OssEcosystem)]
[InlineData("gem", "gem", NamespaceKind.OssEcosystem)]
public void MapEcosystem_WithOssEcosystems_ReturnsCorrectMapping(
string ecosystem, string expectedName, NamespaceKind expectedKind)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.NotNull(result);
Assert.Equal(expectedName, result.Name);
Assert.Equal(expectedKind, result.Kind);
}
[Theory]
[InlineData("maven")]
[InlineData("gradle")]
[InlineData("sbt")]
public void MapEcosystem_WithJavaEcosystems_ReturnsJavaEcosystemKind(string ecosystem)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.NotNull(result);
Assert.Equal(ecosystem, result.Name);
Assert.Equal(NamespaceKind.JavaEcosystem, result.Kind);
}
[Theory]
[InlineData("pypi", "pip")]
[InlineData("rubygems", "gem")]
public void MapEcosystem_WithAliases_NormalizesToCanonical(string input, string expected)
{
var result = _mapper.MapEcosystem(input);
Assert.NotNull(result);
Assert.Equal(expected, result.Name);
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void MapEcosystem_WithNullOrEmpty_ReturnsNull(string? ecosystem)
{
var result = _mapper.MapEcosystem(ecosystem);
Assert.Null(result);
}
[Fact]
public void MapEcosystem_WithUnsupportedEcosystem_ReturnsNull()
{
var result = _mapper.MapEcosystem("unsupported-ecosystem");
Assert.Null(result);
}
[Theory]
[InlineData("ubuntu", "22.04", "ubuntu:22.04")]
[InlineData("debian", null, "debian")]
[InlineData("npm", null, "npm")]
public void FormatNamespace_FormatsCorrectly(string name, string? version, string expected)
{
var result = new TrivyNamespaceResult(name, version, NamespaceKind.Distribution);
Assert.Equal(expected, TrivyNamespaceMapper.FormatNamespace(result));
}
}

View File

@@ -0,0 +1,82 @@
using StellaOps.ExportCenter.WebService.Adapters.Trivy;
namespace StellaOps.ExportCenter.Tests.Adapters.Trivy;
public class TrivySeverityMapperTests
{
[Theory]
[InlineData("critical", "CRITICAL")]
[InlineData("CRITICAL", "CRITICAL")]
[InlineData("Critical", "CRITICAL")]
[InlineData("high", "HIGH")]
[InlineData("HIGH", "HIGH")]
[InlineData("medium", "MEDIUM")]
[InlineData("MEDIUM", "MEDIUM")]
[InlineData("low", "LOW")]
[InlineData("LOW", "LOW")]
[InlineData("none", "UNKNOWN")]
[InlineData("info", "UNKNOWN")]
[InlineData("informational", "UNKNOWN")]
[InlineData("unknown", "UNKNOWN")]
public void MapSeverity_ReturnsCorrectMapping(string input, string expected)
{
var result = TrivySeverityMapper.MapSeverity(input);
Assert.Equal(expected, result);
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void MapSeverity_WithNullOrEmpty_ReturnsUnknown(string? input)
{
var result = TrivySeverityMapper.MapSeverity(input);
Assert.Equal("UNKNOWN", result);
}
[Theory]
[InlineData("invalid")]
[InlineData("something")]
[InlineData("severe")]
public void MapSeverity_WithUnknownValue_ReturnsUnknown(string input)
{
var result = TrivySeverityMapper.MapSeverity(input);
Assert.Equal("UNKNOWN", result);
}
[Theory]
[InlineData(10.0, "CRITICAL")]
[InlineData(9.8, "CRITICAL")]
[InlineData(9.0, "CRITICAL")]
[InlineData(8.9, "HIGH")]
[InlineData(7.0, "HIGH")]
[InlineData(6.9, "MEDIUM")]
[InlineData(4.0, "MEDIUM")]
[InlineData(3.9, "LOW")]
[InlineData(0.1, "LOW")]
[InlineData(0.0, "UNKNOWN")]
public void SeverityFromCvssScore_ReturnsCorrectSeverity(double score, string expected)
{
var result = TrivySeverityMapper.SeverityFromCvssScore(score);
Assert.Equal(expected, result);
}
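// These bands match the CVSS v3.x qualitative scale (9.0+ Critical, 7.0-8.9
// High, 4.0-6.9 Medium, 0.1-3.9 Low), except that 0.0 maps to UNKNOWN here
// rather than None.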
[Theory]
[InlineData("CRITICAL", 0)]
[InlineData("HIGH", 1)]
[InlineData("MEDIUM", 2)]
[InlineData("LOW", 3)]
[InlineData("UNKNOWN", 4)]
public void GetSeverityPriority_ReturnsCorrectPriority(string severity, int expectedPriority)
{
var result = TrivySeverityMapper.GetSeverityPriority(severity);
Assert.Equal(expectedPriority, result);
}
[Fact]
public void GetSeverityPriority_WithUnknownSeverity_ReturnsFour()
{
var result = TrivySeverityMapper.GetSeverityPriority("SOMETHING_ELSE");
Assert.Equal(4, result);
}
}

View File

@@ -0,0 +1,545 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Domain;
using StellaOps.ExportCenter.WebService.Api;
namespace StellaOps.ExportCenter.Tests.Api;
public class ExportApiRepositoryTests
{
private readonly Guid _tenantId = Guid.NewGuid();
// ========================================================================
// Profile Repository Tests
// ========================================================================
[Fact]
public async Task ProfileRepo_CreateAsync_StoresProfile()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile();
// Act
var created = await repo.CreateAsync(profile);
// Assert
Assert.Equal(profile.ProfileId, created.ProfileId);
Assert.Equal(profile.Name, created.Name);
}
[Fact]
public async Task ProfileRepo_GetByIdAsync_ReturnsStoredProfile()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile();
await repo.CreateAsync(profile);
// Act
var retrieved = await repo.GetByIdAsync(_tenantId, profile.ProfileId);
// Assert
Assert.NotNull(retrieved);
Assert.Equal(profile.ProfileId, retrieved.ProfileId);
Assert.Equal(profile.Name, retrieved.Name);
}
[Fact]
public async Task ProfileRepo_GetByIdAsync_ReturnsNull_WhenNotFound()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
// Act
var retrieved = await repo.GetByIdAsync(_tenantId, Guid.NewGuid());
// Assert
Assert.Null(retrieved);
}
[Fact]
public async Task ProfileRepo_GetByIdAsync_ReturnsNull_WhenWrongTenant()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile();
await repo.CreateAsync(profile);
// Act
var retrieved = await repo.GetByIdAsync(Guid.NewGuid(), profile.ProfileId);
// Assert
Assert.Null(retrieved);
}
[Fact]
public async Task ProfileRepo_ListAsync_ReturnsAllProfilesForTenant()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile1 = CreateTestProfile("Profile 1");
var profile2 = CreateTestProfile("Profile 2");
var otherTenantProfile = CreateTestProfile("Other Tenant") with { TenantId = Guid.NewGuid() };
await repo.CreateAsync(profile1);
await repo.CreateAsync(profile2);
await repo.CreateAsync(otherTenantProfile);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId);
// Assert
Assert.Equal(2, totalCount);
Assert.Equal(2, items.Count);
Assert.All(items, p => Assert.Equal(_tenantId, p.TenantId));
}
[Fact]
public async Task ProfileRepo_ListAsync_FiltersByStatus()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var activeProfile = CreateTestProfile("Active") with { Status = ExportProfileStatus.Active };
var draftProfile = CreateTestProfile("Draft") with { Status = ExportProfileStatus.Draft };
await repo.CreateAsync(activeProfile);
await repo.CreateAsync(draftProfile);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId, status: ExportProfileStatus.Active);
// Assert
Assert.Equal(1, totalCount);
Assert.Single(items);
Assert.Equal(ExportProfileStatus.Active, items[0].Status);
}
[Fact]
public async Task ProfileRepo_ListAsync_FiltersByKind()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var adhocProfile = CreateTestProfile("AdHoc") with { Kind = ExportProfileKind.AdHoc };
var scheduledProfile = CreateTestProfile("Scheduled") with { Kind = ExportProfileKind.Scheduled };
await repo.CreateAsync(adhocProfile);
await repo.CreateAsync(scheduledProfile);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId, kind: ExportProfileKind.Scheduled);
// Assert
Assert.Equal(1, totalCount);
Assert.Single(items);
Assert.Equal(ExportProfileKind.Scheduled, items[0].Kind);
}
[Fact]
public async Task ProfileRepo_ListAsync_SearchesByName()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile1 = CreateTestProfile("Daily SBOM Export");
var profile2 = CreateTestProfile("Weekly VEX Export");
await repo.CreateAsync(profile1);
await repo.CreateAsync(profile2);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId, search: "SBOM");
// Assert
Assert.Equal(1, totalCount);
Assert.Single(items);
Assert.Contains("SBOM", items[0].Name);
}
[Fact]
public async Task ProfileRepo_UpdateAsync_ModifiesProfile()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile();
await repo.CreateAsync(profile);
var updated = profile with { Name = "Updated Name", UpdatedAt = DateTimeOffset.UtcNow };
// Act
var result = await repo.UpdateAsync(updated);
// Assert
Assert.NotNull(result);
Assert.Equal("Updated Name", result.Name);
var retrieved = await repo.GetByIdAsync(_tenantId, profile.ProfileId);
Assert.Equal("Updated Name", retrieved?.Name);
}
[Fact]
public async Task ProfileRepo_ArchiveAsync_SetsArchivedStatus()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile();
await repo.CreateAsync(profile);
// Act
var result = await repo.ArchiveAsync(_tenantId, profile.ProfileId);
// Assert
Assert.True(result);
var retrieved = await repo.GetByIdAsync(_tenantId, profile.ProfileId);
Assert.NotNull(retrieved);
Assert.Equal(ExportProfileStatus.Archived, retrieved.Status);
Assert.NotNull(retrieved.ArchivedAt);
}
[Fact]
public async Task ProfileRepo_IsNameUniqueAsync_ReturnsTrueForUniqueName()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile("Existing Profile");
await repo.CreateAsync(profile);
// Act
var isUnique = await repo.IsNameUniqueAsync(_tenantId, "New Profile Name");
// Assert
Assert.True(isUnique);
}
[Fact]
public async Task ProfileRepo_IsNameUniqueAsync_ReturnsFalseForDuplicateName()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile("Existing Profile");
await repo.CreateAsync(profile);
// Act
var isUnique = await repo.IsNameUniqueAsync(_tenantId, "Existing Profile");
// Assert
Assert.False(isUnique);
}
[Fact]
public async Task ProfileRepo_IsNameUniqueAsync_ExcludesSpecifiedProfile()
{
// Arrange
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
var profile = CreateTestProfile("Existing Profile");
await repo.CreateAsync(profile);
// Act
var isUnique = await repo.IsNameUniqueAsync(_tenantId, "Existing Profile", profile.ProfileId);
// Assert
Assert.True(isUnique);
}
// ========================================================================
// Run Repository Tests
// ========================================================================
[Fact]
public async Task RunRepo_CreateAsync_StoresRun()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var run = CreateTestRun();
// Act
var created = await repo.CreateAsync(run);
// Assert
Assert.Equal(run.RunId, created.RunId);
Assert.Equal(run.ProfileId, created.ProfileId);
}
[Fact]
public async Task RunRepo_GetByIdAsync_ReturnsStoredRun()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var run = CreateTestRun();
await repo.CreateAsync(run);
// Act
var retrieved = await repo.GetByIdAsync(_tenantId, run.RunId);
// Assert
Assert.NotNull(retrieved);
Assert.Equal(run.RunId, retrieved.RunId);
}
[Fact]
public async Task RunRepo_ListAsync_FiltersByProfileId()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var profileId1 = Guid.NewGuid();
var profileId2 = Guid.NewGuid();
var run1 = CreateTestRun() with { ProfileId = profileId1 };
var run2 = CreateTestRun() with { ProfileId = profileId2 };
await repo.CreateAsync(run1);
await repo.CreateAsync(run2);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId, profileId: profileId1);
// Assert
Assert.Equal(1, totalCount);
Assert.Single(items);
Assert.Equal(profileId1, items[0].ProfileId);
}
[Fact]
public async Task RunRepo_ListAsync_FiltersByStatus()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var runningRun = CreateTestRun() with { Status = ExportRunStatus.Running };
var completedRun = CreateTestRun() with { Status = ExportRunStatus.Completed };
await repo.CreateAsync(runningRun);
await repo.CreateAsync(completedRun);
// Act
var (items, totalCount) = await repo.ListAsync(_tenantId, status: ExportRunStatus.Running);
// Assert
Assert.Equal(1, totalCount);
Assert.Single(items);
Assert.Equal(ExportRunStatus.Running, items[0].Status);
}
[Fact]
public async Task RunRepo_CancelAsync_CancelsQueuedRun()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var run = CreateTestRun() with { Status = ExportRunStatus.Queued };
await repo.CreateAsync(run);
// Act
var result = await repo.CancelAsync(_tenantId, run.RunId);
// Assert
Assert.True(result);
var retrieved = await repo.GetByIdAsync(_tenantId, run.RunId);
Assert.Equal(ExportRunStatus.Cancelled, retrieved?.Status);
}
[Fact]
public async Task RunRepo_CancelAsync_CancelsRunningRun()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var run = CreateTestRun() with { Status = ExportRunStatus.Running };
await repo.CreateAsync(run);
// Act
var result = await repo.CancelAsync(_tenantId, run.RunId);
// Assert
Assert.True(result);
}
[Fact]
public async Task RunRepo_CancelAsync_ReturnsFalseForCompletedRun()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var run = CreateTestRun() with { Status = ExportRunStatus.Completed };
await repo.CreateAsync(run);
// Act
var result = await repo.CancelAsync(_tenantId, run.RunId);
// Assert
Assert.False(result);
}
[Fact]
public async Task RunRepo_GetActiveRunsCountAsync_CountsRunningRuns()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Running });
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Running });
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Completed });
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Queued });
// Act
var count = await repo.GetActiveRunsCountAsync(_tenantId);
// Assert
Assert.Equal(2, count);
}
[Fact]
public async Task RunRepo_GetActiveRunsCountAsync_FiltersByProfileId()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
var profileId = Guid.NewGuid();
await repo.CreateAsync(CreateTestRun() with { ProfileId = profileId, Status = ExportRunStatus.Running });
await repo.CreateAsync(CreateTestRun() with { ProfileId = Guid.NewGuid(), Status = ExportRunStatus.Running });
// Act
var count = await repo.GetActiveRunsCountAsync(_tenantId, profileId);
// Assert
Assert.Equal(1, count);
}
[Fact]
public async Task RunRepo_GetQueuedRunsCountAsync_CountsQueuedRuns()
{
// Arrange
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Queued });
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Queued });
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Running });
// Act
var count = await repo.GetQueuedRunsCountAsync(_tenantId);
// Assert
Assert.Equal(2, count);
}
// ========================================================================
// Artifact Repository Tests
// ========================================================================
[Fact]
public async Task ArtifactRepo_CreateAsync_StoresArtifact()
{
// Arrange
var repo = new InMemoryExportArtifactRepository(NullLogger<InMemoryExportArtifactRepository>.Instance);
var artifact = CreateTestArtifact();
// Act
var created = await repo.CreateAsync(artifact);
// Assert
Assert.Equal(artifact.ArtifactId, created.ArtifactId);
Assert.Equal(artifact.Name, created.Name);
}
[Fact]
public async Task ArtifactRepo_GetByIdAsync_ReturnsStoredArtifact()
{
// Arrange
var repo = new InMemoryExportArtifactRepository(NullLogger<InMemoryExportArtifactRepository>.Instance);
var artifact = CreateTestArtifact();
await repo.CreateAsync(artifact);
// Act
var retrieved = await repo.GetByIdAsync(_tenantId, artifact.ArtifactId);
// Assert
Assert.NotNull(retrieved);
Assert.Equal(artifact.ArtifactId, retrieved.ArtifactId);
}
[Fact]
public async Task ArtifactRepo_ListByRunAsync_ReturnsArtifactsForRun()
{
// Arrange
var repo = new InMemoryExportArtifactRepository(NullLogger<InMemoryExportArtifactRepository>.Instance);
var runId = Guid.NewGuid();
var otherRunId = Guid.NewGuid();
await repo.CreateAsync(CreateTestArtifact() with { RunId = runId, Name = "artifact1.json" });
await repo.CreateAsync(CreateTestArtifact() with { RunId = runId, Name = "artifact2.json" });
await repo.CreateAsync(CreateTestArtifact() with { RunId = otherRunId, Name = "other.json" });
// Act
var artifacts = await repo.ListByRunAsync(_tenantId, runId);
// Assert
Assert.Equal(2, artifacts.Count);
Assert.All(artifacts, a => Assert.Equal(runId, a.RunId));
}
[Fact]
public async Task ArtifactRepo_DeleteByRunAsync_RemovesArtifactsForRun()
{
// Arrange
var repo = new InMemoryExportArtifactRepository(NullLogger<InMemoryExportArtifactRepository>.Instance);
var runId = Guid.NewGuid();
await repo.CreateAsync(CreateTestArtifact() with { RunId = runId, Name = "artifact1.json" });
await repo.CreateAsync(CreateTestArtifact() with { RunId = runId, Name = "artifact2.json" });
// Act
var deleted = await repo.DeleteByRunAsync(_tenantId, runId);
// Assert
Assert.Equal(2, deleted);
var remaining = await repo.ListByRunAsync(_tenantId, runId);
Assert.Empty(remaining);
}
// ========================================================================
// Test Helpers
// ========================================================================
private ExportProfile CreateTestProfile(string name = "Test Profile")
{
return new ExportProfile
{
ProfileId = Guid.NewGuid(),
TenantId = _tenantId,
Name = name,
Description = "Test profile description",
Kind = ExportProfileKind.AdHoc,
Status = ExportProfileStatus.Active,
CreatedAt = DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow
};
}
private ExportRun CreateTestRun()
{
return new ExportRun
{
RunId = Guid.NewGuid(),
ProfileId = Guid.NewGuid(),
TenantId = _tenantId,
Status = ExportRunStatus.Running,
Trigger = ExportRunTrigger.Api,
CorrelationId = Guid.NewGuid().ToString(),
CreatedAt = DateTimeOffset.UtcNow
};
}
private ExportArtifact CreateTestArtifact()
{
return new ExportArtifact
{
ArtifactId = Guid.NewGuid(),
RunId = Guid.NewGuid(),
TenantId = _tenantId,
Name = "test-artifact.json",
Kind = "json",
Path = "/tmp/test-artifact.json",
SizeBytes = 1024,
ContentType = "application/json",
Checksum = "sha256:abc123",
CreatedAt = DateTimeOffset.UtcNow
};
}
}

View File

@@ -0,0 +1,233 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.WebService.Api;
namespace StellaOps.ExportCenter.Tests.Api;
public class ExportAuditServiceTests
{
private readonly Guid _tenantId = Guid.NewGuid();
private readonly ExportAuditService _auditService;
public ExportAuditServiceTests()
{
_auditService = new ExportAuditService(
NullLogger<ExportAuditService>.Instance,
TimeProvider.System);
}
[Fact]
public async Task LogProfileOperationAsync_CompletesWithoutError()
{
// Arrange
var profileId = Guid.NewGuid();
var userId = "user@example.com";
// Act & Assert - should not throw
await _auditService.LogProfileOperationAsync(
ExportAuditOperation.ProfileCreated,
_tenantId,
profileId,
userId,
new { Name = "Test Profile", Kind = "AdHoc" });
}
[Fact]
public async Task LogRunOperationAsync_CompletesWithoutError()
{
// Arrange
var runId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var userId = "user@example.com";
// Act & Assert - should not throw
await _auditService.LogRunOperationAsync(
ExportAuditOperation.RunStarted,
_tenantId,
runId,
profileId,
userId,
new { DryRun = false });
}
[Fact]
public async Task LogArtifactDownloadAsync_CompletesWithoutError()
{
// Arrange
var runId = Guid.NewGuid();
var artifactId = Guid.NewGuid();
var userId = "user@example.com";
var clientIp = "192.168.1.1";
// Act & Assert - should not throw
await _auditService.LogArtifactDownloadAsync(
_tenantId,
runId,
artifactId,
userId,
clientIp);
}
[Fact]
public async Task LogConcurrencyLimitAsync_CompletesWithoutError()
{
// Arrange
var profileId = Guid.NewGuid();
var userId = "user@example.com";
// Act & Assert - should not throw
await _auditService.LogConcurrencyLimitAsync(
_tenantId,
profileId,
"tenant",
4,
4,
userId);
}
[Fact]
public async Task LogProfileOperationAsync_HandlesNullDetails()
{
// Arrange
var profileId = Guid.NewGuid();
// Act & Assert - should not throw with null userId and details
await _auditService.LogProfileOperationAsync(
ExportAuditOperation.ProfileArchived,
_tenantId,
profileId,
null,
null);
}
[Fact]
public async Task LogRunOperationAsync_HandlesAllOperationTypes()
{
// Arrange
var runId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var operations = new[]
{
ExportAuditOperation.RunStarted,
ExportAuditOperation.RunCompleted,
ExportAuditOperation.RunFailed,
ExportAuditOperation.RunCancelled,
ExportAuditOperation.RunQueued
};
// Act & Assert - all operations should complete without error
foreach (var operation in operations)
{
await _auditService.LogRunOperationAsync(
operation,
_tenantId,
runId,
profileId,
"user@example.com");
}
}
[Fact]
public async Task LogProfileOperationAsync_ThrowsOnCancellation()
{
// Arrange
var profileId = Guid.NewGuid();
var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
await _auditService.LogProfileOperationAsync(
ExportAuditOperation.ProfileCreated,
_tenantId,
profileId,
"user@example.com",
cancellationToken: cts.Token));
}
[Fact]
public async Task LogRunOperationAsync_ThrowsOnCancellation()
{
// Arrange
var runId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
await _auditService.LogRunOperationAsync(
ExportAuditOperation.RunStarted,
_tenantId,
runId,
profileId,
"user@example.com",
cancellationToken: cts.Token));
}
[Fact]
public async Task LogArtifactDownloadAsync_ThrowsOnCancellation()
{
// Arrange
var runId = Guid.NewGuid();
var artifactId = Guid.NewGuid();
var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
await _auditService.LogArtifactDownloadAsync(
_tenantId,
runId,
artifactId,
"user@example.com",
"192.168.1.1",
cancellationToken: cts.Token));
}
[Fact]
public async Task LogConcurrencyLimitAsync_ThrowsOnCancellation()
{
// Arrange
var profileId = Guid.NewGuid();
var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(async () =>
await _auditService.LogConcurrencyLimitAsync(
_tenantId,
profileId,
"tenant",
4,
4,
"user@example.com",
cancellationToken: cts.Token));
}
[Fact]
public async Task LogProfileOperationAsync_HandlesAllProfileOperations()
{
// Arrange
var profileId = Guid.NewGuid();
var operations = new[]
{
ExportAuditOperation.ProfileCreated,
ExportAuditOperation.ProfileUpdated,
ExportAuditOperation.ProfileArchived,
ExportAuditOperation.ProfileActivated,
ExportAuditOperation.ProfilePaused
};
// Act & Assert - all operations should complete without error
foreach (var operation in operations)
{
await _auditService.LogProfileOperationAsync(
operation,
_tenantId,
profileId,
"user@example.com");
}
}
}

View File

@@ -18,7 +18,7 @@ public sealed class BootstrapPackBuilderTests : IDisposable
{
_tempDir = Path.Combine(Path.GetTempPath(), $"bootstrap-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
- _cryptoHash = new DefaultCryptoHash();
+ _cryptoHash = new FakeCryptoHash();
_builder = new BootstrapPackBuilder(_cryptoHash);
}
@@ -338,11 +338,12 @@ public sealed class BootstrapPackBuilderTests : IDisposable
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
+ var posixEntry = entry as PosixTarEntry;
entries.Add(new TarEntryMetadata(
entry.Uid,
entry.Gid,
- entry.UserName ?? string.Empty,
- entry.GroupName ?? string.Empty,
+ posixEntry?.UserName ?? string.Empty,
+ posixEntry?.GroupName ?? string.Empty,
entry.ModificationTime));
}

View File

@@ -0,0 +1,573 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Encryption;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public class BundleEncryptionServiceTests : IDisposable
{
private readonly ICryptoHash _cryptoHash;
private readonly StubAgeKeyWrapper _ageKeyWrapper;
private readonly BundleEncryptionService _service;
private readonly string _tempDir;
public BundleEncryptionServiceTests()
{
_cryptoHash = new FakeCryptoHash();
_ageKeyWrapper = new StubAgeKeyWrapper(NullLogger<StubAgeKeyWrapper>.Instance);
_service = new BundleEncryptionService(
_cryptoHash,
NullLogger<BundleEncryptionService>.Instance,
_ageKeyWrapper,
null); // No KMS wrapper for tests
_tempDir = Path.Combine(Path.GetTempPath(), $"encryption-tests-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
try { Directory.Delete(_tempDir, true); } catch { /* best-effort cleanup */ }
}
}
[Fact]
public async Task EncryptAsync_WithModeNone_ReturnsSuccessWithoutEncryption()
{
var request = new BundleEncryptRequest
{
RunId = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
Options = new BundleEncryptionOptions { Mode = BundleEncryptionMode.None },
Files = []
};
var result = await _service.EncryptAsync(request);
Assert.True(result.Success);
Assert.Empty(result.EncryptedFiles);
Assert.Null(result.Metadata);
}
[Fact]
public async Task EncryptAsync_WithAgeMode_EncryptsFiles()
{
var (publicKey, _) = TestAgeKeyGenerator.GenerateKeyPair();
// Create source file
var sourceFile = Path.Combine(_tempDir, "source.txt");
var destFile = Path.Combine(_tempDir, "encrypted.bin");
var plaintext = "This is test content for encryption."u8.ToArray();
await File.WriteAllBytesAsync(sourceFile, plaintext);
var request = new BundleEncryptRequest
{
RunId = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "{runId}:{relativePath}"
},
Files =
[
new BundleFileToEncrypt
{
RelativePath = "data/source.txt",
SourcePath = sourceFile,
DestinationPath = destFile
}
]
};
var result = await _service.EncryptAsync(request);
Assert.True(result.Success);
Assert.Single(result.EncryptedFiles);
Assert.NotNull(result.Metadata);
Assert.Equal("age", result.Metadata.Mode);
Assert.Single(result.Metadata.Recipients);
Assert.Equal("age", result.Metadata.Recipients[0].Type);
Assert.Equal(publicKey, result.Metadata.Recipients[0].Recipient);
// Verify encrypted file exists and is different from plaintext
Assert.True(File.Exists(destFile));
var encryptedContent = await File.ReadAllBytesAsync(destFile);
Assert.NotEqual(plaintext, encryptedContent);
// Encrypted should be larger (nonce + tag overhead)
Assert.True(encryptedContent.Length > plaintext.Length);
}
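// The "{runId}:{relativePath}" AAD template binds each ciphertext to its run
// and path: AEAD authenticates the associated data, so a ciphertext swapped
// between paths or runs fails decryption even with the correct key.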
[Fact]
public async Task EncryptAsync_AndDecryptAsync_RoundTripsSuccessfully()
{
var (publicKey, privateKey) = TestAgeKeyGenerator.GenerateKeyPair();
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
// Create source file
var sourceFile = Path.Combine(_tempDir, "source.txt");
var encryptedFile = Path.Combine(_tempDir, "encrypted.bin");
var decryptedFile = Path.Combine(_tempDir, "decrypted.txt");
var plaintext = "Round-trip test content with UTF-8: \u00e9\u00e8\u00ea"u8.ToArray();
await File.WriteAllBytesAsync(sourceFile, plaintext);
var encryptRequest = new BundleEncryptRequest
{
RunId = runId,
TenantId = tenantId,
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "{runId}:{relativePath}"
},
Files =
[
new BundleFileToEncrypt
{
RelativePath = "data/test.txt",
SourcePath = sourceFile,
DestinationPath = encryptedFile
}
]
};
var encryptResult = await _service.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
// Now decrypt
var decryptRequest = new BundleDecryptRequest
{
RunId = runId,
TenantId = tenantId,
Metadata = encryptResult.Metadata!,
AgePrivateKey = privateKey,
Files =
[
new BundleFileToDecrypt
{
RelativePath = "data/test.txt",
SourcePath = encryptedFile,
DestinationPath = decryptedFile,
Nonce = encryptResult.EncryptedFiles[0].Nonce,
ExpectedHash = encryptResult.EncryptedFiles[0].PlaintextHash
}
]
};
var decryptResult = await _service.DecryptAsync(decryptRequest);
Assert.True(decryptResult.Success);
Assert.Single(decryptResult.DecryptedFiles);
Assert.True(decryptResult.DecryptedFiles[0].HashVerified);
// Verify decrypted content matches original
var decryptedContent = await File.ReadAllBytesAsync(decryptedFile);
Assert.Equal(plaintext, decryptedContent);
}
[Fact]
public async Task EncryptAsync_WithMultipleRecipients_WrapsForEach()
{
var (publicKey1, _) = TestAgeKeyGenerator.GenerateKeyPair();
var (publicKey2, _) = TestAgeKeyGenerator.GenerateKeyPair();
var sourceFile = Path.Combine(_tempDir, "source.txt");
var destFile = Path.Combine(_tempDir, "encrypted.bin");
await File.WriteAllTextAsync(sourceFile, "Test content");
var request = new BundleEncryptRequest
{
RunId = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey1, publicKey2],
AadFormat = "{runId}:{relativePath}"
},
Files =
[
new BundleFileToEncrypt
{
RelativePath = "test.txt",
SourcePath = sourceFile,
DestinationPath = destFile
}
]
};
var result = await _service.EncryptAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.Metadata);
Assert.Equal(2, result.Metadata.Recipients.Count);
// Each recipient should have a different wrapped key
var wrappedKey1 = result.Metadata.Recipients[0].WrappedKey;
var wrappedKey2 = result.Metadata.Recipients[1].WrappedKey;
Assert.NotEqual(wrappedKey1, wrappedKey2);
}
[Fact]
public async Task EncryptAsync_WithMultipleFiles_EncryptsAll()
{
var (publicKey, _) = TestAgeKeyGenerator.GenerateKeyPair();
// Create multiple source files
var files = new List<BundleFileToEncrypt>();
for (int i = 0; i < 3; i++)
{
var sourceFile = Path.Combine(_tempDir, $"source{i}.txt");
var destFile = Path.Combine(_tempDir, $"encrypted{i}.bin");
await File.WriteAllTextAsync(sourceFile, $"Content for file {i}");
files.Add(new BundleFileToEncrypt
{
RelativePath = $"data/file{i}.txt",
SourcePath = sourceFile,
DestinationPath = destFile
});
}
var request = new BundleEncryptRequest
{
RunId = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "{runId}:{relativePath}"
},
Files = files
};
var result = await _service.EncryptAsync(request);
Assert.True(result.Success);
Assert.Equal(3, result.EncryptedFiles.Count);
// Each file should have a unique nonce
var nonces = result.EncryptedFiles.Select(f => f.Nonce).ToHashSet();
Assert.Equal(3, nonces.Count);
}
[Fact]
public async Task DecryptAsync_WithWrongKey_Fails()
{
var (publicKey, _) = TestAgeKeyGenerator.GenerateKeyPair();
var (_, wrongPrivateKey) = TestAgeKeyGenerator.GenerateKeyPair();
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
// Encrypt with one key
var sourceFile = Path.Combine(_tempDir, "source.txt");
var encryptedFile = Path.Combine(_tempDir, "encrypted.bin");
var decryptedFile = Path.Combine(_tempDir, "decrypted.txt");
await File.WriteAllTextAsync(sourceFile, "Secret content");
var encryptRequest = new BundleEncryptRequest
{
RunId = runId,
TenantId = tenantId,
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "{runId}:{relativePath}"
},
Files =
[
new BundleFileToEncrypt
{
RelativePath = "test.txt",
SourcePath = sourceFile,
DestinationPath = encryptedFile
}
]
};
var encryptResult = await _service.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
// Try to decrypt with wrong key
var decryptRequest = new BundleDecryptRequest
{
RunId = runId,
TenantId = tenantId,
Metadata = encryptResult.Metadata!,
AgePrivateKey = wrongPrivateKey,
Files =
[
new BundleFileToDecrypt
{
RelativePath = "test.txt",
SourcePath = encryptedFile,
DestinationPath = decryptedFile,
Nonce = encryptResult.EncryptedFiles[0].Nonce
}
]
};
var decryptResult = await _service.DecryptAsync(decryptRequest);
// Should fail because wrong key was used
Assert.False(decryptResult.Success);
}
[Fact]
public async Task DecryptAsync_WithNoMatchingKey_ReturnsError()
{
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var metadata = new BundleEncryptionMetadata
{
Mode = "age",
AadFormat = "{runId}:{relativePath}",
Recipients = [] // No recipients
};
var decryptRequest = new BundleDecryptRequest
{
RunId = runId,
TenantId = tenantId,
Metadata = metadata,
AgePrivateKey = null, // No key
Files = []
};
var result = await _service.DecryptAsync(decryptRequest);
Assert.False(result.Success);
Assert.Contains("No matching key", result.ErrorMessage);
}
[Fact]
public void ValidateOptions_WithNoRecipients_ReturnsError()
{
var options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [],
AadFormat = "{runId}:{relativePath}"
};
var errors = _service.ValidateOptions(options);
Assert.NotEmpty(errors);
Assert.Contains(errors, e => e.Contains("recipient"));
}
[Fact]
public void ValidateOptions_WithInvalidRecipient_ReturnsError()
{
var options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = ["invalid-key"],
AadFormat = "{runId}:{relativePath}"
};
var errors = _service.ValidateOptions(options);
Assert.NotEmpty(errors);
Assert.Contains(errors, e => e.Contains("Invalid age public key"));
}
[Fact]
public void ValidateOptions_WithEmptyAadFormat_ReturnsError()
{
var (publicKey, _) = TestAgeKeyGenerator.GenerateKeyPair();
var options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "" // Invalid
};
var errors = _service.ValidateOptions(options);
Assert.NotEmpty(errors);
Assert.Contains(errors, e => e.Contains("AAD format"));
}
[Fact]
public void ValidateOptions_WithKmsAndNoKeyId_ReturnsError()
{
var options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.AesGcmKms,
KmsKeyId = null,
AadFormat = "{runId}:{relativePath}"
};
var errors = _service.ValidateOptions(options);
Assert.NotEmpty(errors);
Assert.Contains(errors, e => e.Contains("KMS key ID"));
}
[Fact]
public void ValidateOptions_WithModeNone_ReturnsNoErrors()
{
var options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.None
};
var errors = _service.ValidateOptions(options);
Assert.Empty(errors);
}
[Fact]
public async Task EncryptAsync_WithNoRecipientsConfigured_ReturnsError()
{
var request = new BundleEncryptRequest
{
RunId = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [],
AadFormat = "{runId}:{relativePath}"
},
Files = []
};
var result = await _service.EncryptAsync(request);
Assert.False(result.Success);
Assert.Contains("recipient", result.ErrorMessage);
}
[Fact]
public async Task DecryptAsync_WithTamperedCiphertext_Fails()
{
var (publicKey, privateKey) = TestAgeKeyGenerator.GenerateKeyPair();
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var sourceFile = Path.Combine(_tempDir, "source.txt");
var encryptedFile = Path.Combine(_tempDir, "encrypted.bin");
var decryptedFile = Path.Combine(_tempDir, "decrypted.txt");
await File.WriteAllTextAsync(sourceFile, "Original content");
var encryptRequest = new BundleEncryptRequest
{
RunId = runId,
TenantId = tenantId,
Options = new BundleEncryptionOptions
{
Mode = BundleEncryptionMode.Age,
Recipients = [publicKey],
AadFormat = "{runId}:{relativePath}"
},
Files =
[
new BundleFileToEncrypt
{
RelativePath = "test.txt",
SourcePath = sourceFile,
DestinationPath = encryptedFile
}
]
};
var encryptResult = await _service.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
// Tamper with the encrypted file
var encryptedBytes = await File.ReadAllBytesAsync(encryptedFile);
encryptedBytes[20] ^= 0xFF; // Flip bits in ciphertext
await File.WriteAllBytesAsync(encryptedFile, encryptedBytes);
// Try to decrypt tampered file
var decryptRequest = new BundleDecryptRequest
{
RunId = runId,
TenantId = tenantId,
Metadata = encryptResult.Metadata!,
AgePrivateKey = privateKey,
Files =
[
new BundleFileToDecrypt
{
RelativePath = "test.txt",
SourcePath = encryptedFile,
DestinationPath = decryptedFile,
Nonce = encryptResult.EncryptedFiles[0].Nonce
}
]
};
var decryptResult = await _service.DecryptAsync(decryptRequest);
// Should fail due to authentication tag mismatch
Assert.False(decryptResult.Success);
}
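/// <summary>
/// Fake hash implementation for testing: plain SHA-256, ignoring the
/// purpose/algorithm hints.
/// </summary>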
private sealed class FakeCryptoHash : ICryptoHash
{
public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
{
// SHA256.HashData hashes the span directly, avoiding the ToArray copy.
return System.Security.Cryptography.SHA256.HashData(data);
}
public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToBase64String(hash);
}
public ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
// Delegate to the static helper so the cancellation token is honoured.
return System.Security.Cryptography.SHA256.HashDataAsync(stream, cancellationToken);
}
public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
var hash = await ComputeHashAsync(stream, algorithmId, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHash(data, null);
public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashHex(data, null);
public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashBase64(data, null);
public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashAsync(stream, null, cancellationToken);
public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashHexAsync(stream, null, cancellationToken);
public string GetAlgorithmForPurpose(string purpose) => "sha256";
public string GetHashPrefix(string purpose) => "sha256:";
public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> GetHashPrefix(purpose) + ComputeHashHexForPurpose(data, purpose);
}
}

View File

@@ -0,0 +1,356 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.Core.Crypto.Encryption;
namespace StellaOps.ExportCenter.Tests.Crypto.Encryption;
public sealed class AesGcmBundleEncryptorTests
{
private readonly BundleEncryptionOptions _options = new();
private readonly Guid _tenantId = Guid.NewGuid();
private readonly Guid _runId = Guid.NewGuid();
private AesGcmBundleEncryptor CreateEncryptor(IKmsClient? kmsClient = null)
{
var factory = new BundleKeyWrapperFactory(
NullLogger<AgeBundleKeyWrapper>.Instance,
NullLogger<KmsBundleKeyWrapper>.Instance,
Options.Create(_options),
kmsClient);
return new AesGcmBundleEncryptor(
NullLogger<AesGcmBundleEncryptor>.Instance,
factory,
Options.Create(_options));
}
[Fact]
public async Task EncryptAsync_NoRecipients_ReturnsFailed()
{
var encryptor = CreateEncryptor();
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = "Hello, World!"u8.ToArray()
}
};
var result = await encryptor.EncryptAsync(request);
Assert.False(result.Success);
Assert.Contains("recipient", result.Error, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task EncryptAsync_EmptyFiles_ReturnsFailed()
{
var encryptor = CreateEncryptor();
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>(),
AgeRecipients = ["age1test123456789012345678901234567890123456789012345678901"]
};
var result = await encryptor.EncryptAsync(request);
Assert.False(result.Success);
Assert.Contains("files", result.Error, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task EncryptAsync_WithKms_EncryptsFiles()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var content = "Hello, World!"u8.ToArray();
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = content
},
KmsKeyId = "test-key"
};
var result = await encryptor.EncryptAsync(request);
Assert.True(result.Success);
Assert.Single(result.EncryptedFiles);
Assert.Contains("test.txt", result.EncryptedFiles.Keys);
Assert.NotEqual(content, result.EncryptedFiles["test.txt"]);
Assert.NotNull(result.Metadata);
Assert.Equal("aes-gcm+kms", result.Metadata.Mode);
}
[Fact]
public async Task EncryptAsync_SetsMetadata()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["data/file1.txt"] = "Content 1"u8.ToArray(),
["data/file2.bin"] = new byte[] { 1, 2, 3, 4, 5 }
},
KmsKeyId = "test-key"
};
var result = await encryptor.EncryptAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.Metadata);
Assert.Equal(2, result.Metadata.Files.Count);
Assert.Equal("{runId}:{relativePath}", result.Metadata.AadFormat);
Assert.Equal("random-12", result.Metadata.NonceFormat);
Assert.Single(result.Metadata.Recipients);
}
[Fact]
public async Task EncryptAsync_IncludesFileHashes()
{
_options.IncludeFileHashes = true;
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = "Hello, World!"u8.ToArray()
},
KmsKeyId = "test-key"
};
var result = await encryptor.EncryptAsync(request);
Assert.True(result.Success);
var fileMetadata = result.Metadata!.Files.Single();
Assert.NotNull(fileMetadata.OriginalHash);
Assert.StartsWith("sha256:", fileMetadata.OriginalHash);
}
[Fact]
public async Task EncryptDecrypt_RoundTrip_Succeeds()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var originalContent = "Hello, World! This is a test."u8.ToArray();
var encryptRequest = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = originalContent
},
KmsKeyId = "test-key"
};
var encryptResult = await encryptor.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
var decryptRequest = new BundleDecryptRequest
{
RunId = _runId,
Metadata = encryptResult.Metadata!,
EncryptedFiles = encryptResult.EncryptedFiles
};
var decryptResult = await encryptor.DecryptAsync(decryptRequest);
Assert.True(decryptResult.Success);
Assert.Single(decryptResult.DecryptedFiles);
Assert.Equal(originalContent, decryptResult.DecryptedFiles["test.txt"]);
}
[Fact]
public async Task EncryptDecrypt_MultipleFiles_RoundTrip()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var files = new Dictionary<string, byte[]>
{
["data/file1.txt"] = "Content 1"u8.ToArray(),
["data/file2.txt"] = "Content 2"u8.ToArray(),
["binary.bin"] = new byte[] { 0x00, 0x01, 0x02, 0xFF, 0xFE, 0xFD }
};
var encryptRequest = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = files,
KmsKeyId = "test-key"
};
var encryptResult = await encryptor.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
Assert.Equal(3, encryptResult.EncryptedFiles.Count);
var decryptRequest = new BundleDecryptRequest
{
RunId = _runId,
Metadata = encryptResult.Metadata!,
EncryptedFiles = encryptResult.EncryptedFiles
};
var decryptResult = await encryptor.DecryptAsync(decryptRequest);
Assert.True(decryptResult.Success);
Assert.Equal(3, decryptResult.DecryptedFiles.Count);
foreach (var (path, original) in files)
{
Assert.True(decryptResult.DecryptedFiles.TryGetValue(path, out var decrypted));
Assert.Equal(original, decrypted);
}
}
[Fact]
public async Task DecryptAsync_WrongRunId_Fails()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var encryptRequest = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = "Hello, World!"u8.ToArray()
},
KmsKeyId = "test-key"
};
var encryptResult = await encryptor.EncryptAsync(encryptRequest);
Assert.True(encryptResult.Success);
// Try to decrypt with wrong run ID (AAD mismatch)
var decryptRequest = new BundleDecryptRequest
{
RunId = Guid.NewGuid(), // Wrong run ID
Metadata = encryptResult.Metadata!,
EncryptedFiles = encryptResult.EncryptedFiles
};
var decryptResult = await encryptor.DecryptAsync(decryptRequest);
// Decryption should fail due to AAD mismatch
Assert.False(decryptResult.Success);
}
[Fact]
public async Task VerifyDecryptedContentAsync_ValidContent_NoFailures()
{
_options.IncludeFileHashes = true;
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var originalContent = "Hello, World!"u8.ToArray();
var encryptRequest = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = originalContent
},
KmsKeyId = "test-key"
};
var encryptResult = await encryptor.EncryptAsync(encryptRequest);
var decryptRequest = new BundleDecryptRequest
{
RunId = _runId,
Metadata = encryptResult.Metadata!,
EncryptedFiles = encryptResult.EncryptedFiles
};
var decryptResult = await encryptor.DecryptAsync(decryptRequest);
var failures = await encryptor.VerifyDecryptedContentAsync(
decryptResult, encryptResult.Metadata!);
Assert.Empty(failures);
}
[Fact]
public async Task Metadata_RecipientsOrderedDeterministically()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
// Request with multiple age recipients (age public keys are 59+ chars)
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["test.txt"] = "Hello"u8.ToArray()
},
AgeRecipients = [
"age1zzz1234567890123456789012345678901234567890123456789012",
"age1aaa1234567890123456789012345678901234567890123456789012"
]
};
var result = await encryptor.EncryptAsync(request);
Assert.True(result.Success, $"Encryption failed: {result.Error}");
Assert.Equal(2, result.Metadata!.Recipients.Count);
// Recipients should be sorted by type, then by recipient/kmsKeyId
// Both are 'age' type, so sorted by recipient
var recipients = result.Metadata.Recipients.Select(r => r.Recipient).ToList();
var sortedRecipients = recipients.OrderBy(r => r).ToList();
Assert.Equal(sortedRecipients, recipients);
}
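// A sketch of the deterministic sort the assertions above expect - ordinal
// comparison keeps the metadata byte-stable across cultures. RecipientEntry
// and its members are hypothetical names, not the production types:
static IReadOnlyList<RecipientEntry> SortRecipients(IEnumerable<RecipientEntry> entries) =>
entries
.OrderBy(e => e.Type, StringComparer.Ordinal)
.ThenBy(e => e.Recipient ?? e.KmsKeyId, StringComparer.Ordinal)
.ToList();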
[Fact]
public async Task Metadata_FilesOrderedDeterministically()
{
var kmsClient = new StubKmsClient();
var encryptor = CreateEncryptor(kmsClient);
var request = new BundleEncryptRequest
{
RunId = _runId,
TenantId = _tenantId,
Files = new Dictionary<string, byte[]>
{
["z-file.txt"] = "Z"u8.ToArray(),
["a-file.txt"] = "A"u8.ToArray(),
["m-file.txt"] = "M"u8.ToArray()
},
KmsKeyId = "test-key"
};
var result = await encryptor.EncryptAsync(request);
Assert.True(result.Success);
Assert.Equal(3, result.Metadata!.Files.Count);
// Files should be sorted by path
Assert.Equal("a-file.txt", result.Metadata.Files[0].Path);
Assert.Equal("m-file.txt", result.Metadata.Files[1].Path);
Assert.Equal("z-file.txt", result.Metadata.Files[2].Path);
}
}

View File

@@ -51,7 +51,7 @@ public sealed class DevPortalOfflineBundleBuilderTests
new Dictionary<string, string> { ["releaseVersion"] = "2025.11.0" });
var fixedNow = new DateTimeOffset(2025, 11, 4, 12, 30, 0, TimeSpan.Zero);
- var builder = new DevPortalOfflineBundleBuilder(new FixedTimeProvider(fixedNow));
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), new FixedTimeProvider(fixedNow));
var result = builder.Build(request, TestContext.Current.CancellationToken);
Assert.Equal(request.BundleId, result.Manifest.BundleId);
@@ -129,7 +129,7 @@ public sealed class DevPortalOfflineBundleBuilderTests
[Fact]
public void Build_ThrowsWhenNoContent()
{
- var builder = new DevPortalOfflineBundleBuilder(new FixedTimeProvider(DateTimeOffset.UtcNow));
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), new FixedTimeProvider(DateTimeOffset.UtcNow));
var request = new DevPortalOfflineBundleRequest(Guid.NewGuid());
var exception = Assert.Throws<InvalidOperationException>(() => builder.Build(request, TestContext.Current.CancellationToken));
@@ -147,7 +147,7 @@ public sealed class DevPortalOfflineBundleBuilderTests
Directory.CreateDirectory(portalRoot);
File.WriteAllText(Path.Combine(portalRoot, "index.html"), "<html/>");
- var builder = new DevPortalOfflineBundleBuilder(new FixedTimeProvider(DateTimeOffset.UtcNow));
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), new FixedTimeProvider(DateTimeOffset.UtcNow));
var result = builder.Build(new DevPortalOfflineBundleRequest(Guid.NewGuid(), portalRoot), TestContext.Current.CancellationToken);
Assert.Single(result.Manifest.Entries);
@@ -168,7 +168,7 @@ public sealed class DevPortalOfflineBundleBuilderTests
[Fact]
public void Build_ThrowsWhenSourceDirectoryMissing()
{
- var builder = new DevPortalOfflineBundleBuilder(new FixedTimeProvider(DateTimeOffset.UtcNow));
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), new FixedTimeProvider(DateTimeOffset.UtcNow));
var missing = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
var request = new DevPortalOfflineBundleRequest(Guid.NewGuid(), missing);

View File

@@ -46,7 +46,7 @@ public class DevPortalOfflineJobTests
var fixedNow = new DateTimeOffset(2025, 11, 4, 18, 15, 0, TimeSpan.Zero);
var timeProvider = new FixedTimeProvider(fixedNow);
- var builder = new DevPortalOfflineBundleBuilder(timeProvider);
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), timeProvider);
var objectStore = new InMemoryObjectStore(timeProvider);
var signer = new TestManifestSigner(timeProvider);
var job = new DevPortalOfflineJob(builder, objectStore, signer, NullLogger<DevPortalOfflineJob>.Instance);
@@ -84,7 +84,7 @@ public class DevPortalOfflineJobTests
[Fact]
public async Task ExecuteAsync_SanitizesBundleFileName()
{
- var builder = new DevPortalOfflineBundleBuilder(new FixedTimeProvider(DateTimeOffset.UtcNow));
+ var builder = new DevPortalOfflineBundleBuilder(new FakeCryptoHash(), new FixedTimeProvider(DateTimeOffset.UtcNow));
var objectStore = new InMemoryObjectStore(new FixedTimeProvider(DateTimeOffset.UtcNow));
var signer = new TestManifestSigner(new FixedTimeProvider(DateTimeOffset.UtcNow));
var job = new DevPortalOfflineJob(builder, objectStore, signer, NullLogger<DevPortalOfflineJob>.Instance);

View File

@@ -0,0 +1,501 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Domain;
using StellaOps.ExportCenter.WebService.Distribution;
namespace StellaOps.ExportCenter.Tests.Distribution;
public sealed class ExportDistributionLifecycleTests
{
private readonly InMemoryExportDistributionRepository _repository;
private readonly ExportDistributionLifecycle _lifecycle;
private readonly TestTimeProvider _timeProvider;
private readonly Guid _tenantId = Guid.NewGuid();
private readonly Guid _runId = Guid.NewGuid();
private readonly Guid _profileId = Guid.NewGuid();
public ExportDistributionLifecycleTests()
{
_repository = new InMemoryExportDistributionRepository();
_timeProvider = new TestTimeProvider(new DateTimeOffset(2024, 6, 15, 12, 0, 0, TimeSpan.Zero));
_lifecycle = new ExportDistributionLifecycle(
_repository,
NullLogger<ExportDistributionLifecycle>.Instance,
_timeProvider);
}
private sealed class TestTimeProvider : TimeProvider
{
private readonly DateTimeOffset _utcNow;
public TestTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow;
public override DateTimeOffset GetUtcNow() => _utcNow;
}
private ExportDistributionConfig CreateConfig(int targetCount = 1, int retentionDays = 30)
{
var targets = Enumerable.Range(0, targetCount).Select(i => new DistributionTargetConfig
{
TargetId = $"target-{i}",
Name = $"Target {i}",
Kind = ExportDistributionKind.OciRegistry,
Enabled = true,
Priority = i,
Oci = new OciTargetConfig
{
Registry = "registry.example.com",
RepositoryPrefix = "exports"
}
}).ToList();
return new ExportDistributionConfig
{
Targets = targets,
DefaultRetention = new ExportRetentionConfig
{
PolicyId = Guid.NewGuid(),
RetentionDays = retentionDays
}
};
}
private IReadOnlyList<DistributionArtifact> CreateArtifacts(int count = 1)
{
return Enumerable.Range(0, count).Select(i => new DistributionArtifact
{
ArtifactId = Guid.NewGuid(),
Path = $"/staging/artifact-{i}.tar.gz",
Name = $"artifact-{i}.tar.gz",
Hash = $"sha256:hash{i}",
SizeBytes = 1024 * (i + 1)
}).ToList();
}
[Fact]
public async Task InitializeDistributionsAsync_CreatesDistributionsForEachTargetAndArtifact()
{
var config = CreateConfig(targetCount: 2);
var artifacts = CreateArtifacts(count: 3);
var result = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
Assert.Equal(6, result.Count); // 2 targets x 3 artifacts
}
[Fact]
public async Task InitializeDistributionsAsync_SetsIdempotencyKey()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var result = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
Assert.Single(result);
Assert.NotNull(result[0].IdempotencyKey);
Assert.Contains(_runId.ToString("N"), result[0].IdempotencyKey);
}
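// Hypothetical shape of the key the assertion above probes; the real format
// lives in ExportDistributionLifecycle and may carry more segments:
static string BuildIdempotencyKey(Guid runId, string targetId, Guid artifactId) =>
$"{runId:N}:{targetId}:{artifactId:N}";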
[Fact]
public async Task InitializeDistributionsAsync_IsIdempotent()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var first = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var second = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
Assert.Single(first);
Assert.Single(second);
Assert.Equal(first[0].DistributionId, second[0].DistributionId);
}
[Fact]
public async Task InitializeDistributionsAsync_SetsRetentionExpiry()
{
var config = CreateConfig(retentionDays: 90);
var artifacts = CreateArtifacts();
var result = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
Assert.NotNull(result[0].RetentionExpiresAt);
Assert.Equal(_timeProvider.GetUtcNow().AddDays(90), result[0].RetentionExpiresAt);
}
[Fact]
public async Task UpdateDistributionStatusAsync_UpdatesStatus()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributionId, ExportDistributionStatus.Distributing);
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Distributing, result.Status);
}
[Fact]
public async Task UpdateDistributionStatusAsync_NotFound_ReturnsNull()
{
var result = await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, Guid.NewGuid(), ExportDistributionStatus.Distributing);
Assert.Null(result);
}
[Fact]
public async Task UpdateDistributionStatusAsync_SetsDistributedAt()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributionId, ExportDistributionStatus.Distributed);
Assert.NotNull(result?.DistributedAt);
Assert.Equal(_timeProvider.GetUtcNow(), result!.DistributedAt);
}
[Fact]
public async Task UpdateDistributionStatusAsync_SetsVerifiedAt()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributionId, ExportDistributionStatus.Verified);
Assert.NotNull(result?.VerifiedAt);
Assert.Equal(_timeProvider.GetUtcNow(), result!.VerifiedAt);
}
[Fact]
public async Task RecordOciDistributionAsync_RecordsMetadata()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.RecordOciDistributionAsync(
_tenantId,
distributionId,
"sha256:manifestdigest",
"registry.example.com/exports/test:v1",
2048);
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Distributed, result.Status);
Assert.Equal("sha256:manifestdigest", result.OciManifestDigest);
Assert.Equal("registry.example.com/exports/test:v1", result.OciImageReference);
Assert.Equal(2048, result.SizeBytes);
}
[Fact]
public async Task RecordObjectStorageDistributionAsync_RecordsMetadata()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.RecordObjectStorageDistributionAsync(
_tenantId,
distributionId,
"s3://bucket/key",
"etag123",
"v1",
4096);
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Distributed, result.Status);
Assert.Equal(4096, result.SizeBytes);
Assert.Contains("s3://bucket/key", result.MetadataJson!);
}
[Fact]
public async Task RecordDistributionFailureAsync_RecordsError()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var result = await _lifecycle.RecordDistributionFailureAsync(
_tenantId,
distributionId,
"NETWORK_ERROR",
"Connection timeout");
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Failed, result.Status);
Assert.Equal(1, result.AttemptCount);
Assert.Contains("NETWORK_ERROR", result.ErrorJson!);
Assert.Contains("Connection timeout", result.ErrorJson!);
}
[Fact]
public async Task RecordVerificationAsync_Verified_SetsStatus()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributionId, ExportDistributionStatus.Distributed);
var result = await _lifecycle.RecordVerificationAsync(
_tenantId, distributionId, verified: true, "Hash match confirmed");
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Verified, result.Status);
Assert.Contains("\"verified\":true", result.MetadataJson!);
}
[Fact]
public async Task RecordVerificationAsync_NotVerified_SetsFailed()
{
var config = CreateConfig();
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributionId, ExportDistributionStatus.Distributed);
var result = await _lifecycle.RecordVerificationAsync(
_tenantId, distributionId, verified: false, "Hash mismatch");
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Failed, result.Status);
}
[Fact]
public async Task ApplyRetentionPolicyAsync_UpdatesExpiry()
{
var config = CreateConfig(retentionDays: 30);
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var newRetention = new ExportRetentionConfig
{
PolicyId = Guid.NewGuid(),
RetentionDays = 90
};
var result = await _lifecycle.ApplyRetentionPolicyAsync(
_tenantId, distributionId, newRetention);
Assert.NotNull(result);
Assert.Equal(_timeProvider.GetUtcNow().AddDays(90), result.RetentionExpiresAt);
}
[Fact]
public async Task ApplyRetentionPolicyAsync_ImmutablePreventsShorter()
{
var config = CreateConfig(retentionDays: 90);
var artifacts = CreateArtifacts();
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
var distributionId = distributions[0].DistributionId;
var shorterRetention = new ExportRetentionConfig
{
PolicyId = Guid.NewGuid(),
RetentionDays = 30,
Immutable = true
};
var result = await _lifecycle.ApplyRetentionPolicyAsync(
_tenantId, distributionId, shorterRetention);
// Should return original distribution without changes
Assert.NotNull(result);
Assert.Equal(_timeProvider.GetUtcNow().AddDays(90), result.RetentionExpiresAt);
}
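// Assumed semantics behind the test above: an immutable retention policy may
// extend an expiry but never shorten it. Roughly:
static DateTimeOffset ResolveExpiry(DateTimeOffset current, DateTimeOffset proposed, bool immutable) =>
immutable && proposed < current ? current : proposed;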
[Fact]
public async Task GetRunDistributionStatusAsync_ReturnsStats()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 3);
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[0].DistributionId, ExportDistributionStatus.Distributed);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[1].DistributionId, ExportDistributionStatus.Failed);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.Equal(_runId, result.RunId);
Assert.Equal(3, result.Stats.Total);
Assert.Equal(1, result.Stats.Pending);
Assert.Equal(1, result.Stats.Distributed);
Assert.Equal(1, result.Stats.Failed);
}
[Fact]
public async Task GetRunDistributionStatusAsync_NoDistributions_ReturnsNone()
{
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.Equal(DistributionOverallStatus.None, result.Status);
}
[Fact]
public async Task GetRunDistributionStatusAsync_AllPending_ReturnsPending()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 2);
await _lifecycle.InitializeDistributionsAsync(_runId, _profileId, _tenantId, config, artifacts);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.Equal(DistributionOverallStatus.Pending, result.Status);
}
[Fact]
public async Task GetRunDistributionStatusAsync_SomeInProgress_ReturnsInProgress()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 2);
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[0].DistributionId, ExportDistributionStatus.Distributing);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.Equal(DistributionOverallStatus.InProgress, result.Status);
}
[Fact]
public async Task CancelPendingDistributionsAsync_CancelsPending()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 3);
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[0].DistributionId, ExportDistributionStatus.Distributed);
var cancelled = await _lifecycle.CancelPendingDistributionsAsync(_tenantId, _runId);
Assert.Equal(2, cancelled);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.Equal(2, result.Stats.Cancelled);
Assert.Equal(1, result.Stats.Distributed);
}
[Fact]
public async Task ProcessExpiredDistributionsAsync_MarksExpired()
{
// Create distribution with past expiry
var distribution = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = _runId,
TenantId = _tenantId,
Kind = ExportDistributionKind.OciRegistry,
Status = ExportDistributionStatus.Distributed,
Target = "test",
ArtifactPath = "/test",
RetentionExpiresAt = _timeProvider.GetUtcNow().AddDays(-1),
CreatedAt = _timeProvider.GetUtcNow().AddDays(-30)
};
await _repository.CreateAsync(distribution);
var processed = await _lifecycle.ProcessExpiredDistributionsAsync();
Assert.Equal(1, processed);
var updated = await _repository.GetByIdAsync(_tenantId, distribution.DistributionId);
Assert.True(updated?.MarkedForDeletion);
}
[Fact]
public async Task ProcessExpiredDistributionsAsync_SkipsLegalHold()
{
var distribution = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = _runId,
TenantId = _tenantId,
Kind = ExportDistributionKind.OciRegistry,
Status = ExportDistributionStatus.Distributed,
Target = "test",
ArtifactPath = "/test",
RetentionExpiresAt = _timeProvider.GetUtcNow().AddDays(-1),
MetadataJson = "{\"legalHold\":true}",
CreatedAt = _timeProvider.GetUtcNow().AddDays(-30)
};
await _repository.CreateAsync(distribution);
var processed = await _lifecycle.ProcessExpiredDistributionsAsync();
Assert.Equal(0, processed);
}
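// Sketch of the legal-hold guard the test above relies on (assumed logic; the
// production check may read a typed field rather than raw JSON):
static bool IsOnLegalHold(string? metadataJson)
{
if (string.IsNullOrWhiteSpace(metadataJson))
{
return false;
}
using var doc = System.Text.Json.JsonDocument.Parse(metadataJson);
return doc.RootElement.TryGetProperty("legalHold", out var hold)
&& hold.ValueKind == System.Text.Json.JsonValueKind.True;
}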
[Fact]
public async Task RunDistributionStatus_IsComplete_WhenNoPendingOrInProgress()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 2);
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[0].DistributionId, ExportDistributionStatus.Distributed);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[1].DistributionId, ExportDistributionStatus.Verified);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.True(result.IsComplete);
}
[Fact]
public async Task RunDistributionStatus_HasFailures_WhenAnyFailed()
{
var config = CreateConfig();
var artifacts = CreateArtifacts(count: 2);
var distributions = await _lifecycle.InitializeDistributionsAsync(
_runId, _profileId, _tenantId, config, artifacts);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[0].DistributionId, ExportDistributionStatus.Distributed);
await _lifecycle.UpdateDistributionStatusAsync(
_tenantId, distributions[1].DistributionId, ExportDistributionStatus.Failed);
var result = await _lifecycle.GetRunDistributionStatusAsync(_tenantId, _runId);
Assert.True(result.HasFailures);
}
}

View File

@@ -0,0 +1,342 @@
using StellaOps.ExportCenter.Core.Domain;
using StellaOps.ExportCenter.WebService.Distribution;
namespace StellaOps.ExportCenter.Tests.Distribution;
public sealed class InMemoryExportDistributionRepositoryTests
{
private readonly InMemoryExportDistributionRepository _repository = new();
private readonly Guid _tenantId = Guid.NewGuid();
private readonly Guid _runId = Guid.NewGuid();
private ExportDistribution CreateDistribution(
Guid? distributionId = null,
Guid? tenantId = null,
Guid? runId = null,
string? idempotencyKey = null,
ExportDistributionStatus status = ExportDistributionStatus.Pending)
{
return new ExportDistribution
{
DistributionId = distributionId ?? Guid.NewGuid(),
RunId = runId ?? _runId,
TenantId = tenantId ?? _tenantId,
Kind = ExportDistributionKind.OciRegistry,
Status = status,
Target = "registry.example.com/test",
ArtifactPath = "/exports/test.tar.gz",
ArtifactHash = "sha256:abc123",
SizeBytes = 1024,
IdempotencyKey = idempotencyKey,
CreatedAt = DateTimeOffset.UtcNow
};
}
[Fact]
public async Task CreateAsync_AddsDistribution()
{
var distribution = CreateDistribution();
var result = await _repository.CreateAsync(distribution);
Assert.Equal(distribution.DistributionId, result.DistributionId);
}
[Fact]
public async Task CreateAsync_DuplicateId_Throws()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
await Assert.ThrowsAsync<InvalidOperationException>(() =>
_repository.CreateAsync(distribution));
}
[Fact]
public async Task GetByIdAsync_ReturnsDistribution()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var result = await _repository.GetByIdAsync(_tenantId, distribution.DistributionId);
Assert.NotNull(result);
Assert.Equal(distribution.DistributionId, result.DistributionId);
}
[Fact]
public async Task GetByIdAsync_WrongTenant_ReturnsNull()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var result = await _repository.GetByIdAsync(Guid.NewGuid(), distribution.DistributionId);
Assert.Null(result);
}
[Fact]
public async Task GetByIdAsync_NotFound_ReturnsNull()
{
var result = await _repository.GetByIdAsync(_tenantId, Guid.NewGuid());
Assert.Null(result);
}
[Fact]
public async Task GetByIdempotencyKeyAsync_ReturnsDistribution()
{
var idempotencyKey = "test-key-123";
var distribution = CreateDistribution(idempotencyKey: idempotencyKey);
await _repository.CreateAsync(distribution);
var result = await _repository.GetByIdempotencyKeyAsync(_tenantId, idempotencyKey);
Assert.NotNull(result);
Assert.Equal(idempotencyKey, result.IdempotencyKey);
}
[Fact]
public async Task GetByIdempotencyKeyAsync_NotFound_ReturnsNull()
{
var result = await _repository.GetByIdempotencyKeyAsync(_tenantId, "nonexistent");
Assert.Null(result);
}
[Fact]
public async Task ListByRunAsync_ReturnsDistributionsForRun()
{
var distribution1 = CreateDistribution();
var distribution2 = CreateDistribution();
var otherRunDistribution = CreateDistribution(runId: Guid.NewGuid());
await _repository.CreateAsync(distribution1);
await _repository.CreateAsync(distribution2);
await _repository.CreateAsync(otherRunDistribution);
var result = await _repository.ListByRunAsync(_tenantId, _runId);
Assert.Equal(2, result.Count);
}
[Fact]
public async Task ListByStatusAsync_FiltersCorrectly()
{
var pending = CreateDistribution(status: ExportDistributionStatus.Pending);
var distributed = CreateDistribution(status: ExportDistributionStatus.Distributed);
await _repository.CreateAsync(pending);
await _repository.CreateAsync(distributed);
var result = await _repository.ListByStatusAsync(_tenantId, ExportDistributionStatus.Pending);
Assert.Single(result);
Assert.Equal(ExportDistributionStatus.Pending, result[0].Status);
}
[Fact]
public async Task ListExpiredAsync_ReturnsOnlyExpired()
{
var now = DateTimeOffset.UtcNow;
var expired = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = _runId,
TenantId = _tenantId,
Kind = ExportDistributionKind.OciRegistry,
Status = ExportDistributionStatus.Distributed,
Target = "test",
ArtifactPath = "/test",
RetentionExpiresAt = now.AddDays(-1),
MarkedForDeletion = false,
CreatedAt = now.AddDays(-30)
};
var notExpired = new ExportDistribution
{
DistributionId = Guid.NewGuid(),
RunId = _runId,
TenantId = _tenantId,
Kind = ExportDistributionKind.OciRegistry,
Status = ExportDistributionStatus.Distributed,
Target = "test",
ArtifactPath = "/test",
RetentionExpiresAt = now.AddDays(30),
MarkedForDeletion = false,
CreatedAt = now.AddDays(-30)
};
await _repository.CreateAsync(expired);
await _repository.CreateAsync(notExpired);
var result = await _repository.ListExpiredAsync(now);
Assert.Single(result);
Assert.Equal(expired.DistributionId, result[0].DistributionId);
}
[Fact]
public async Task UpdateAsync_UpdatesDistribution()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var updated = new ExportDistribution
{
DistributionId = distribution.DistributionId,
RunId = distribution.RunId,
TenantId = distribution.TenantId,
Kind = distribution.Kind,
Status = ExportDistributionStatus.Distributed,
Target = distribution.Target,
ArtifactPath = distribution.ArtifactPath,
SizeBytes = 2048,
CreatedAt = distribution.CreatedAt
};
var result = await _repository.UpdateAsync(updated);
Assert.NotNull(result);
Assert.Equal(ExportDistributionStatus.Distributed, result.Status);
Assert.Equal(2048, result.SizeBytes);
}
[Fact]
public async Task UpdateAsync_WrongTenant_ReturnsNull()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var updated = new ExportDistribution
{
DistributionId = distribution.DistributionId,
RunId = distribution.RunId,
TenantId = Guid.NewGuid(), // Different tenant
Kind = distribution.Kind,
Status = ExportDistributionStatus.Distributed,
Target = distribution.Target,
ArtifactPath = distribution.ArtifactPath,
CreatedAt = distribution.CreatedAt
};
var result = await _repository.UpdateAsync(updated);
Assert.Null(result);
}
[Fact]
public async Task UpsertByIdempotencyKeyAsync_CreatesNew()
{
var distribution = CreateDistribution(idempotencyKey: "new-key");
var (result, wasCreated) = await _repository.UpsertByIdempotencyKeyAsync(distribution);
Assert.True(wasCreated);
Assert.Equal(distribution.DistributionId, result.DistributionId);
}
[Fact]
public async Task UpsertByIdempotencyKeyAsync_ReturnsExisting()
{
var existing = CreateDistribution(idempotencyKey: "existing-key");
await _repository.CreateAsync(existing);
var duplicate = CreateDistribution(idempotencyKey: "existing-key");
var (result, wasCreated) = await _repository.UpsertByIdempotencyKeyAsync(duplicate);
Assert.False(wasCreated);
Assert.Equal(existing.DistributionId, result.DistributionId);
}
[Fact]
public async Task UpsertByIdempotencyKeyAsync_RequiresIdempotencyKey()
{
var distribution = CreateDistribution(idempotencyKey: null);
await Assert.ThrowsAsync<ArgumentException>(() =>
_repository.UpsertByIdempotencyKeyAsync(distribution));
}
[Fact]
public async Task MarkForDeletionAsync_MarksDistribution()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var result = await _repository.MarkForDeletionAsync(_tenantId, distribution.DistributionId);
Assert.True(result);
var updated = await _repository.GetByIdAsync(_tenantId, distribution.DistributionId);
Assert.True(updated?.MarkedForDeletion);
Assert.NotNull(updated?.DeletedAt);
}
[Fact]
public async Task MarkForDeletionAsync_WrongTenant_ReturnsFalse()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var result = await _repository.MarkForDeletionAsync(Guid.NewGuid(), distribution.DistributionId);
Assert.False(result);
}
[Fact]
public async Task DeleteAsync_RemovesDistribution()
{
var distribution = CreateDistribution();
await _repository.CreateAsync(distribution);
var result = await _repository.DeleteAsync(_tenantId, distribution.DistributionId);
Assert.True(result);
var deleted = await _repository.GetByIdAsync(_tenantId, distribution.DistributionId);
Assert.Null(deleted);
}
[Fact]
public async Task DeleteAsync_RemovesIdempotencyIndex()
{
var distribution = CreateDistribution(idempotencyKey: "delete-key");
await _repository.CreateAsync(distribution);
await _repository.DeleteAsync(_tenantId, distribution.DistributionId);
var byKey = await _repository.GetByIdempotencyKeyAsync(_tenantId, "delete-key");
Assert.Null(byKey);
}
[Fact]
public async Task GetStatsAsync_ReturnsCorrectCounts()
{
await _repository.CreateAsync(CreateDistribution(status: ExportDistributionStatus.Pending));
await _repository.CreateAsync(CreateDistribution(status: ExportDistributionStatus.Pending));
await _repository.CreateAsync(CreateDistribution(status: ExportDistributionStatus.Distributed));
await _repository.CreateAsync(CreateDistribution(status: ExportDistributionStatus.Failed));
var stats = await _repository.GetStatsAsync(_tenantId, _runId);
Assert.Equal(4, stats.Total);
Assert.Equal(2, stats.Pending);
Assert.Equal(1, stats.Distributed);
Assert.Equal(1, stats.Failed);
}
[Fact]
public async Task Clear_RemovesAllDistributions()
{
await _repository.CreateAsync(CreateDistribution());
await _repository.CreateAsync(CreateDistribution());
_repository.Clear();
var result = await _repository.ListByRunAsync(_tenantId, _runId);
Assert.Empty(result);
}
}

View File

@@ -0,0 +1,196 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Distribution.Oci;
namespace StellaOps.ExportCenter.Tests.Distribution.Oci;
public class OciDistributionClientTests
{
private readonly OciDistributionOptions _defaultOptions;
public OciDistributionClientTests()
{
_defaultOptions = new OciDistributionOptions
{
Enabled = true,
DefaultRegistry = "registry.example.com",
RepositoryPrefix = "exports"
};
}
[Fact]
public void IsEnabled_WithValidConfig_ReturnsTrue()
{
var client = CreateClient(_defaultOptions);
Assert.True(client.IsEnabled);
}
[Fact]
public void IsEnabled_WhenDisabled_ReturnsFalse()
{
var options = new OciDistributionOptions
{
Enabled = false,
DefaultRegistry = "registry.example.com"
};
var client = CreateClient(options);
Assert.False(client.IsEnabled);
}
[Fact]
public void IsEnabled_WithoutRegistry_ReturnsFalse()
{
var options = new OciDistributionOptions
{
Enabled = true,
DefaultRegistry = null
};
var client = CreateClient(options);
Assert.False(client.IsEnabled);
}
[Fact]
public void BuildExportReference_WithTenantAndRun_ReturnsValidReference()
{
var client = CreateClient(_defaultOptions);
var tenantId = Guid.Parse("12345678-1234-1234-1234-123456789012");
var runId = Guid.Parse("abcdefab-abcd-abcd-abcd-abcdefabcdef");
var reference = client.BuildExportReference(tenantId, runId);
Assert.Equal("registry.example.com", reference.Registry);
Assert.Equal("exports/12345678123412341234123456789012", reference.Repository);
Assert.Equal("abcdefababcdabcdabcdabcdefabcdef", reference.Tag);
}
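// The expected repository and tag are simply the GUIDs rendered with the "N"
// format specifier (32 hex digits, no dashes):
// Guid.Parse("12345678-1234-1234-1234-123456789012").ToString("N")
//   => "12345678123412341234123456789012"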
[Fact]
public void BuildExportReference_WithCustomTag_UsesTag()
{
var client = CreateClient(_defaultOptions);
var tenantId = Guid.NewGuid();
var runId = Guid.NewGuid();
var reference = client.BuildExportReference(tenantId, runId, "v1.0.0");
Assert.Equal("v1.0.0", reference.Tag);
}
[Fact]
public void BuildExportReference_WithoutDefaultRegistry_Throws()
{
var options = new OciDistributionOptions
{
Enabled = true,
DefaultRegistry = null
};
var client = CreateClient(options);
Assert.Throws<InvalidOperationException>(() =>
client.BuildExportReference(Guid.NewGuid(), Guid.NewGuid()));
}
[Fact]
public void GetAuthorization_WithRegistryAuth_ReturnsSpecificAuth()
{
var options = new OciDistributionOptions
{
Enabled = true,
DefaultRegistry = "registry.example.com",
RegistryAuth = new Dictionary<string, OciRegistryAuthOptions>
{
["ghcr.io"] = new OciRegistryAuthOptions
{
Username = "user",
Password = "pass"
}
}
};
var client = CreateClient(options);
var auth = client.GetAuthorization("ghcr.io");
Assert.Equal(OciRegistryAuthMode.Basic, auth.Mode);
Assert.Equal("user", auth.Username);
}
[Fact]
public void GetAuthorization_WithDefaultAuth_ReturnsDefault()
{
var options = new OciDistributionOptions
{
Enabled = true,
DefaultRegistry = "registry.example.com",
Authentication = new OciRegistryAuthOptions
{
IdentityToken = "token123"
}
};
var client = CreateClient(options);
var auth = client.GetAuthorization("other-registry.io");
Assert.Equal(OciRegistryAuthMode.IdentityToken, auth.Mode);
Assert.Equal("token123", auth.IdentityToken);
}
[Fact]
public void GetAuthorization_WithNoAuth_ReturnsAnonymous()
{
var client = CreateClient(_defaultOptions);
var auth = client.GetAuthorization("public-registry.io");
Assert.Equal(OciRegistryAuthMode.Anonymous, auth.Mode);
}
[Fact]
public async Task PushAsync_WhenDisabled_ReturnsError()
{
var options = new OciDistributionOptions
{
Enabled = false,
DefaultRegistry = "registry.example.com"
};
var client = CreateClient(options);
var result = await client.PushAsync(new OciPushRequest
{
Reference = "registry.example.com/test:v1",
Layers = []
});
Assert.False(result.Success);
Assert.Equal("ERR_OCI_DISABLED", result.ErrorCode);
}
[Fact]
public async Task PushAsync_WithInvalidReference_ReturnsError()
{
var client = CreateClient(_defaultOptions);
var result = await client.PushAsync(new OciPushRequest
{
Reference = "",
Layers = []
});
Assert.False(result.Success);
Assert.Equal("ERR_OCI_INVALID_REF", result.ErrorCode);
}
private static OciDistributionClient CreateClient(OciDistributionOptions options)
{
return new OciDistributionClient(
new TestHttpClientFactory(),
Options.Create(options),
NullLogger<OciDistributionClient>.Instance);
}
private sealed class TestHttpClientFactory : IHttpClientFactory
{
public HttpClient CreateClient(string name) => new();
}
}

View File

@@ -0,0 +1,207 @@
using StellaOps.ExportCenter.WebService.Distribution.Oci;
namespace StellaOps.ExportCenter.Tests.Distribution.Oci;
public class OciImageReferenceTests
{
[Theory]
[InlineData("docker.io/library/nginx:latest", "docker.io", "library/nginx", "latest", null)]
[InlineData("ghcr.io/stellaops/exports:v1.0.0", "ghcr.io", "stellaops/exports", "v1.0.0", null)]
[InlineData("registry.example.com/repo/image:tag", "registry.example.com", "repo/image", "tag", null)]
[InlineData("localhost:5000/test:dev", "localhost:5000", "test", "dev", null)]
public void Parse_WithTaggedReference_ExtractsComponents(
string reference, string expectedRegistry, string expectedRepo, string expectedTag, string? expectedDigest)
{
var result = OciImageReference.Parse(reference);
Assert.NotNull(result);
Assert.Equal(expectedRegistry, result.Registry);
Assert.Equal(expectedRepo, result.Repository);
Assert.Equal(expectedTag, result.Tag);
Assert.Equal(expectedDigest, result.Digest);
}
[Fact]
public void Parse_WithDigest_ExtractsDigest()
{
var reference = "ghcr.io/stellaops/exports@sha256:abc123def456";
var result = OciImageReference.Parse(reference);
Assert.NotNull(result);
Assert.Equal("ghcr.io", result.Registry);
Assert.Equal("stellaops/exports", result.Repository);
Assert.Null(result.Tag);
Assert.Equal("sha256:abc123def456", result.Digest);
Assert.True(result.HasDigest);
}
[Fact]
public void Parse_WithoutTag_UsesDefaultLatest()
{
var reference = "ghcr.io/stellaops/exports";
var result = OciImageReference.Parse(reference);
Assert.NotNull(result);
Assert.Null(result.Tag);
Assert.False(result.HasTag);
Assert.Contains(":latest", result.Canonical);
}
[Fact]
public void Parse_WithHttpScheme_SetsSchemeCorrectly()
{
var reference = "http://localhost:5000/test:dev";
var result = OciImageReference.Parse(reference);
Assert.NotNull(result);
Assert.Equal("http", result.Scheme);
Assert.Equal("localhost:5000", result.Registry);
}
[Fact]
public void Parse_WithoutRegistry_UsesDefault()
{
var reference = "nginx:latest";
var result = OciImageReference.Parse(reference, "docker.io");
Assert.NotNull(result);
Assert.Equal("docker.io", result.Registry);
Assert.Equal("library/nginx", result.Repository);
}
[Fact]
public void Parse_WithUserRepo_UsesDockerDefault()
{
var reference = "stellaops/scanner:v2";
var result = OciImageReference.Parse(reference, "docker.io");
Assert.NotNull(result);
Assert.Equal("docker.io", result.Registry);
Assert.Equal("stellaops/scanner", result.Repository);
Assert.Equal("v2", result.Tag);
}
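// The two tests above encode the Docker Hub convention (an assumption
// mirrored from Docker's reference rules, not this parser's source):
// single-segment names on docker.io gain the "library/" prefix, while
// multi-segment names pass through unchanged.
static string NormalizeDockerRepository(string registry, string repository) =>
registry == "docker.io" && !repository.Contains('/')
? $"library/{repository}"
: repository;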
[Fact]
public void Parse_EmptyString_ReturnsNull()
{
var result = OciImageReference.Parse("");
Assert.Null(result);
}
[Fact]
public void Parse_WhitespaceOnly_ReturnsNull()
{
var result = OciImageReference.Parse(" ");
Assert.Null(result);
}
[Fact]
public void Canonical_WithTag_ReturnsCorrectFormat()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports:v1.0.0");
Assert.NotNull(reference);
Assert.Equal("ghcr.io/stellaops/exports:v1.0.0", reference.Canonical);
}
[Fact]
public void Canonical_WithDigest_ReturnsDigestFormat()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports@sha256:abc123");
Assert.NotNull(reference);
Assert.Equal("ghcr.io/stellaops/exports@sha256:abc123", reference.Canonical);
}
[Fact]
public void WithDigest_CreatesNewReferenceWithDigest()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports:v1.0.0")!;
var withDigest = reference.WithDigest("sha256:abc123");
Assert.Equal("sha256:abc123", withDigest.Digest);
Assert.Null(withDigest.Tag);
Assert.Equal("ghcr.io/stellaops/exports@sha256:abc123", withDigest.Canonical);
}
[Fact]
public void WithTag_CreatesNewReferenceWithTag()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports@sha256:abc123")!;
var withTag = reference.WithTag("v2.0.0");
Assert.Equal("v2.0.0", withTag.Tag);
Assert.Null(withTag.Digest);
Assert.Equal("ghcr.io/stellaops/exports:v2.0.0", withTag.Canonical);
}
[Fact]
public void ForExport_CreatesCorrectReference()
{
var tenantId = Guid.Parse("12345678-1234-1234-1234-123456789012");
var runId = Guid.Parse("abcdefab-abcd-abcd-abcd-abcdefabcdef");
var reference = OciImageReference.ForExport(
"ghcr.io",
"stellaops/exports",
tenantId,
runId);
Assert.Equal("ghcr.io", reference.Registry);
Assert.Equal("stellaops/exports/12345678123412341234123456789012", reference.Repository);
Assert.Equal("abcdefababcdabcdabcdabcdefabcdef", reference.Tag);
}
[Fact]
public void ForExport_WithEmptyPrefix_OmitsPrefix()
{
var tenantId = Guid.Parse("12345678-1234-1234-1234-123456789012");
var runId = Guid.NewGuid();
var reference = OciImageReference.ForExport(
"registry.example.com",
"",
tenantId,
runId);
Assert.Equal("12345678123412341234123456789012", reference.Repository);
}
[Fact]
public void ForExport_WithCustomTag_UsesTag()
{
var reference = OciImageReference.ForExport(
"ghcr.io",
"exports",
Guid.NewGuid(),
Guid.NewGuid(),
"latest");
Assert.Equal("latest", reference.Tag);
}
[Fact]
public void RepositoryReference_ReturnsWithoutTagOrDigest()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports:v1.0.0")!;
Assert.Equal("ghcr.io/stellaops/exports", reference.RepositoryReference);
}
[Fact]
public void ToString_ReturnsSameAsCanonical()
{
var reference = OciImageReference.Parse("ghcr.io/stellaops/exports:v1.0.0")!;
Assert.Equal(reference.Canonical, reference.ToString());
}
}

View File

@@ -0,0 +1,155 @@
using StellaOps.ExportCenter.WebService.Distribution.Oci;
namespace StellaOps.ExportCenter.Tests.Distribution.Oci;
public class OciRegistryAuthTests
{
[Fact]
public void FromOptions_WithBasicAuth_SetsBasicMode()
{
var options = new OciRegistryAuthOptions
{
Username = "testuser",
Password = "testpass"
};
var auth = OciRegistryAuthorization.FromOptions("registry.example.com", options);
Assert.Equal(OciRegistryAuthMode.Basic, auth.Mode);
Assert.Equal("testuser", auth.Username);
Assert.Equal("testpass", auth.Password);
Assert.Equal("registry.example.com", auth.Registry);
}
[Fact]
public void FromOptions_WithIdentityToken_SetsIdentityTokenMode()
{
var options = new OciRegistryAuthOptions
{
IdentityToken = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9..."
};
var auth = OciRegistryAuthorization.FromOptions("ghcr.io", options);
Assert.Equal(OciRegistryAuthMode.IdentityToken, auth.Mode);
Assert.Equal("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9...", auth.IdentityToken);
}
[Fact]
public void FromOptions_WithRefreshToken_SetsRefreshTokenMode()
{
var options = new OciRegistryAuthOptions
{
RefreshToken = "refresh_token_value"
};
var auth = OciRegistryAuthorization.FromOptions("registry.example.com", options);
Assert.Equal(OciRegistryAuthMode.RefreshToken, auth.Mode);
Assert.Equal("refresh_token_value", auth.RefreshToken);
}
[Fact]
public void FromOptions_WithNoCredentials_SetsAnonymousMode()
{
var options = new OciRegistryAuthOptions();
var auth = OciRegistryAuthorization.FromOptions("public.ecr.aws", options);
Assert.Equal(OciRegistryAuthMode.Anonymous, auth.Mode);
}
[Fact]
public void FromOptions_IdentityTokenTakesPrecedence()
{
var options = new OciRegistryAuthOptions
{
Username = "user",
Password = "pass",
IdentityToken = "token"
};
var auth = OciRegistryAuthorization.FromOptions("registry.example.com", options);
Assert.Equal(OciRegistryAuthMode.IdentityToken, auth.Mode);
}
[Fact]
public void Anonymous_CreatesAnonymousAuth()
{
var auth = OciRegistryAuthorization.Anonymous("public-registry.io");
Assert.Equal(OciRegistryAuthMode.Anonymous, auth.Mode);
Assert.Equal("public-registry.io", auth.Registry);
Assert.True(auth.AllowAnonymousFallback);
}
[Fact]
public void ApplyTo_WithBasicAuth_SetsAuthorizationHeader()
{
var auth = new OciRegistryAuthorization
{
Registry = "registry.example.com",
Mode = OciRegistryAuthMode.Basic,
Username = "user",
Password = "pass"
};
using var request = new HttpRequestMessage(HttpMethod.Get, "https://registry.example.com/v2/");
auth.ApplyTo(request);
Assert.NotNull(request.Headers.Authorization);
Assert.Equal("Basic", request.Headers.Authorization.Scheme);
// Base64 of "user:pass"
Assert.Equal("dXNlcjpwYXNz", request.Headers.Authorization.Parameter);
}
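// Where "dXNlcjpwYXNz" comes from:
// Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes("user:pass"))
//   => "dXNlcjpwYXNz"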
[Fact]
public void ApplyTo_WithBearerToken_SetsAuthorizationHeader()
{
var auth = new OciRegistryAuthorization
{
Registry = "ghcr.io",
Mode = OciRegistryAuthMode.IdentityToken,
IdentityToken = "my-bearer-token"
};
using var request = new HttpRequestMessage(HttpMethod.Get, "https://ghcr.io/v2/");
auth.ApplyTo(request);
Assert.NotNull(request.Headers.Authorization);
Assert.Equal("Bearer", request.Headers.Authorization.Scheme);
Assert.Equal("my-bearer-token", request.Headers.Authorization.Parameter);
}
[Fact]
public void ApplyTo_WithAnonymous_NoAuthorizationHeader()
{
var auth = OciRegistryAuthorization.Anonymous("public.ecr.aws");
using var request = new HttpRequestMessage(HttpMethod.Get, "https://public.ecr.aws/v2/");
auth.ApplyTo(request);
Assert.Null(request.Headers.Authorization);
}
[Fact]
public void ApplyTo_WithBasicAuthEmptyPassword_UsesEmptyPassword()
{
var auth = new OciRegistryAuthorization
{
Registry = "registry.example.com",
Mode = OciRegistryAuthMode.Basic,
Username = "user",
Password = null
};
using var request = new HttpRequestMessage(HttpMethod.Get, "https://registry.example.com/v2/");
auth.ApplyTo(request);
Assert.NotNull(request.Headers.Authorization);
Assert.Equal("Basic", request.Headers.Authorization.Scheme);
// Base64 of "user:"
Assert.Equal("dXNlcjo=", request.Headers.Authorization.Parameter);
}
}

View File

@@ -0,0 +1,65 @@
using System.Security.Cryptography;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Tests;
/// <summary>
/// Fake HMAC implementation for testing.
/// </summary>
internal sealed class FakeCryptoHmac : ICryptoHmac
{
public byte[] ComputeHmacForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
// Deterministic stand-in for tests: SHA-256 over key || data. This is not a
// real keyed HMAC and must never be used outside test code.
var combined = new byte[key.Length + data.Length];
key.CopyTo(combined);
data.CopyTo(combined.AsSpan(key.Length));
return SHA256.HashData(combined);
}
public string ComputeHmacHexForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
return Convert.ToHexStringLower(ComputeHmacForPurpose(key, data, purpose));
}
public string ComputeHmacBase64ForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
return Convert.ToBase64String(ComputeHmacForPurpose(key, data, purpose));
}
public async ValueTask<byte[]> ComputeHmacForPurposeAsync(ReadOnlyMemory<byte> key, Stream stream, string purpose, CancellationToken cancellationToken = default)
{
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
return ComputeHmacForPurpose(key.Span, ms.ToArray(), purpose);
}
public async ValueTask<string> ComputeHmacHexForPurposeAsync(ReadOnlyMemory<byte> key, Stream stream, string purpose, CancellationToken cancellationToken = default)
{
var hmac = await ComputeHmacForPurposeAsync(key, stream, purpose, cancellationToken);
return Convert.ToHexStringLower(hmac);
}
public bool VerifyHmacForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, ReadOnlySpan<byte> expectedHmac, string purpose)
{
var computed = ComputeHmacForPurpose(key, data, purpose);
return computed.AsSpan().SequenceEqual(expectedHmac);
}
public bool VerifyHmacHexForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string expectedHmacHex, string purpose)
{
var computed = ComputeHmacHexForPurpose(key, data, purpose);
return string.Equals(computed, expectedHmacHex, StringComparison.OrdinalIgnoreCase);
}
public bool VerifyHmacBase64ForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string expectedHmacBase64, string purpose)
{
var computed = ComputeHmacBase64ForPurpose(key, data, purpose);
return string.Equals(computed, expectedHmacBase64, StringComparison.Ordinal);
}
public string GetAlgorithmForPurpose(string purpose) => "HMAC-SHA256";
public int GetOutputLengthForPurpose(string purpose) => 32; // SHA256 output length
}
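// Note: the fake above hashes key || data, which is deterministic but not a real
// keyed MAC. A production ICryptoHmac would presumably delegate to a keyed
// construction instead; a minimal sketch (an assumption, not the actual
// StellaOps.Cryptography implementation):
internal static class Hmac256Sketch
{
public static byte[] Compute(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data)
=> HMACSHA256.HashData(key, data);
}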

View File

@@ -26,6 +26,7 @@ public class HmacDevPortalOfflineManifestSignerTests
var now = new DateTimeOffset(2025, 11, 4, 19, 0, 0, TimeSpan.Zero);
var signer = new HmacDevPortalOfflineManifestSigner(
new StaticOptionsMonitor<DevPortalOfflineManifestSigningOptions>(options),
new FakeCryptoHmac(),
new FixedTimeProvider(now),
NullLogger<HmacDevPortalOfflineManifestSigner>.Instance);
@@ -63,6 +64,7 @@ public class HmacDevPortalOfflineManifestSignerTests
var signer = new HmacDevPortalOfflineManifestSigner(
new StaticOptionsMonitor<DevPortalOfflineManifestSigningOptions>(options),
new FakeCryptoHmac(),
new FixedTimeProvider(DateTimeOffset.UtcNow),
NullLogger<HmacDevPortalOfflineManifestSigner>.Instance);

View File

@@ -0,0 +1,483 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Manifest;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Manifest;
public sealed class ExportManifestWriterTests : IDisposable
{
private readonly string _tempDir;
private readonly ExportManifestWriter _writer;
private readonly FakeCryptoHmac _cryptoHmac;
public ExportManifestWriterTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"manifest-tests-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHmac = new FakeCryptoHmac();
_writer = new ExportManifestWriter(
NullLogger<ExportManifestWriter>.Instance,
cryptoRegistry: null,
cryptoHmac: _cryptoHmac,
timeProvider: TimeProvider.System);
}
public void Dispose()
{
try { Directory.Delete(_tempDir, recursive: true); }
catch { /* ignore cleanup errors */ }
}
[Fact]
public async Task WriteAsync_WritesManifestFile()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.True(File.Exists(result.ManifestPath));
Assert.False(string.IsNullOrEmpty(result.ManifestJson));
}
[Fact]
public async Task WriteAsync_WritesProvenanceFile()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.True(File.Exists(result.ProvenancePath));
Assert.False(string.IsNullOrEmpty(result.ProvenanceJson));
}
[Fact]
public async Task WriteAsync_ManifestContainsExpectedFields()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
Assert.Equal("v1", manifest.GetProperty("version").GetString());
Assert.Equal(request.ExportId.ToString(), manifest.GetProperty("exportId").GetString());
Assert.Equal(request.TenantId.ToString(), manifest.GetProperty("tenantId").GetString());
}
[Fact]
public async Task WriteAsync_ProvenanceContainsExpectedFields()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
var provenance = JsonSerializer.Deserialize<JsonElement>(result.ProvenanceJson!);
Assert.Equal("v1", provenance.GetProperty("version").GetString());
Assert.Equal(request.ExportId.ToString(), provenance.GetProperty("exportId").GetString());
Assert.True(provenance.TryGetProperty("subjects", out _));
Assert.True(provenance.TryGetProperty("builder", out _));
}
[Fact]
public async Task WriteAsync_WithNoSigning_NoSignatureInOutput()
{
var request = CreateRequest(signingOptions: null);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.Null(result.ManifestSignature);
Assert.Null(result.ProvenanceSignature);
Assert.Null(result.DetachedSignaturePath);
}
[Fact]
public async Task WriteAsync_WithEmbeddedSigning_SignatureInManifest()
{
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Embedded,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var request = CreateRequest(signingOptions: signingOptions);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.ManifestSignature);
Assert.Equal("test-key-id", result.ManifestSignature.KeyId);
Assert.Equal("HMAC-SHA256", result.ManifestSignature.Algorithm);
// Verify signature is embedded in JSON
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
Assert.True(manifest.TryGetProperty("signature", out var sigElement));
Assert.Equal("test-key-id", sigElement.GetProperty("keyId").GetString());
}
[Fact]
public async Task WriteAsync_WithDetachedSigning_CreatesSignatureFile()
{
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Detached,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var request = CreateRequest(signingOptions: signingOptions);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.DetachedSignaturePath);
Assert.True(File.Exists(result.DetachedSignaturePath));
// Signature should NOT be embedded when mode is Detached only
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
Assert.False(manifest.TryGetProperty("signature", out _));
}
[Fact]
public async Task WriteAsync_WithBothSigning_CreatesEmbeddedAndDetached()
{
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Both,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var request = CreateRequest(signingOptions: signingOptions);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.ManifestSignature);
Assert.NotNull(result.DetachedSignaturePath);
Assert.True(File.Exists(result.DetachedSignaturePath));
// Verify embedded signature
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
Assert.True(manifest.TryGetProperty("signature", out _));
}
[Fact]
public async Task SignManifestAsync_ReturnsDsseEnvelope()
{
var manifestJson = """{"version":"v1","exportId":"test"}""";
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Detached,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var envelope = await _writer.SignManifestAsync(manifestJson, signingOptions);
Assert.NotNull(envelope);
Assert.Equal("application/vnd.stellaops.export.manifest+json", envelope.PayloadType);
Assert.NotEmpty(envelope.Payload);
Assert.Single(envelope.Signatures);
Assert.Equal("test-key-id", envelope.Signatures[0].KeyId);
}
[Fact]
public async Task SignProvenanceAsync_ReturnsDsseEnvelope()
{
var provenanceJson = """{"version":"v1","exportId":"test"}""";
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Detached,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var envelope = await _writer.SignProvenanceAsync(provenanceJson, signingOptions);
Assert.NotNull(envelope);
Assert.Equal("application/vnd.stellaops.export.provenance+json", envelope.PayloadType);
Assert.NotEmpty(envelope.Payload);
Assert.Single(envelope.Signatures);
}
[Fact]
public async Task VerifySignatureAsync_ValidSignature_ReturnsTrue()
{
var content = """{"version":"v1","exportId":"test"}""";
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Detached,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var envelope = await _writer.SignManifestAsync(content, signingOptions);
var isValid = await _writer.VerifySignatureAsync(content, envelope, signingOptions);
Assert.True(isValid);
}
[Fact]
public async Task VerifySignatureAsync_TamperedContent_ReturnsFalse()
{
var content = """{"version":"v1","exportId":"test"}""";
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Detached,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var envelope = await _writer.SignManifestAsync(content, signingOptions);
var tamperedContent = """{"version":"v1","exportId":"tampered"}""";
var isValid = await _writer.VerifySignatureAsync(tamperedContent, envelope, signingOptions);
Assert.False(isValid);
}
[Fact]
public async Task WriteAsync_NoOutputDirectory_ReturnsJsonButNoFiles()
{
var request = new ExportManifestWriteRequest(
Guid.NewGuid(),
Guid.NewGuid(),
CreateManifestContent(),
CreateProvenanceContent(),
SigningOptions: null,
OutputDirectory: null);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.NotNull(result.ManifestJson);
Assert.NotNull(result.ProvenanceJson);
Assert.Empty(result.ManifestPath!);
Assert.Empty(result.ProvenancePath!);
}
[Fact]
public async Task WriteAsync_CreatesOutputDirectory()
{
var newDir = Path.Combine(_tempDir, "new-export");
var request = new ExportManifestWriteRequest(
Guid.NewGuid(),
Guid.NewGuid(),
CreateManifestContent(),
CreateProvenanceContent(),
SigningOptions: null,
OutputDirectory: newDir);
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
Assert.True(Directory.Exists(newDir));
}
[Fact]
public async Task WriteAsync_ManifestContainsCounts()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
var counts = manifest.GetProperty("counts");
Assert.Equal(10, counts.GetProperty("total").GetInt32());
Assert.Equal(9, counts.GetProperty("successful").GetInt32());
Assert.Equal(1, counts.GetProperty("failed").GetInt32());
}
[Fact]
public async Task WriteAsync_ManifestContainsArtifacts()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
var manifest = JsonSerializer.Deserialize<JsonElement>(result.ManifestJson!);
var artifacts = manifest.GetProperty("artifacts");
Assert.Equal(2, artifacts.GetArrayLength());
}
[Fact]
public async Task WriteAsync_ProvenanceContainsSubjects()
{
var request = CreateRequest();
var result = await _writer.WriteAsync(request);
Assert.True(result.Success);
var provenance = JsonSerializer.Deserialize<JsonElement>(result.ProvenanceJson!);
var subjects = provenance.GetProperty("subjects");
Assert.Equal(2, subjects.GetArrayLength());
}
[Fact]
public async Task WriteAsync_HmacSigning_RequiresSecret()
{
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Embedded,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: null);
var request = CreateRequest(signingOptions: signingOptions);
var result = await _writer.WriteAsync(request);
Assert.False(result.Success);
Assert.Contains("secret", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task WriteAsync_DeterministicSignatures()
{
var signingOptions = new ExportManifestSigningOptions(
ExportSignatureMode.Embedded,
ExportSigningAlgorithm.HmacSha256,
"test-key-id",
Secret: "test-secret-key-12345678901234567890");
var request = CreateRequest(signingOptions: signingOptions);
var result1 = await _writer.WriteAsync(request);
var result2 = await _writer.WriteAsync(request);
Assert.True(result1.Success);
Assert.True(result2.Success);
// Same input should produce same signature
Assert.Equal(result1.ManifestSignature!.Value, result2.ManifestSignature!.Value);
}
private ExportManifestWriteRequest CreateRequest(
ExportManifestSigningOptions? signingOptions = null)
{
return new ExportManifestWriteRequest(
Guid.NewGuid(),
Guid.NewGuid(),
CreateManifestContent(),
CreateProvenanceContent(),
signingOptions,
_tempDir);
}
private ExportManifestContent CreateManifestContent()
{
return new ExportManifestContent(
"v1",
Guid.NewGuid().ToString(),
Guid.NewGuid().ToString(),
new ExportManifestProfile(null, "mirror", "full"),
new ExportManifestScope(
["sbom", "vex"],
["product-a", "product-b"],
new ExportManifestTimeWindow(
DateTimeOffset.UtcNow.AddDays(-30),
DateTimeOffset.UtcNow),
null),
new ExportManifestCounts(10, 9, 1, 0, new Dictionary<string, int>
{
["sbom"] = 5,
["vex"] = 4
}),
[
new ExportManifestArtifact("data/sbom-001.json", "abc123", 1024, "application/json", "sbom"),
new ExportManifestArtifact("data/vex-001.json", "def456", 512, "application/json", "vex")
],
DateTimeOffset.UtcNow,
"sha256:root-hash-here");
}
private ExportProvenanceContent CreateProvenanceContent()
{
return new ExportProvenanceContent(
"v1",
Guid.NewGuid().ToString(),
Guid.NewGuid().ToString(),
[
new ExportProvenanceSubject("export-bundle.tgz", new Dictionary<string, string>
{
["sha256"] = "abc123def456"
}),
new ExportProvenanceSubject("export-manifest.json", new Dictionary<string, string>
{
["sha256"] = "789ghi012jkl"
})
],
new ExportProvenanceInputs(
"profile-001",
["sbom", "vex"],
["product-a"],
"correlation-123"),
new ExportProvenanceBuilder(
"StellaOps.ExportCenter",
"1.0.0",
DateTimeOffset.UtcNow),
DateTimeOffset.UtcNow);
}
/// <summary>
/// Fake HMAC implementation for testing.
/// </summary>
private sealed class FakeCryptoHmac : ICryptoHmac
{
public byte[] ComputeHmacForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
// Simple deterministic hash for testing
using var sha256 = System.Security.Cryptography.SHA256.Create();
var combined = new byte[key.Length + data.Length];
key.CopyTo(combined);
data.CopyTo(combined.AsSpan(key.Length));
return sha256.ComputeHash(combined);
}
public string ComputeHmacHexForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
return Convert.ToHexStringLower(ComputeHmacForPurpose(key, data, purpose));
}
public string ComputeHmacBase64ForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string purpose)
{
return Convert.ToBase64String(ComputeHmacForPurpose(key, data, purpose));
}
public async ValueTask<byte[]> ComputeHmacForPurposeAsync(ReadOnlyMemory<byte> key, Stream stream, string purpose, CancellationToken cancellationToken = default)
{
using var ms = new MemoryStream();
await stream.CopyToAsync(ms, cancellationToken);
return ComputeHmacForPurpose(key.Span, ms.ToArray(), purpose);
}
public async ValueTask<string> ComputeHmacHexForPurposeAsync(ReadOnlyMemory<byte> key, Stream stream, string purpose, CancellationToken cancellationToken = default)
{
var hmac = await ComputeHmacForPurposeAsync(key, stream, purpose, cancellationToken);
return Convert.ToHexStringLower(hmac);
}
public bool VerifyHmacForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, ReadOnlySpan<byte> expectedHmac, string purpose)
{
var computed = ComputeHmacForPurpose(key, data, purpose);
return computed.AsSpan().SequenceEqual(expectedHmac);
}
public bool VerifyHmacHexForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string expectedHmacHex, string purpose)
{
var computed = ComputeHmacHexForPurpose(key, data, purpose);
return string.Equals(computed, expectedHmacHex, StringComparison.OrdinalIgnoreCase);
}
public bool VerifyHmacBase64ForPurpose(ReadOnlySpan<byte> key, ReadOnlySpan<byte> data, string expectedHmacBase64, string purpose)
{
var computed = ComputeHmacBase64ForPurpose(key, data, purpose);
return string.Equals(computed, expectedHmacBase64, StringComparison.Ordinal);
}
public string GetAlgorithmForPurpose(string purpose) => "HMAC-SHA256";
public int GetOutputLengthForPurpose(string purpose) => 32;
}
}

View File

@@ -18,7 +18,7 @@ public sealed class MirrorBundleBuilderTests : IDisposable
{
_tempDir = Path.Combine(Path.GetTempPath(), $"mirror-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHash = new DefaultCryptoHash();
_cryptoHash = new FakeCryptoHash();
_builder = new MirrorBundleBuilder(_cryptoHash);
}
@@ -375,11 +375,12 @@ public sealed class MirrorBundleBuilderTests : IDisposable
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
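// TarEntry does not expose owner names; UserName/GroupName exist only on
// PosixTarEntry (ustar/PAX/GNU), so the as-cast leaves V7 entries with
// empty owner fields instead of failing.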
var posixEntry = entry as PosixTarEntry;
entries.Add(new TarEntryMetadata(
entry.Uid,
entry.Gid,
entry.UserName ?? string.Empty,
entry.GroupName ?? string.Empty,
posixEntry?.UserName ?? string.Empty,
posixEntry?.GroupName ?? string.Empty,
entry.ModificationTime));
}

View File

@@ -0,0 +1,422 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.Adapters;
using StellaOps.ExportCenter.Core.MirrorBundle;
using StellaOps.ExportCenter.Core.Planner;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public class MirrorDeltaAdapterTests : IDisposable
{
private readonly ICryptoHash _cryptoHash;
private readonly InMemoryMirrorBaseManifestStore _manifestStore;
private readonly InMemoryMirrorContentStore _contentStore;
private readonly MirrorDeltaService _deltaService;
private readonly MirrorDeltaAdapter _adapter;
private readonly string _tempDir;
public MirrorDeltaAdapterTests()
{
_cryptoHash = new FakeCryptoHash();
_manifestStore = new InMemoryMirrorBaseManifestStore();
_contentStore = new InMemoryMirrorContentStore(_cryptoHash);
_deltaService = new MirrorDeltaService(_manifestStore, NullLogger<MirrorDeltaService>.Instance);
_adapter = new MirrorDeltaAdapter(
NullLogger<MirrorDeltaAdapter>.Instance,
_cryptoHash,
_deltaService,
_manifestStore,
_contentStore);
_tempDir = Path.Combine(Path.GetTempPath(), $"mirror-delta-tests-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
}
public void Dispose()
{
_contentStore.Clear();
_manifestStore.Clear();
if (Directory.Exists(_tempDir))
{
try { Directory.Delete(_tempDir, true); } catch { }
}
}
[Fact]
public void AdapterId_IsMirrorDelta()
{
Assert.Equal("mirror:delta", _adapter.AdapterId);
}
[Fact]
public void DisplayName_IsMirrorDeltaBundle()
{
Assert.Equal("Mirror Delta Bundle", _adapter.DisplayName);
}
[Fact]
public void SupportedFormats_ContainsMirror()
{
Assert.Contains(ExportFormat.Mirror, _adapter.SupportedFormats);
}
[Fact]
public void SupportsStreaming_IsFalse()
{
Assert.False(_adapter.SupportsStreaming);
}
[Fact]
public async Task ValidateConfigAsync_WithMissingOutputDirectory_ReturnsError()
{
var config = new ExportAdapterConfig
{
AdapterId = "mirror:delta",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Mirror },
OutputDirectory = ""
};
var errors = await _adapter.ValidateConfigAsync(config);
Assert.NotEmpty(errors);
Assert.Contains(errors, e => e.Contains("Output directory"));
}
[Fact]
public async Task ValidateConfigAsync_WithValidConfig_ReturnsNoErrors()
{
var config = new ExportAdapterConfig
{
AdapterId = "mirror:delta",
FormatOptions = new ExportFormatOptions { Format = ExportFormat.Mirror },
OutputDirectory = _tempDir
};
var errors = await _adapter.ValidateConfigAsync(config);
Assert.Empty(errors);
}
[Fact]
public async Task ComputeDeltaAsync_WithNoBaseManifest_ReturnsAllItemsAsAdded()
{
var tenantId = Guid.NewGuid();
var baseRunId = Guid.NewGuid();
var items = new List<MirrorDeltaItem>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
ContentHash = "hash1",
BundlePath = "data/advisories/item-1.json",
SizeBytes = 100
},
new()
{
ItemId = "item-2",
Category = MirrorBundleDataCategory.Vex,
ContentHash = "hash2",
BundlePath = "data/vex/item-2.json",
SizeBytes = 200
}
};
var request = new MirrorDeltaComputeRequest
{
BaseRunId = baseRunId,
BaseManifestDigest = "digest123",
TenantId = tenantId,
CurrentItems = items
};
var result = await _deltaService.ComputeDeltaAsync(request);
Assert.True(result.Success);
Assert.Equal(2, result.AddedItems.Count);
Assert.Empty(result.ChangedItems);
Assert.Empty(result.RemovedItems);
Assert.Empty(result.UnchangedItems);
}
[Fact]
public async Task ComputeDeltaAsync_WithBaseManifest_DetectsChanges()
{
var tenantId = Guid.NewGuid();
var baseRunId = Guid.NewGuid();
// Store base manifest
var baseEntries = new List<MirrorBaseManifestEntry>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
BundlePath = "data/advisories/item-1.json",
ContentHash = "old-hash-1",
SizeBytes = 100
},
new()
{
ItemId = "item-2",
Category = MirrorBundleDataCategory.Vex,
BundlePath = "data/vex/item-2.json",
ContentHash = "hash-2",
SizeBytes = 200
},
new()
{
ItemId = "item-3",
Category = MirrorBundleDataCategory.Sbom,
BundlePath = "data/sbom/item-3.json",
ContentHash = "hash-3",
SizeBytes = 300
}
};
await _manifestStore.SaveManifestEntriesAsync(
baseRunId, tenantId, "digest123", baseEntries);
// Current items: item-1 changed, item-2 unchanged, item-3 removed, item-4 added
var currentItems = new List<MirrorDeltaItem>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
ContentHash = "new-hash-1", // Changed
BundlePath = "data/advisories/item-1.json",
SizeBytes = 150
},
new()
{
ItemId = "item-2",
Category = MirrorBundleDataCategory.Vex,
ContentHash = "hash-2", // Unchanged
BundlePath = "data/vex/item-2.json",
SizeBytes = 200
},
new()
{
ItemId = "item-4",
Category = MirrorBundleDataCategory.Advisories,
ContentHash = "hash-4", // New
BundlePath = "data/advisories/item-4.json",
SizeBytes = 400
}
};
var request = new MirrorDeltaComputeRequest
{
BaseRunId = baseRunId,
BaseManifestDigest = "digest123",
TenantId = tenantId,
CurrentItems = currentItems
};
var result = await _deltaService.ComputeDeltaAsync(request);
Assert.True(result.Success);
// Added: item-4
Assert.Single(result.AddedItems);
Assert.Contains(result.AddedItems, i => i.ItemId == "item-4");
// Changed: item-1
Assert.Single(result.ChangedItems);
Assert.Contains(result.ChangedItems, c => c.Current.ItemId == "item-1");
Assert.Equal("old-hash-1", result.ChangedItems[0].PreviousContentHash);
// Unchanged: item-2
Assert.Single(result.UnchangedItems);
Assert.Contains(result.UnchangedItems, i => i.ItemId == "item-2");
// Removed: item-3
Assert.Single(result.RemovedItems);
Assert.Contains(result.RemovedItems, r => r.ItemId == "item-3");
}
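// The classification exercised above is a keyed diff on ItemId with ContentHash
// as the change signal. A minimal sketch of that logic (assumed shape only; the
// real MirrorDeltaService may differ in details such as ordering and the
// changed-item record type):
private static (List<MirrorDeltaItem> Added, List<MirrorDeltaItem> Changed, List<MirrorDeltaItem> Unchanged, List<MirrorBaseManifestEntry> Removed) ClassifySketch(
IReadOnlyList<MirrorBaseManifestEntry> baseline,
IReadOnlyList<MirrorDeltaItem> current)
{
var baseById = baseline.ToDictionary(e => e.ItemId);
var added = new List<MirrorDeltaItem>();
var changed = new List<MirrorDeltaItem>();
var unchanged = new List<MirrorDeltaItem>();
foreach (var item in current)
{
if (!baseById.Remove(item.ItemId, out var prior))
{
added.Add(item); // no baseline entry: newly added
}
else if (!string.Equals(prior.ContentHash, item.ContentHash, StringComparison.Ordinal))
{
changed.Add(item); // hash differs from baseline: changed
}
else
{
unchanged.Add(item);
}
}
// Anything left in the baseline map has no current counterpart: removed.
return (added, changed, unchanged, baseById.Values.ToList());
}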
[Fact]
public async Task ComputeDeltaAsync_WithResetBaseline_ReturnsAllAsAdded()
{
var tenantId = Guid.NewGuid();
var baseRunId = Guid.NewGuid();
// Store base manifest
var baseEntries = new List<MirrorBaseManifestEntry>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
BundlePath = "data/advisories/item-1.json",
ContentHash = "hash-1",
SizeBytes = 100
}
};
await _manifestStore.SaveManifestEntriesAsync(
baseRunId, tenantId, "digest123", baseEntries);
var currentItems = new List<MirrorDeltaItem>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
ContentHash = "hash-1", // Same hash
BundlePath = "data/advisories/item-1.json",
SizeBytes = 100
}
};
var request = new MirrorDeltaComputeRequest
{
BaseRunId = baseRunId,
BaseManifestDigest = "digest123",
TenantId = tenantId,
CurrentItems = currentItems,
ResetBaseline = true // Force include all items
};
var result = await _deltaService.ComputeDeltaAsync(request);
Assert.True(result.Success);
Assert.True(result.BaselineReset);
Assert.Single(result.AddedItems);
Assert.Empty(result.ChangedItems);
Assert.Empty(result.RemovedItems);
Assert.Empty(result.UnchangedItems);
}
[Fact]
public async Task ComputeDeltaAsync_WithDigestMismatch_ReturnsError()
{
var tenantId = Guid.NewGuid();
var baseRunId = Guid.NewGuid();
// Store base manifest with different digest
await _manifestStore.SaveManifestEntriesAsync(
baseRunId, tenantId, "stored-digest", new List<MirrorBaseManifestEntry>
{
new()
{
ItemId = "item-1",
Category = MirrorBundleDataCategory.Advisories,
BundlePath = "data/advisories/item-1.json",
ContentHash = "hash-1",
SizeBytes = 100
}
});
var request = new MirrorDeltaComputeRequest
{
BaseRunId = baseRunId,
BaseManifestDigest = "different-digest", // Mismatch
TenantId = tenantId,
CurrentItems = new List<MirrorDeltaItem>()
};
var result = await _deltaService.ComputeDeltaAsync(request);
Assert.False(result.Success);
Assert.Contains("mismatch", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task ContentStore_StoresAndRetrieves()
{
var content = "test content"u8.ToArray();
using var stream = new MemoryStream(content);
var hash = await _contentStore.StoreAsync(stream);
Assert.False(string.IsNullOrEmpty(hash));
Assert.True(await _contentStore.ExistsAsync(hash));
using var retrieved = await _contentStore.GetAsync(hash);
Assert.NotNull(retrieved);
using var ms = new MemoryStream();
await retrieved.CopyToAsync(ms);
Assert.Equal(content, ms.ToArray());
}
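// StoreAsync behaves as a content-addressed store: the returned handle is the
// content hash, so ExistsAsync/GetAsync resolve by hash alone and identical
// bytes map to the same key. (Inferred from the assertions in this class; the
// in-memory store's internals are not shown here.)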
[Fact]
public async Task ContentStore_GetLocalPath_ReturnsPathForStoredContent()
{
var content = "test content"u8.ToArray();
using var stream = new MemoryStream(content);
var hash = await _contentStore.StoreAsync(stream);
var localPath = _contentStore.GetLocalPath(hash);
Assert.NotNull(localPath);
Assert.True(File.Exists(localPath));
}
[Fact]
public void ContentStore_GetLocalPath_ReturnsNullForMissingContent()
{
var localPath = _contentStore.GetLocalPath("nonexistent-hash");
Assert.Null(localPath);
}
private sealed class FakeCryptoHash : ICryptoHash
{
public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
{
return System.Security.Cryptography.SHA256.HashData(data);
}
public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToBase64String(hash);
}
public ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
return new ValueTask<byte[]>(System.Security.Cryptography.SHA256.HashData(stream));
}
public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
var hash = await ComputeHashAsync(stream, algorithmId, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHash(data, null);
public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashHex(data, null);
public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashBase64(data, null);
public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashAsync(stream, null, cancellationToken);
public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashHexAsync(stream, null, cancellationToken);
public string GetAlgorithmForPurpose(string purpose) => "sha256";
public string GetHashPrefix(string purpose) => "sha256:";
public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> GetHashPrefix(purpose) + ComputeHashHexForPurpose(data, purpose);
}
}

View File

@@ -0,0 +1,543 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.PackRun;
using Xunit;
namespace StellaOps.ExportCenter.Tests.PackRun;
public class PackRunIntegrationServiceTests
{
private readonly InMemoryPackRunDataStore _dataStore;
private readonly InMemoryPackRunExportStore _exportStore;
private readonly PackRunIntegrationService _service;
private readonly TimeProvider _timeProvider;
private readonly string _tenantId = Guid.NewGuid().ToString();
public PackRunIntegrationServiceTests()
{
_dataStore = new InMemoryPackRunDataStore();
_exportStore = new InMemoryPackRunExportStore();
_timeProvider = TimeProvider.System;
_service = new PackRunIntegrationService(
_dataStore,
_exportStore,
NullLogger<PackRunIntegrationService>.Instance,
_timeProvider);
}
[Fact]
public async Task IntegrateAsync_WithValidPackRun_ReturnsSuccess()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test artifact content"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash);
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Reference);
Assert.Equal(packRunId, result.Reference.RunId);
Assert.Single(result.IntegratedArtifacts);
}
[Fact]
public async Task IntegrateAsync_WithNonExistentPackRun_ReturnsNotFoundError()
{
// Arrange
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = "non-existent",
ExportRunId = Guid.NewGuid().ToString()
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.False(result.Success);
Assert.Equal(PackRunIntegrationErrors.PackRunNotFound, result.ErrorCode);
}
[Fact]
public async Task IntegrateAsync_WithTenantMismatch_ReturnsNotFoundError()
{
// Arrange
// When a pack run exists under a different tenant, we should not reveal
// its existence - return "not found" instead of "tenant mismatch" for security
var packRunId = Guid.NewGuid().ToString();
var differentTenantId = Guid.NewGuid().ToString();
_dataStore.AddStatus(new PackRunStatusInfo
{
RunId = packRunId,
TenantId = differentTenantId, // Different tenant
PlanHash = "sha256:abc123",
Status = "Completed"
});
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId, // Request with original tenant
PackRunId = packRunId,
ExportRunId = Guid.NewGuid().ToString()
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.False(result.Success);
// Tenant mismatch returns "not found" for security (don't reveal pack run exists under other tenant)
Assert.Equal(PackRunIntegrationErrors.PackRunNotFound, result.ErrorCode);
}
[Fact]
public async Task IntegrateAsync_WithArtifactFilter_FiltersArtifacts()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
_dataStore.AddStatus(new PackRunStatusInfo
{
RunId = packRunId,
TenantId = _tenantId,
PlanHash = "sha256:abc123",
Status = "Completed"
});
var content1 = "content1"u8.ToArray();
var content2 = "content2"u8.ToArray();
var hash1 = ComputeHash(content1);
var hash2 = ComputeHash(content2);
_dataStore.AddArtifact(_tenantId, packRunId, new PackRunExportArtifact
{
Name = "artifact1.txt",
Path = "artifacts/artifact1.txt",
Sha256 = hash1,
SizeBytes = content1.Length,
MediaType = "text/plain"
}, content1);
_dataStore.AddArtifact(_tenantId, packRunId, new PackRunExportArtifact
{
Name = "artifact2.txt",
Path = "artifacts/artifact2.txt",
Sha256 = hash2,
SizeBytes = content2.Length,
MediaType = "text/plain"
}, content2);
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId,
ArtifactFilter = ["artifact1.txt"]
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.True(result.Success);
Assert.Single(result.IntegratedArtifacts);
Assert.Contains(result.IntegratedArtifacts, a => a.SourcePath == "artifacts/artifact1.txt");
}
[Fact]
public async Task IntegrateAsync_CreatesProvenanceLink()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test content"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash, includeEvidence: true);
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Reference?.ProvenanceLink);
Assert.Equal(packRunId, result.Reference.ProvenanceLink.PackRunId);
Assert.Equal(exportRunId, result.Reference.ProvenanceLink.ExportRunId);
}
[Fact]
public async Task ListReferencesAsync_ReturnsAllReferences()
{
// Arrange
var packRunId1 = Guid.NewGuid().ToString();
var packRunId2 = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId1, content, hash);
SetupPackRun(packRunId2, content, hash);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId1,
ExportRunId = exportRunId
});
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId2,
ExportRunId = exportRunId
});
// Act
var references = await _service.ListReferencesAsync(_tenantId, exportRunId);
// Assert
Assert.Equal(2, references.Count);
}
[Fact]
public async Task GetReferenceAsync_WithExistingReference_ReturnsReference()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
});
// Act
var reference = await _service.GetReferenceAsync(_tenantId, exportRunId, packRunId);
// Assert
Assert.NotNull(reference);
Assert.Equal(packRunId, reference.RunId);
}
[Fact]
public async Task GetReferenceAsync_WithNonExistentReference_ReturnsNull()
{
// Act
var reference = await _service.GetReferenceAsync(
_tenantId,
Guid.NewGuid().ToString(),
Guid.NewGuid().ToString());
// Assert
Assert.Null(reference);
}
[Fact]
public async Task VerifyAsync_WithValidArtifacts_ReturnsValid()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test content for verification"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
});
var request = new PackRunVerificationRequest
{
TenantId = _tenantId,
ExportRunId = exportRunId,
VerifyHashes = true
};
// Act
var result = await _service.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
Assert.Single(result.HashResults);
Assert.True(result.HashResults[0].IsValid);
}
[Fact]
public async Task VerifyAsync_WithMissingReference_ReturnsInvalid()
{
// Arrange
var request = new PackRunVerificationRequest
{
TenantId = _tenantId,
ExportRunId = Guid.NewGuid().ToString(),
VerifyHashes = true
};
// Act
var result = await _service.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
Assert.Equal(PackRunProvenanceVerificationStatus.MissingLink, result.ProvenanceStatus);
}
[Fact]
public async Task VerifyAsync_WithProvenanceLink_VerifiesProvenance()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash, includeEvidence: true);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
});
var request = new PackRunVerificationRequest
{
TenantId = _tenantId,
ExportRunId = exportRunId,
VerifyProvenance = true
};
// Act
var result = await _service.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
Assert.Equal(PackRunProvenanceVerificationStatus.Valid, result.ProvenanceStatus);
}
[Fact]
public async Task VerifyAsync_WithValidAttestation_ReturnsValidAttestation()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash, includeAttestation: true);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId
});
var request = new PackRunVerificationRequest
{
TenantId = _tenantId,
ExportRunId = exportRunId,
VerifyAttestation = true
};
// Act
var result = await _service.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
Assert.Equal(PackRunAttestationVerificationStatus.Valid, result.AttestationStatus);
}
[Fact]
public async Task VerifyAsync_ForSpecificPackRun_OnlyVerifiesThatRun()
{
// Arrange
var packRunId1 = Guid.NewGuid().ToString();
var packRunId2 = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId1, content, hash);
SetupPackRun(packRunId2, content, hash);
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId1,
ExportRunId = exportRunId
});
await _service.IntegrateAsync(new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId2,
ExportRunId = exportRunId
});
var request = new PackRunVerificationRequest
{
TenantId = _tenantId,
ExportRunId = exportRunId,
PackRunId = packRunId1, // Only verify this run
VerifyHashes = true
};
// Act
var result = await _service.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
Assert.Equal(packRunId1, result.PackRunId);
Assert.Single(result.HashResults);
}
[Fact]
public async Task IntegrateAsync_WithLinkKindProvenanceOnly_SetsCorrectLinkKind()
{
// Arrange
var packRunId = Guid.NewGuid().ToString();
var exportRunId = Guid.NewGuid().ToString();
var content = "test"u8.ToArray();
var hash = ComputeHash(content);
SetupPackRun(packRunId, content, hash);
var request = new PackRunIntegrationRequest
{
TenantId = _tenantId,
PackRunId = packRunId,
ExportRunId = exportRunId,
LinkKind = PackRunLinkKind.ProvenanceOnly
};
// Act
var result = await _service.IntegrateAsync(request);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Reference?.ProvenanceLink);
Assert.Equal(PackRunLinkKind.ProvenanceOnly, result.Reference.ProvenanceLink.LinkKind);
}
private void SetupPackRun(
string packRunId,
byte[] content,
string hash,
bool includeEvidence = false,
bool includeAttestation = false)
{
var attestationId = includeAttestation ? Guid.NewGuid() : (Guid?)null;
var evidenceId = includeEvidence ? Guid.NewGuid() : (Guid?)null;
var now = DateTimeOffset.UtcNow;
_dataStore.AddStatus(new PackRunStatusInfo
{
RunId = packRunId,
TenantId = _tenantId,
PlanHash = "sha256:planhashabc123",
Status = "Completed",
CompletedAt = now,
EvidenceSnapshotId = evidenceId,
AttestationId = attestationId
});
_dataStore.AddArtifact(_tenantId, packRunId, new PackRunExportArtifact
{
Name = "test-artifact.txt",
Path = "artifacts/test-artifact.txt",
Sha256 = hash,
SizeBytes = content.Length,
MediaType = "text/plain"
}, content);
if (includeEvidence)
{
_dataStore.SetEvidence(_tenantId, packRunId, new PackRunEvidenceExport
{
SnapshotId = evidenceId!.Value,
RunId = packRunId,
PlanHash = "sha256:planhashabc123",
RootHash = "sha256:evidenceroothashabc123",
Kind = "RunCompletion",
CreatedAt = now,
MaterialCount = 1,
Materials =
[
new PackRunMaterialExport
{
Section = "artifacts",
Path = "test-artifact.txt",
Sha256 = hash,
SizeBytes = content.Length,
MediaType = "text/plain"
}
]
});
}
if (includeAttestation)
{
_dataStore.SetAttestation(_tenantId, packRunId, new PackRunAttestationExport
{
AttestationId = attestationId!.Value,
RunId = packRunId,
PlanHash = "sha256:planhashabc123",
PredicateType = "https://stellaops.io/attestation/pack-run/v1",
Status = "Signed",
CreatedAt = now,
SubjectCount = 1,
EnvelopeDigest = "sha256:envelopedigestabc123",
Subjects =
[
new PackRunProvenanceSubject
{
Name = "artifacts/test-artifact.txt",
Digest = new Dictionary<string, string> { ["sha256"] = hash.Replace("sha256:", "") }
}
]
});
}
}
private static string ComputeHash(byte[] content)
{
var hash = SHA256.HashData(content);
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
}

View File

@@ -18,7 +18,7 @@ public sealed class PortableEvidenceExportBuilderTests : IDisposable
{
_tempDir = Path.Combine(Path.GetTempPath(), $"portable-evidence-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHash = new DefaultCryptoHash();
_cryptoHash = new FakeCryptoHash();
_builder = new PortableEvidenceExportBuilder(_cryptoHash);
}
@@ -361,12 +361,13 @@ public sealed class PortableEvidenceExportBuilderTests : IDisposable
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
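// Owner names are a PosixTarEntry-only property in System.Formats.Tar;
// the as-cast maps V7 entries (no UserName/GroupName) to empty strings.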
var posixEntry = entry as PosixTarEntry;
entries.Add(new TarEntryMetadataWithName(
entry.Name,
entry.Uid,
entry.Gid,
entry.UserName ?? string.Empty,
entry.GroupName ?? string.Empty,
posixEntry?.UserName ?? string.Empty,
posixEntry?.GroupName ?? string.Empty,
entry.ModificationTime,
entry.Mode));
}

View File

@@ -19,7 +19,7 @@ public sealed class RiskBundleJobTests
Guid.NewGuid(),
Providers: new[] { new RiskBundleProviderInput("cisa-kev", providerPath, "CISA KEV") });
var signer = new HmacRiskBundleManifestSigner("secret", "risk-key");
var signer = new HmacRiskBundleManifestSigner(new FakeCryptoHmac(), "secret", "risk-key");
var store = new InMemoryObjectStore();
var job = new RiskBundleJob(
new RiskBundleBuilder(),

View File

@@ -1,4 +1,5 @@
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.RiskBundles;
namespace StellaOps.ExportCenter.Tests;
@@ -8,7 +9,7 @@ public class RiskBundleSignerTests
[Fact]
public async Task SignAsync_ProducesDsseEnvelope()
{
var signer = new HmacRiskBundleManifestSigner("secret-key", "test-key");
var signer = new HmacRiskBundleManifestSigner(new FakeCryptoHmac(), "secret-key", "test-key");
const string manifest = "{\"foo\":1}";
var doc = await signer.SignAsync(manifest, TestContext.Current.CancellationToken);

View File

@@ -0,0 +1,400 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Scheduling;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Scheduling;
public class ExportRetentionServiceTests
{
private readonly InMemoryExportRetentionStore _store;
private readonly ExportRetentionService _service;
public ExportRetentionServiceTests()
{
_store = new InMemoryExportRetentionStore();
_service = new ExportRetentionService(_store, NullLogger<ExportRetentionService>.Instance);
}
[Fact]
public void ComputeExpiration_ForSuccessfulRun_UsesSuccessfulDays()
{
var retention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
FailedRunDays = 7
};
var completedAt = new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero);
var expiration = _service.ComputeExpiration(retention, completedAt, success: true);
Assert.Equal(completedAt.AddDays(30), expiration);
}
[Fact]
public void ComputeExpiration_ForFailedRun_UsesFailedDays()
{
var retention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
FailedRunDays = 7
};
var completedAt = new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero);
var expiration = _service.ComputeExpiration(retention, completedAt, success: false);
Assert.Equal(completedAt.AddDays(7), expiration);
}
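// The two tests above pin the expiration rule: CompletedAt plus the window for
// the run's outcome class. A one-line sketch of the computation they imply
// (assumed to match ExportRetentionService.ComputeExpiration):
private static DateTimeOffset ComputeExpirationSketch(ExportRetentionConfig retention, DateTimeOffset completedAt, bool success)
=> completedAt.AddDays(success ? retention.SuccessfulRunDays : retention.FailedRunDays);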
[Fact]
public async Task PruneAsync_WithNoRuns_ReturnsEmptyResult()
{
var request = new RetentionPruneRequest
{
TenantId = Guid.NewGuid(),
Execute = true
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Equal(0, result.RunsPruned);
Assert.Empty(result.PrunedRuns);
}
[Fact]
public async Task PruneAsync_WithExpiredRuns_DeletesRuns()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
// Add runs - some expired, some not
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-60), // Old, should be pruned
ArtifactCount = 5,
TotalSizeBytes = 1000
}, tenantId);
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-10), // Recent, should be kept
ArtifactCount = 3,
TotalSizeBytes = 500
}, tenantId);
var request = new RetentionPruneRequest
{
TenantId = tenantId,
Execute = true,
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
MinimumRunsToRetain = 1
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Equal(1, result.RunsPruned);
}
[Fact]
public async Task PruneAsync_WithLegalHold_SkipsHeldRuns()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
var runId = Guid.NewGuid();
// Add an old run with legal hold
_store.AddRun(new DetailedRunInfo
{
RunId = runId,
ProfileId = profileId,
CompletedAt = now.AddDays(-60),
ArtifactCount = 5,
TotalSizeBytes = 1000
}, tenantId);
await _store.SetLegalHoldAsync(runId, true, "Legal investigation");
var request = new RetentionPruneRequest
{
TenantId = tenantId,
Execute = true,
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
RespectLegalHold = true,
MinimumRunsToRetain = 0
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Equal(0, result.RunsPruned);
Assert.Equal(1, result.RunsSkippedLegalHold);
}
[Fact]
public async Task PruneAsync_DryRun_DoesNotDelete()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-60),
ArtifactCount = 5,
TotalSizeBytes = 1000
}, tenantId);
var request = new RetentionPruneRequest
{
TenantId = tenantId,
Execute = false, // Dry run
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
MinimumRunsToRetain = 0
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Single(result.PrunedRuns); // Would be pruned
// Verify run still exists
var profileIds = await _store.GetProfileIdsAsync(tenantId);
Assert.Contains(profileId, profileIds);
}
[Fact]
public async Task PruneAsync_RespectsMinimumRunsToRetain()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
// Add multiple old runs
for (int i = 0; i < 10; i++)
{
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-60 - i),
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
}
var request = new RetentionPruneRequest
{
TenantId = tenantId,
Execute = true,
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 1, // All runs are expired
MinimumRunsToRetain = 5 // But keep at least 5
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Equal(5, result.RunsPruned); // 10 - 5 minimum = 5 pruned
}
[Fact]
public async Task SetLegalHoldAsync_SetsHold()
{
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
_store.AddRun(new DetailedRunInfo
{
RunId = runId,
ProfileId = profileId,
CompletedAt = DateTimeOffset.UtcNow,
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
await _service.SetLegalHoldAsync(runId, hold: true, reason: "Legal review");
var runInfo = await _store.GetRunInfoAsync(runId);
Assert.NotNull(runInfo);
Assert.True(runInfo.HasLegalHold);
Assert.Equal("Legal review", runInfo.LegalHoldReason);
}
[Fact]
public async Task SetLegalHoldAsync_ReleasesHold()
{
var runId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
_store.AddRun(new DetailedRunInfo
{
RunId = runId,
ProfileId = profileId,
CompletedAt = DateTimeOffset.UtcNow,
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
// Set then release
await _service.SetLegalHoldAsync(runId, hold: true, reason: "Legal review");
await _service.SetLegalHoldAsync(runId, hold: false);
var runInfo = await _store.GetRunInfoAsync(runId);
Assert.NotNull(runInfo);
Assert.False(runInfo.HasLegalHold);
}
[Fact]
public async Task GetRunsEligibleForPruningAsync_ReturnsExpiredRuns()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
var expiredRunId = Guid.NewGuid();
var recentRunId = Guid.NewGuid();
_store.AddRun(new DetailedRunInfo
{
RunId = expiredRunId,
ProfileId = profileId,
CompletedAt = now.AddDays(-60),
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
_store.AddRun(new DetailedRunInfo
{
RunId = recentRunId,
ProfileId = profileId,
CompletedAt = now.AddDays(-5),
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
var retention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
MinimumRunsToRetain = 1
};
var eligible = await _service.GetRunsEligibleForPruningAsync(
tenantId, profileId, retention, now);
Assert.Single(eligible);
Assert.Contains(expiredRunId, eligible);
Assert.DoesNotContain(recentRunId, eligible);
}
[Fact]
public async Task PruneAsync_ReturnsCorrectByteCount()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-60),
ArtifactCount = 10,
TotalSizeBytes = 1_000_000
}, tenantId);
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId,
CompletedAt = now.AddDays(-50),
ArtifactCount = 5,
TotalSizeBytes = 500_000
}, tenantId);
var request = new RetentionPruneRequest
{
TenantId = tenantId,
Execute = true,
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
MinimumRunsToRetain = 0
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Equal(2, result.RunsPruned);
Assert.Equal(15, result.ArtifactsDeleted);
Assert.Equal(1_500_000, result.BytesFreed);
}
[Fact]
public async Task PruneAsync_WithProfileFilter_OnlyPrunesSpecifiedProfile()
{
var tenantId = Guid.NewGuid();
var profileId1 = Guid.NewGuid();
var profileId2 = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
// Add old runs for both profiles
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId1,
CompletedAt = now.AddDays(-60),
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
_store.AddRun(new DetailedRunInfo
{
RunId = Guid.NewGuid(),
ProfileId = profileId2,
CompletedAt = now.AddDays(-60),
ArtifactCount = 1,
TotalSizeBytes = 100
}, tenantId);
var request = new RetentionPruneRequest
{
TenantId = tenantId,
ProfileId = profileId1, // Only prune profile1
Execute = true,
OverrideRetention = new ExportRetentionConfig
{
SuccessfulRunDays = 30,
MinimumRunsToRetain = 0
}
};
var result = await _service.PruneAsync(request);
Assert.True(result.Success);
Assert.Single(result.PrunedRuns);
Assert.Equal(profileId1, result.PrunedRuns[0].ProfileId);
}
}

View File

@@ -0,0 +1,453 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Scheduling;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Scheduling;
public class ExportSchedulerServiceTests
{
private readonly InMemoryExportScheduleStore _store;
private readonly ExportSchedulerService _service;
public ExportSchedulerServiceTests()
{
_store = new InMemoryExportScheduleStore();
_service = new ExportSchedulerService(_store, NullLogger<ExportSchedulerService>.Instance);
}
[Fact]
public void ValidateCronExpression_WithValidExpression_ReturnsValid()
{
var result = _service.ValidateCronExpression("0 0 * * *");
Assert.True(result.IsValid);
Assert.Null(result.ErrorMessage);
}
[Fact]
public void ValidateCronExpression_WithSixFieldExpression_ReturnsValid()
{
// 6-field cron with seconds
var result = _service.ValidateCronExpression("0 0 0 * * *");
Assert.True(result.IsValid);
Assert.Null(result.ErrorMessage);
}
[Fact]
public void ValidateCronExpression_WithInvalidExpression_ReturnsError()
{
var result = _service.ValidateCronExpression("invalid cron");
Assert.False(result.IsValid);
Assert.NotNull(result.ErrorMessage);
}
[Fact]
public void ValidateCronExpression_WithEmpty_ReturnsError()
{
var result = _service.ValidateCronExpression("");
Assert.False(result.IsValid);
Assert.NotNull(result.ErrorMessage);
}
[Fact]
public void GetNextScheduledTime_WithValidCron_ReturnsNextOccurrence()
{
var profileId = Guid.NewGuid();
var from = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
// Every hour at minute 0
var next = _service.GetNextScheduledTime(profileId, "0 * * * *", "UTC", from);
Assert.NotNull(next);
Assert.Equal(new DateTimeOffset(2025, 1, 1, 1, 0, 0, TimeSpan.Zero), next.Value);
}
[Fact]
public void GetNextScheduledTime_WithEmptyCron_ReturnsNull()
{
var profileId = Guid.NewGuid();
var from = DateTimeOffset.UtcNow;
var next = _service.GetNextScheduledTime(profileId, "", "UTC", from);
Assert.Null(next);
}
[Fact]
public void GetNextScheduledTime_WithInvalidTimezone_ReturnsNull()
{
var profileId = Guid.NewGuid();
var from = DateTimeOffset.UtcNow;
var next = _service.GetNextScheduledTime(profileId, "0 * * * *", "Invalid/Timezone", from);
Assert.Null(next);
}
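// The cron tests above accept both 5-field and seconds-first 6-field expressions
// and resolve occurrences in a named time zone. A minimal sketch of that lookup
// using the Cronos library (an assumed dependency, shown for illustration only):
private static DateTimeOffset? NextOccurrenceSketch(string cron, string timeZoneId, DateTimeOffset from)
{
try
{
var fieldCount = cron.Split(' ', StringSplitOptions.RemoveEmptyEntries).Length;
var expression = Cronos.CronExpression.Parse(
cron, fieldCount == 6 ? Cronos.CronFormat.IncludeSeconds : Cronos.CronFormat.Standard);
var zone = TimeZoneInfo.FindSystemTimeZoneById(timeZoneId);
return expression.GetNextOccurrence(from, zone);
}
catch (Exception ex) when (ex is Cronos.CronFormatException or TimeZoneNotFoundException)
{
return null; // invalid expression or unknown zone: no next run
}
}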
[Fact]
public async Task TriggerAsync_WithNewProfile_ReturnsSuccess()
{
var profileId = Guid.NewGuid();
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = Guid.NewGuid(),
CronExpression = "0 * * * *"
});
var request = new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Manual
};
var result = await _service.TriggerAsync(request);
Assert.True(result.Accepted);
Assert.NotNull(result.RunId);
}
[Fact]
public async Task TriggerAsync_WhenAlreadyRunning_ReturnsRejected()
{
var profileId = Guid.NewGuid();
var existingRunId = Guid.NewGuid();
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
CurrentRunId = existingRunId
});
var request = new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Manual
};
var result = await _service.TriggerAsync(request);
Assert.False(result.Accepted);
Assert.Equal(ExportTriggerRejection.ConcurrencyLimitReached, result.RejectionCode);
}
[Fact]
public async Task TriggerAsync_WhenPaused_ReturnsRejected()
{
var profileId = Guid.NewGuid();
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsPausedDueToFailures = true,
ConsecutiveFailures = 10
});
var request = new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Scheduled
};
var result = await _service.TriggerAsync(request);
Assert.False(result.Accepted);
Assert.Equal(ExportTriggerRejection.PausedDueToFailures, result.RejectionCode);
}
[Fact]
public async Task TriggerAsync_WhenPausedButForced_ReturnsSuccess()
{
var profileId = Guid.NewGuid();
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsPausedDueToFailures = true,
ConsecutiveFailures = 10
});
var request = new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Manual,
Force = true
};
var result = await _service.TriggerAsync(request);
Assert.True(result.Accepted);
Assert.NotNull(result.RunId);
}
[Fact]
public async Task UpdateRunCompletionAsync_WithSuccess_ResetsFailureCount()
{
var profileId = Guid.NewGuid();
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = Guid.NewGuid()
});
// Start a run - this registers the run ID in _runToProfile
var triggerResult = await _service.TriggerAsync(new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Manual
});
var runId = triggerResult.RunId!.Value;
// Simulate some failures first (use the same runId)
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
CurrentRunId = runId,
ConsecutiveFailures = 5
});
// Complete successfully
await _service.UpdateRunCompletionAsync(runId, success: true);
var status = await _service.GetStatusAsync(profileId);
Assert.Equal(0, status?.ConsecutiveFailures);
Assert.False(status?.IsRunning);
}
[Fact]
public async Task UpdateRunCompletionAsync_WithFailure_IncrementsFailureCount()
{
var profileId = Guid.NewGuid();
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = Guid.NewGuid()
});
// Start a run - this registers the run ID in _runToProfile
var triggerResult = await _service.TriggerAsync(new ExportTriggerRequest
{
ProfileId = profileId,
Source = ExportTriggerSource.Manual
});
var runId = triggerResult.RunId!.Value;
// Simulate some failures first (use the same runId)
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
CurrentRunId = runId,
ConsecutiveFailures = 2
});
var failure = new ExportFailureInfo
{
Class = ExportFailureClass.Transient,
Message = "Connection timeout",
OccurredAt = DateTimeOffset.UtcNow
};
await _service.UpdateRunCompletionAsync(runId, success: false, failure);
var status = await _store.GetStatusByRunAsync(runId);
Assert.Equal(3, status?.ConsecutiveFailures);
Assert.False(status?.IsRunning);
}
[Fact]
public void ComputeRetryDelay_WithFirstFailure_ReturnsInitialDelay()
{
var policy = new ExportRetryPolicy
{
MaxRetries = 3,
InitialDelaySeconds = 60,
BackoffMultiplier = 2.0
};
var delay = _service.ComputeRetryDelay(policy, failureCount: 0);
Assert.NotNull(delay);
Assert.Equal(TimeSpan.FromSeconds(60), delay.Value);
}
[Fact]
public void ComputeRetryDelay_WithExponentialBackoff_IncreasesDelay()
{
var policy = new ExportRetryPolicy
{
MaxRetries = 5,
InitialDelaySeconds = 60,
BackoffMultiplier = 2.0,
MaxDelaySeconds = 3600
};
var delay1 = _service.ComputeRetryDelay(policy, failureCount: 1);
var delay2 = _service.ComputeRetryDelay(policy, failureCount: 2);
Assert.NotNull(delay1);
Assert.NotNull(delay2);
Assert.Equal(TimeSpan.FromSeconds(120), delay1.Value); // 60 * 2^1
Assert.Equal(TimeSpan.FromSeconds(240), delay2.Value); // 60 * 2^2
}
[Fact]
public void ComputeRetryDelay_WithMaxRetries_ReturnsNull()
{
var policy = new ExportRetryPolicy
{
MaxRetries = 3,
InitialDelaySeconds = 60
};
var delay = _service.ComputeRetryDelay(policy, failureCount: 3);
Assert.Null(delay);
}
[Fact]
public void ComputeRetryDelay_CapsAtMaxDelay()
{
var policy = new ExportRetryPolicy
{
MaxRetries = 10,
InitialDelaySeconds = 60,
BackoffMultiplier = 10.0, // Would exceed max quickly
MaxDelaySeconds = 300
};
var delay = _service.ComputeRetryDelay(policy, failureCount: 5);
Assert.NotNull(delay);
Assert.Equal(TimeSpan.FromSeconds(300), delay.Value);
}
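// The four ComputeRetryDelay tests above pin down the backoff contract. A minimal
// sketch of the implied formula (an assumption, not the production code; the real
// service may add jitter, and MaxDelaySeconds presumably defaults high enough not
// to clip the first-failure case): delay(n) = min(Initial * Multiplier^n, MaxDelay),
// or null once n >= MaxRetries.
private static TimeSpan? ComputeRetryDelaySketch(ExportRetryPolicy policy, int failureCount)
{
if (failureCount >= policy.MaxRetries)
{
return null; // retries exhausted
}
var seconds = policy.InitialDelaySeconds * Math.Pow(policy.BackoffMultiplier, failureCount);
return TimeSpan.FromSeconds(Math.Min(seconds, policy.MaxDelaySeconds));
}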
[Fact]
public void ClassifyFailure_WithSocketException_ReturnsNetworkError()
{
var ex = new System.Net.Sockets.SocketException();
var classification = _service.ClassifyFailure(ex);
Assert.Equal(ExportFailureClass.NetworkError, classification);
}
[Fact]
public void ClassifyFailure_WithTimeout_ReturnsTransient()
{
var ex = new TimeoutException();
var classification = _service.ClassifyFailure(ex);
Assert.Equal(ExportFailureClass.Transient, classification);
}
[Fact]
public void ClassifyFailure_WithCancellation_ReturnsCancelled()
{
var cts = new CancellationTokenSource();
cts.Cancel();
var ex = new OperationCanceledException(cts.Token);
var classification = _service.ClassifyFailure(ex);
Assert.Equal(ExportFailureClass.Cancelled, classification);
}
[Fact]
public void ClassifyFailure_WithArgumentException_ReturnsValidationError()
{
var ex = new ArgumentException("Invalid argument");
var classification = _service.ClassifyFailure(ex);
Assert.Equal(ExportFailureClass.ValidationError, classification);
}
[Fact]
public async Task GetProfilesDueForExecutionAsync_WithDueProfile_ReturnsProfile()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var pastTime = DateTimeOffset.UtcNow.AddMinutes(-10);
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = tenantId,
CronExpression = "0 * * * *",
Enabled = true
});
// Set next run to past
await _store.UpdateNextScheduledRunAsync(profileId, pastTime);
var due = await _service.GetProfilesDueForExecutionAsync(tenantId, DateTimeOffset.UtcNow);
Assert.Single(due);
Assert.Contains(profileId, due);
}
[Fact]
public async Task GetProfilesDueForExecutionAsync_WithRunningProfile_SkipsProfile()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var pastTime = DateTimeOffset.UtcNow.AddMinutes(-10);
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = tenantId,
CronExpression = "0 * * * *",
Enabled = true
});
// Set as running
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsRunning = true,
NextScheduledRun = pastTime
});
var due = await _service.GetProfilesDueForExecutionAsync(tenantId, DateTimeOffset.UtcNow);
Assert.Empty(due);
}
[Fact]
public async Task GetProfilesDueForExecutionAsync_WithPausedProfile_SkipsProfile()
{
var tenantId = Guid.NewGuid();
var profileId = Guid.NewGuid();
var pastTime = DateTimeOffset.UtcNow.AddMinutes(-10);
_store.AddProfile(new ScheduledProfileInfo
{
ProfileId = profileId,
TenantId = tenantId,
CronExpression = "0 * * * *",
Enabled = true
});
// Set as paused
_store.SetStatus(new ScheduledExportStatus
{
ProfileId = profileId,
IsPausedDueToFailures = true,
NextScheduledRun = pastTime
});
var due = await _service.GetProfilesDueForExecutionAsync(tenantId, DateTimeOffset.UtcNow);
Assert.Empty(due);
}
}
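// A minimal sketch of the exception mapping the ClassifyFailure tests above exercise
// (the match order and the fallback class are assumptions; OperationCanceledException
// must be handled before the generic transient cases):
internal static class FailureClassifierSketch
{
public static ExportFailureClass Classify(Exception ex) => ex switch
{
OperationCanceledException => ExportFailureClass.Cancelled,
System.Net.Sockets.SocketException => ExportFailureClass.NetworkError,
TimeoutException => ExportFailureClass.Transient,
ArgumentException => ExportFailureClass.ValidationError,
_ => ExportFailureClass.Transient // assumed default for unclassified errors
};
}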


@@ -0,0 +1,470 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Tenancy;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Tenancy;
public class TenantScopeEnforcerTests
{
private readonly InMemoryTenantScopeConfigStore _configStore;
private readonly InMemoryTenantResourceStore _resourceStore;
private readonly TenantScopeEnforcer _enforcer;
private readonly string _tenantId = "tenant-test-001";
private readonly string _projectId = "project-001";
public TenantScopeEnforcerTests()
{
_configStore = new InMemoryTenantScopeConfigStore();
_resourceStore = new InMemoryTenantResourceStore();
_enforcer = new TenantScopeEnforcer(
_configStore,
_resourceStore,
NullLogger<TenantScopeEnforcer>.Instance,
TimeProvider.System);
}
[Fact]
public async Task CheckScopeAsync_SameTenant_AllowsOperation()
{
// Arrange
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = _tenantId
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.True(result.Allowed);
Assert.False(result.IsCrossTenant);
}
[Fact]
public async Task CheckScopeAsync_CrossTenant_DeniedByDefault()
{
// Arrange
_configStore.SetDefaultConfig(new TenantScopeConfig { StrictIsolation = true });
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = "different-tenant"
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.False(result.Allowed);
Assert.Equal(TenantScopeDenialReason.StrictIsolationViolation, result.DenialReason);
}
[Fact]
public async Task CheckScopeAsync_CrossTenant_AllowedWhenInWhitelist()
{
// Arrange
var targetTenant = "target-tenant";
await _configStore.AddToGlobalWhitelistAsync(targetTenant);
_configStore.SetDefaultConfig(new TenantScopeConfig { StrictIsolation = false });
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = targetTenant
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.True(result.Allowed);
Assert.True(result.IsCrossTenant);
Assert.True(result.AllowedViaWhitelist);
}
[Fact]
public async Task CheckScopeAsync_CrossTenant_AllowedWhenInTenantWhitelist()
{
// Arrange
var targetTenant = "target-tenant";
await _configStore.SaveTenantConfigAsync(_tenantId, new TenantScopeConfig
{
StrictIsolation = false,
CrossTenantWhitelist = [targetTenant]
});
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = targetTenant
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.True(result.Allowed);
Assert.True(result.IsCrossTenant);
Assert.True(result.AllowedViaWhitelist);
}
[Fact]
public async Task CheckScopeAsync_CrossTenant_AllowedWhenInAllowedTargets()
{
// Arrange
var targetTenant = "target-tenant";
await _configStore.SaveTenantConfigAsync(_tenantId, new TenantScopeConfig
{
StrictIsolation = true,
AllowedTargetTenants = [targetTenant]
});
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = targetTenant
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.True(result.Allowed);
Assert.True(result.IsCrossTenant);
Assert.False(result.AllowedViaWhitelist);
}
[Fact]
public async Task CheckScopeAsync_InvalidTenantId_ReturnsDenial()
{
// Arrange
var request = new TenantScopeCheckRequest
{
RequestingTenantId = "ab", // Too short
TargetTenantId = _tenantId
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.False(result.Allowed);
Assert.Equal(TenantScopeDenialReason.InvalidTenantId, result.DenialReason);
}
[Fact]
public async Task CheckScopeAsync_ResourceScopeViolation_ReturnsDenial()
{
// Arrange
var otherTenant = "other-tenant";
var resourceId = "resource-001";
await _resourceStore.RegisterResourceAsync(otherTenant, resourceId, "sbom");
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = _tenantId,
ResourceIds = [resourceId]
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.False(result.Allowed);
Assert.Equal(TenantScopeDenialReason.ResourceScopeViolation, result.DenialReason);
Assert.Contains(resourceId, result.DeniedResources);
}
[Fact]
public async Task CheckScopeAsync_EnforcementDisabled_AllowsEverything()
{
// Arrange
_configStore.SetDefaultConfig(new TenantScopeConfig { Enabled = false });
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
TargetTenantId = "any-tenant"
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.True(result.Allowed);
}
[Fact]
public async Task CheckScopeAsync_ProjectMismatch_ReturnsDenial()
{
// Arrange
var request = new TenantScopeCheckRequest
{
RequestingTenantId = _tenantId,
RequestingProjectId = _projectId,
TargetTenantId = _tenantId,
TargetProjectId = "different-project"
};
// Act
var result = await _enforcer.CheckScopeAsync(request);
// Assert
Assert.False(result.Allowed);
Assert.Equal(TenantScopeDenialReason.ProjectScopeViolation, result.DenialReason);
}
[Fact]
public void CreateScopedPath_WithTenantOnly_CreatesCorrectPath()
{
// Arrange & Act
var result = _enforcer.CreateScopedPath(_tenantId, null, "artifacts/sbom.json");
// Assert
Assert.Equal($"tenants/{_tenantId}/projects/default/artifacts/sbom.json", result.ScopedPath);
Assert.Equal(_tenantId, result.TenantId);
Assert.Equal("default", result.ProjectId);
Assert.Equal("artifacts/sbom.json", result.RelativePath);
}
[Fact]
public void CreateScopedPath_WithTenantAndProject_CreatesCorrectPath()
{
// Arrange & Act
var result = _enforcer.CreateScopedPath(_tenantId, _projectId, "artifacts/sbom.json");
// Assert
Assert.Equal($"tenants/{_tenantId}/projects/{_projectId}/artifacts/sbom.json", result.ScopedPath);
Assert.Equal(_tenantId, result.TenantId);
Assert.Equal(_projectId, result.ProjectId);
}
[Fact]
public void CreateScopedPath_WithLeadingSlash_NormalizesPath()
{
// Arrange & Act
var result = _enforcer.CreateScopedPath(_tenantId, null, "/artifacts/sbom.json");
// Assert
Assert.Equal($"tenants/{_tenantId}/projects/default/artifacts/sbom.json", result.ScopedPath);
Assert.Equal("artifacts/sbom.json", result.RelativePath);
}
[Fact]
public void ParseScopedPath_ValidPath_ExtractsComponents()
{
// Arrange
var path = $"tenants/{_tenantId}/projects/{_projectId}/artifacts/sbom.json";
// Act
var result = _enforcer.ParseScopedPath(path);
// Assert
Assert.NotNull(result);
Assert.Equal(_tenantId, result.TenantId);
Assert.Equal(_projectId, result.ProjectId);
Assert.Equal("artifacts/sbom.json", result.RelativePath);
}
[Fact]
public void ParseScopedPath_PathWithoutProject_ExtractsComponents()
{
// Arrange
var path = $"tenants/{_tenantId}/artifacts/sbom.json";
// Act
var result = _enforcer.ParseScopedPath(path);
// Assert
Assert.NotNull(result);
Assert.Equal(_tenantId, result.TenantId);
Assert.Null(result.ProjectId);
Assert.Equal("artifacts/sbom.json", result.RelativePath);
}
[Fact]
public void ParseScopedPath_InvalidPath_ReturnsNull()
{
// Arrange & Act
var result = _enforcer.ParseScopedPath("invalid-path");
// Assert
Assert.Null(result);
}
[Fact]
public void ValidateIds_ValidTenantId_ReturnsValid()
{
// Act
var result = _enforcer.ValidateIds(_tenantId);
// Assert
Assert.True(result.IsValid);
}
[Fact]
public void ValidateIds_ValidGuidTenantId_ReturnsValid()
{
// Arrange
var guidTenant = Guid.NewGuid().ToString();
// Act
var result = _enforcer.ValidateIds(guidTenant);
// Assert
Assert.True(result.IsValid);
}
[Fact]
public void ValidateIds_TooShortTenantId_ReturnsInvalid()
{
// Act
var result = _enforcer.ValidateIds("ab");
// Assert
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Code == TenantScopeErrorCodes.InvalidTenantId);
}
[Fact]
public void ValidateIds_NullTenantId_ReturnsInvalid()
{
// Act
var result = _enforcer.ValidateIds(null!);
// Assert
Assert.False(result.IsValid);
}
[Fact]
public void ValidateIds_InvalidProjectId_ReturnsInvalid()
{
// Act
var result = _enforcer.ValidateIds(_tenantId, "ab");
// Assert
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Code == TenantScopeErrorCodes.InvalidProjectId);
}
[Fact]
public void GetScopePrefix_WithDefaultConfig_ReturnsExpectedPrefix()
{
// Act
var prefix = _enforcer.GetScopePrefix(_tenantId, _projectId);
// Assert
Assert.Equal($"tenants/{_tenantId}/projects/{_projectId}", prefix);
}
[Fact]
public void IsPathOwnedByTenant_MatchingTenant_ReturnsTrue()
{
// Arrange
var path = $"tenants/{_tenantId}/projects/{_projectId}/artifacts/sbom.json";
// Act
var result = _enforcer.IsPathOwnedByTenant(path, _tenantId);
// Assert
Assert.True(result);
}
[Fact]
public void IsPathOwnedByTenant_DifferentTenant_ReturnsFalse()
{
// Arrange
var path = $"tenants/{_tenantId}/projects/{_projectId}/artifacts/sbom.json";
// Act
var result = _enforcer.IsPathOwnedByTenant(path, "other-tenant");
// Assert
Assert.False(result);
}
[Fact]
public void CreateProvenanceContext_CreatesValidContext()
{
// Arrange
var entries = new List<TenantScopedManifestEntry>
{
new()
{
Path = "artifacts/sbom.json",
TenantId = _tenantId,
RelativePath = "sbom.json",
Sha256 = "abc123",
SizeBytes = 1024
}
};
var exportRunId = Guid.NewGuid().ToString();
// Act
var context = _enforcer.CreateProvenanceContext(_tenantId, _projectId, exportRunId, entries);
// Assert
Assert.Equal(_tenantId, context.TenantId);
Assert.Equal(_projectId, context.ProjectId);
Assert.Equal(exportRunId, context.ExportRunId);
Assert.Equal(1, context.ArtifactCount);
Assert.Equal(1024, context.TotalSizeBytes);
Assert.Contains(_tenantId, context.ScopePrefix);
}
[Fact]
public void CreateProvenanceContext_WithCrossTenantRefs_IncludesRefs()
{
// Arrange
var entries = new List<TenantScopedManifestEntry>();
var crossTenantRefs = new List<CrossTenantRef>
{
new()
{
SourceTenantId = "other-tenant",
ResourceId = "resource-001",
ResourceType = "sbom",
AllowedVia = "whitelist"
}
};
var exportRunId = Guid.NewGuid().ToString();
// Act
var context = _enforcer.CreateProvenanceContext(_tenantId, _projectId, exportRunId, entries, crossTenantRefs);
// Assert
Assert.NotNull(context.CrossTenantRefs);
Assert.Single(context.CrossTenantRefs);
Assert.Equal("other-tenant", context.CrossTenantRefs[0].SourceTenantId);
}
[Theory]
[InlineData("tenant-abc")]
[InlineData("tenant_123")]
[InlineData("abc")]
[InlineData("a-very-long-tenant-name-that-is-still-valid-12345678")]
public void TenantIdValidator_ValidIds_ReturnsTrue(string tenantId)
{
Assert.True(TenantIdValidator.IsValid(tenantId));
}
[Theory]
[InlineData("ab")] // Too short
[InlineData("tenant with spaces")]
[InlineData("tenant.with.dots")]
[InlineData("-starts-with-hyphen")]
[InlineData("")]
[InlineData(null)]
public void TenantIdValidator_InvalidIds_ReturnsFalse(string? tenantId)
{
Assert.False(TenantIdValidator.IsValid(tenantId));
}
}
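// A minimal sketch of the identifier rule the TenantIdValidator theories above imply
// (a hypothetical regex: three or more characters, alphanumeric plus hyphen/underscore,
// not starting with a hyphen; the real validator may also enforce a maximum length):
internal static class TenantIdValidatorSketch
{
private static readonly System.Text.RegularExpressions.Regex Pattern =
new("^[A-Za-z0-9][A-Za-z0-9_-]{2,}$");
public static bool IsValid(string? tenantId)
=> tenantId is not null && Pattern.IsMatch(tenantId);
}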


@@ -0,0 +1,886 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Verification;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Verification;
public class ExportVerificationServiceTests
{
private readonly InMemoryExportArtifactStore _store;
private readonly ExportVerificationService _service;
private readonly Guid _tenantId = Guid.NewGuid();
private readonly Guid _profileId = Guid.NewGuid();
public ExportVerificationServiceTests()
{
_store = new InMemoryExportArtifactStore();
_service = new ExportVerificationService(_store, NullLogger<ExportVerificationService>.Instance);
}
[Fact]
public async Task VerifyAsync_WithValidRun_ReturnsValid()
{
var runId = SetupValidRun();
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = false,
VerifySignatures = false,
VerifyManifestIntegrity = true,
VerifyEncryption = false
}
};
var result = await _service.VerifyAsync(request);
Assert.True(result.IsValid);
Assert.Equal(VerificationStatus.Valid, result.Status);
Assert.Empty(result.Errors);
}
[Fact]
public async Task VerifyAsync_WithNonExistentRun_ReturnsError()
{
var request = new ExportVerificationRequest
{
RunId = Guid.NewGuid(),
TenantId = _tenantId
};
var result = await _service.VerifyAsync(request);
Assert.False(result.IsValid);
Assert.Single(result.Errors);
Assert.Contains(result.Errors, e => e.Code == VerificationErrorCodes.ManifestNotFound);
}
[Fact]
public async Task VerifyAsync_WithTenantMismatch_ReturnsError()
{
var runId = SetupValidRun();
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = Guid.NewGuid() // Different tenant
};
var result = await _service.VerifyAsync(request);
Assert.False(result.IsValid);
Assert.Single(result.Errors);
Assert.Contains(result.Errors, e => e.Code == VerificationErrorCodes.TenantMismatch);
}
[Fact]
public async Task VerifyAsync_WithHashMismatch_ReturnsInvalid()
{
var runId = Guid.NewGuid();
var content = "test content"u8.ToArray();
var wrongHash = "0000000000000000000000000000000000000000000000000000000000000000";
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId
});
_store.SetManifest(runId, CreateValidManifest());
_store.AddArtifact(runId, "test.txt", content, wrongHash);
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = true,
VerifySignatures = false,
VerifyManifestIntegrity = true
}
};
var result = await _service.VerifyAsync(request);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Code == VerificationErrorCodes.HashMismatch);
}
[Fact]
public async Task VerifyAsync_WithMatchingHash_ReturnsValid()
{
var runId = Guid.NewGuid();
var content = "test content"u8.ToArray();
var hash = ComputeHash(content);
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId
});
_store.SetManifest(runId, CreateValidManifest());
_store.AddArtifact(runId, "test.txt", content, hash);
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = true,
VerifySignatures = false,
VerifyManifestIntegrity = true
}
};
var result = await _service.VerifyAsync(request);
Assert.True(result.IsValid);
Assert.Single(result.FileHashes);
Assert.True(result.FileHashes[0].IsValid);
}
[Fact]
public async Task VerifyManifestAsync_WithValidJson_ReturnsValid()
{
var manifest = CreateValidManifest();
var result = await _service.VerifyManifestAsync(manifest);
Assert.True(result.IsValid);
Assert.NotNull(result.ManifestDigest);
Assert.Equal("1.0", result.FormatVersion);
}
[Fact]
public async Task VerifyManifestAsync_WithInvalidJson_ReturnsInvalid()
{
var invalidManifest = "not valid json {{{";
var result = await _service.VerifyManifestAsync(invalidManifest);
Assert.False(result.IsValid);
Assert.NotEmpty(result.ValidationErrors);
}
[Fact]
public async Task VerifyManifestAsync_WithNdjson_ReturnsValid()
{
var ndjsonManifest = "{\"path\":\"file1.txt\",\"hash\":\"abc123\"}\n{\"path\":\"file2.txt\",\"hash\":\"def456\"}\n";
var result = await _service.VerifyManifestAsync(ndjsonManifest);
Assert.True(result.IsValid);
Assert.Equal(2, result.EntryCount);
}
[Fact]
public async Task VerifySignatureAsync_WithValidDsse_ReturnsValid()
{
var payload = "test payload"u8.ToArray();
var encodedPayload = Convert.ToBase64String(payload);
var dsseEnvelope = JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = encodedPayload,
signatures = new[]
{
new { keyid = "key-1", sig = "base64signature" }
}
});
var options = new ExportVerificationOptions
{
TrustedKeys = ["key-1"]
};
var result = await _service.VerifySignatureAsync(dsseEnvelope, payload, options);
Assert.True(result.IsValid);
Assert.Equal("key-1", result.KeyId);
}
[Fact]
public async Task VerifySignatureAsync_WithUntrustedKey_ReturnsInvalid()
{
var payload = "test payload"u8.ToArray();
var encodedPayload = Convert.ToBase64String(payload);
var dsseEnvelope = JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = encodedPayload,
signatures = new[]
{
new { keyid = "untrusted-key", sig = "base64signature" }
}
});
var options = new ExportVerificationOptions
{
TrustedKeys = ["trusted-key-1", "trusted-key-2"]
};
var result = await _service.VerifySignatureAsync(dsseEnvelope, payload, options);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("not in trusted keys"));
}
[Fact]
public async Task VerifySignatureAsync_WithNoSignatures_ReturnsInvalid()
{
var payload = "test payload"u8.ToArray();
var dsseEnvelope = JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = Convert.ToBase64String(payload),
signatures = Array.Empty<object>()
});
var result = await _service.VerifySignatureAsync(dsseEnvelope, payload, new ExportVerificationOptions());
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("no signatures"));
}
[Fact]
public void ComputeHash_WithSha256_ReturnsCorrectHash()
{
var content = "hello world"u8.ToArray();
var hash = _service.ComputeHash(content);
// SHA-256 hash of "hello world"
Assert.Equal("b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9", hash);
}
[Fact]
public void ComputeHash_WithSha512_ReturnsCorrectHash()
{
var content = "hello world"u8.ToArray();
var hash = _service.ComputeHash(content, "sha512");
Assert.NotNull(hash);
Assert.Equal(128, hash.Length); // SHA-512 produces 64 bytes = 128 hex chars
}
[Fact]
public async Task VerifyStreamingAsync_EmitsProgressEvents()
{
var runId = SetupRunWithArtifacts(3);
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = true,
VerifySignatures = false,
VerifyManifestIntegrity = true
}
};
var events = new List<VerificationProgressEvent>();
await foreach (var evt in _service.VerifyStreamingAsync(request))
{
events.Add(evt);
}
Assert.Contains(events, e => e.Type == VerificationProgressType.Started);
Assert.Contains(events, e => e.Type == VerificationProgressType.ManifestVerified);
Assert.Contains(events, e => e.Type == VerificationProgressType.HashVerificationStarted);
Assert.Contains(events, e => e.Type == VerificationProgressType.HashVerificationProgress);
Assert.Contains(events, e => e.Type == VerificationProgressType.HashVerificationComplete);
Assert.Contains(events, e => e.Type == VerificationProgressType.Completed);
}
[Fact]
public async Task VerifyStreamingAsync_TracksProgressCorrectly()
{
var runId = SetupRunWithArtifacts(5);
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = true,
VerifySignatures = false,
VerifyManifestIntegrity = true
}
};
var progressEvents = new List<VerificationProgressEvent>();
await foreach (var evt in _service.VerifyStreamingAsync(request))
{
if (evt.Type == VerificationProgressType.HashVerificationProgress)
{
progressEvents.Add(evt);
}
}
// Should have 5 progress events for 5 artifacts
Assert.Equal(5, progressEvents.Count);
// Progress should increase
for (int i = 1; i < progressEvents.Count; i++)
{
Assert.True(progressEvents[i].VerifiedItems >= progressEvents[i - 1].VerifiedItems);
}
}
[Fact]
public async Task VerifyAsync_WithMissingArtifact_ReturnsHashError()
{
var runId = Guid.NewGuid();
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId
});
_store.SetManifest(runId, CreateValidManifest());
// Only existing.txt has stored content; the manifest's test.txt entry has no backing artifact (the missing file)
_store.AddArtifact(runId, "existing.txt", "content"u8.ToArray(), ComputeHash("content"u8.ToArray()));
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = true,
VerifySignatures = false
}
};
var result = await _service.VerifyAsync(request);
// The existing file should verify fine
Assert.True(result.FileHashes.Any(h => h.IsValid));
}
[Fact]
public async Task VerifyAsync_WithEncryptionMode_VerifiesMetadata()
{
var runId = Guid.NewGuid();
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId,
EncryptionMode = "aes-gcm+age"
});
_store.SetManifest(runId, CreateValidManifest());
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = false,
VerifySignatures = false,
VerifyEncryption = true
}
};
var result = await _service.VerifyAsync(request);
Assert.NotNull(result.Encryption);
Assert.True(result.Encryption.IsValid);
Assert.Equal("aes-gcm+age", result.Encryption.Mode);
}
[Fact]
public async Task VerifyAsync_WithInvalidEncryptionMode_ReturnsError()
{
var runId = Guid.NewGuid();
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId,
EncryptionMode = "invalid-mode"
});
_store.SetManifest(runId, CreateValidManifest());
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = false,
VerifySignatures = false,
VerifyEncryption = true
}
};
var result = await _service.VerifyAsync(request);
Assert.NotNull(result.Encryption);
Assert.False(result.Encryption.IsValid);
}
[Fact]
public async Task VerifyAsync_WithNoSignatureButSignatureVerificationEnabled_AddsWarning()
{
var runId = SetupValidRun();
var request = new ExportVerificationRequest
{
RunId = runId,
TenantId = _tenantId,
Options = new ExportVerificationOptions
{
VerifyHashes = false,
VerifySignatures = true,
VerifyManifestIntegrity = false
}
};
var result = await _service.VerifyAsync(request);
Assert.Contains(result.Warnings, w => w.Contains("signature"));
}
private Guid SetupValidRun()
{
var runId = Guid.NewGuid();
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId
});
_store.SetManifest(runId, CreateValidManifest());
return runId;
}
private Guid SetupRunWithArtifacts(int count)
{
var runId = Guid.NewGuid();
_store.AddRun(new RunMetadata
{
RunId = runId,
TenantId = _tenantId,
ProfileId = _profileId
});
_store.SetManifest(runId, CreateValidManifest());
for (int i = 0; i < count; i++)
{
var content = Encoding.UTF8.GetBytes($"content-{i}");
var hash = ComputeHash(content);
_store.AddArtifact(runId, $"file-{i}.txt", content, hash);
}
return runId;
}
private static string CreateValidManifest()
{
return JsonSerializer.Serialize(new
{
version = "1.0",
files = new[]
{
new { path = "test.txt", hash = "abc123" }
}
});
}
private static string ComputeHash(byte[] content)
{
var hash = SHA256.HashData(content);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
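// A minimal sketch of the DSSE envelope shape the signature tests above serialize
// (field names follow the DSSE spec; these record types are hypothetical — the
// verification service's own model is not shown in this diff):
internal sealed record DsseEnvelopeSketch(
[property: System.Text.Json.Serialization.JsonPropertyName("payloadType")] string PayloadType,
[property: System.Text.Json.Serialization.JsonPropertyName("payload")] string Payload, // base64-encoded payload bytes
[property: System.Text.Json.Serialization.JsonPropertyName("signatures")] IReadOnlyList<DsseSignatureSketch> Signatures);
internal sealed record DsseSignatureSketch(
[property: System.Text.Json.Serialization.JsonPropertyName("keyid")] string KeyId,
[property: System.Text.Json.Serialization.JsonPropertyName("sig")] string Sig);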
/// <summary>
/// Tests for pack run integration verification.
/// </summary>
public class PackRunVerificationTests
{
private readonly InMemoryExportArtifactStore _artifactStore;
private readonly InMemoryPackRunAttestationStore _packRunStore;
private readonly ExportVerificationService _service;
private readonly Guid _tenantId = Guid.NewGuid();
private readonly Guid _profileId = Guid.NewGuid();
public PackRunVerificationTests()
{
_artifactStore = new InMemoryExportArtifactStore();
_packRunStore = new InMemoryPackRunAttestationStore();
_service = new ExportVerificationService(
_artifactStore,
_packRunStore,
NullLogger<ExportVerificationService>.Instance);
}
[Fact]
public async Task VerifyPackRunIntegrationAsync_WithValidAttestation_ReturnsValid()
{
var exportRunId = Guid.NewGuid();
var packRunId = Guid.NewGuid();
var attestationId = $"attestation-{Guid.NewGuid()}";
// Setup export run
_artifactStore.AddRun(new RunMetadata
{
RunId = exportRunId,
TenantId = _tenantId,
ProfileId = _profileId
});
_artifactStore.SetManifest(exportRunId, "{}");
// Setup pack run attestation
_packRunStore.AddAttestation(new PackRunAttestationData
{
PackRunId = packRunId,
AttestationId = attestationId,
TenantId = _tenantId,
Status = "Signed",
PredicateType = "https://slsa.dev/provenance/v1",
Subjects =
[
new AttestationSubject
{
Name = "artifact.tar.gz",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
}
],
CreatedAt = DateTimeOffset.UtcNow
});
var request = new PackRunVerificationRequest
{
ExportRunId = exportRunId,
TenantId = _tenantId,
PackRunId = packRunId,
VerifySubjectAlignment = false,
VerifyProvenanceChain = true
};
var result = await _service.VerifyPackRunIntegrationAsync(request);
Assert.True(result.IsValid);
Assert.NotNull(result.Attestation);
Assert.True(result.Attestation.IsValid);
Assert.NotEmpty(result.ProvenanceLinks);
}
[Fact]
public async Task VerifyPackRunIntegrationAsync_WithMissingAttestation_ReturnsError()
{
var exportRunId = Guid.NewGuid();
var packRunId = Guid.NewGuid();
// Setup export run only
_artifactStore.AddRun(new RunMetadata
{
RunId = exportRunId,
TenantId = _tenantId,
ProfileId = _profileId
});
_artifactStore.SetManifest(exportRunId, "{}");
var request = new PackRunVerificationRequest
{
ExportRunId = exportRunId,
TenantId = _tenantId,
PackRunId = packRunId
};
var result = await _service.VerifyPackRunIntegrationAsync(request);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Code == VerificationErrorCodes.PackRunNotFound);
}
[Fact]
public void VerifySubjectAlignment_WithMatchingSubjects_ReturnsAligned()
{
var exportSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
},
new()
{
Name = "file2.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "def456" }
}
};
var packRunSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
},
new()
{
Name = "file2.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "def456" }
}
};
var result = _service.VerifySubjectAlignment(exportSubjects, packRunSubjects);
Assert.True(result.IsAligned);
Assert.Equal(2, result.MatchedCount);
Assert.Empty(result.DigestMismatches);
Assert.Empty(result.ExportOnlySubjects);
Assert.Empty(result.PackRunOnlySubjects);
}
[Fact]
public void VerifySubjectAlignment_WithDigestMismatch_ReturnsNotAligned()
{
var exportSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
}
};
var packRunSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "different" }
}
};
var result = _service.VerifySubjectAlignment(exportSubjects, packRunSubjects);
Assert.False(result.IsAligned);
Assert.Equal(0, result.MatchedCount);
Assert.Single(result.DigestMismatches);
Assert.Equal("file1.txt", result.DigestMismatches[0].SubjectName);
}
[Fact]
public void VerifySubjectAlignment_WithExportOnlySubjects_ReturnsNotAligned()
{
var exportSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
},
new()
{
Name = "extra.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "xyz789" }
}
};
var packRunSubjects = new List<AttestationSubject>
{
new()
{
Name = "file1.txt",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
}
};
var result = _service.VerifySubjectAlignment(exportSubjects, packRunSubjects);
Assert.False(result.IsAligned);
Assert.Equal(1, result.MatchedCount);
Assert.Single(result.ExportOnlySubjects);
Assert.Contains("extra.txt", result.ExportOnlySubjects);
}
[Fact]
public void VerifySubjectAlignment_WithEmptySubjects_ReturnsAligned()
{
var exportSubjects = new List<AttestationSubject>();
var packRunSubjects = new List<AttestationSubject>();
var result = _service.VerifySubjectAlignment(exportSubjects, packRunSubjects);
Assert.True(result.IsAligned);
Assert.Equal(0, result.MatchedCount);
}
[Fact]
public async Task ExtractProvenanceLinksAsync_WithValidAttestation_ReturnsLinks()
{
var exportRunId = Guid.NewGuid();
var packRunId = Guid.NewGuid();
var attestationId = $"att-{Guid.NewGuid()}";
_packRunStore.AddAttestation(new PackRunAttestationData
{
PackRunId = packRunId,
AttestationId = attestationId,
TenantId = _tenantId,
Status = "Signed",
Subjects =
[
new AttestationSubject
{
Name = "artifact.tar.gz",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
}
]
});
var links = await _service.ExtractProvenanceLinksAsync(exportRunId, packRunId);
Assert.NotEmpty(links);
Assert.Contains(links, l => l.Type == ProvenanceLinkType.ExportToAttestation);
Assert.Contains(links, l => l.Type == ProvenanceLinkType.AttestationToSubject);
Assert.Contains(links, l => l.Type == ProvenanceLinkType.PackRunToArtifact);
Assert.Contains(links, l => l.Type == ProvenanceLinkType.ArtifactToExport);
}
[Fact]
public async Task ExtractProvenanceLinksAsync_WithoutAttestation_ReturnsEmptyList()
{
var exportRunId = Guid.NewGuid();
var packRunId = Guid.NewGuid();
var links = await _service.ExtractProvenanceLinksAsync(exportRunId, packRunId);
Assert.Empty(links);
}
[Fact]
public async Task VerifyPackRunIntegrationAsync_WithProvenanceChain_VerifiesCompleteness()
{
var exportRunId = Guid.NewGuid();
var packRunId = Guid.NewGuid();
var attestationId = $"att-{Guid.NewGuid()}";
_artifactStore.AddRun(new RunMetadata
{
RunId = exportRunId,
TenantId = _tenantId,
ProfileId = _profileId
});
_artifactStore.SetManifest(exportRunId, "{}");
_packRunStore.AddAttestation(new PackRunAttestationData
{
PackRunId = packRunId,
AttestationId = attestationId,
TenantId = _tenantId,
Status = "Signed",
Subjects =
[
new AttestationSubject
{
Name = "artifact.tar.gz",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
}
]
});
var request = new PackRunVerificationRequest
{
ExportRunId = exportRunId,
TenantId = _tenantId,
PackRunId = packRunId,
VerifyProvenanceChain = true,
VerifySubjectAlignment = false
};
var result = await _service.VerifyPackRunIntegrationAsync(request);
Assert.True(result.IsValid);
Assert.NotNull(result.ProvenanceChain);
Assert.True(result.ProvenanceChain.IsComplete);
Assert.True(result.ProvenanceChain.ChainDepth > 0);
}
[Fact]
public async Task VerifyPackRunIntegrationAsync_WithoutPackRunId_ReturnsValidWhenVerificationDisabled()
{
var exportRunId = Guid.NewGuid();
_artifactStore.AddRun(new RunMetadata
{
RunId = exportRunId,
TenantId = _tenantId,
ProfileId = _profileId
});
_artifactStore.SetManifest(exportRunId, "{}");
// Don't specify PackRunId - verification should pass when both alignment and chain verification are disabled
var request = new PackRunVerificationRequest
{
ExportRunId = exportRunId,
TenantId = _tenantId,
VerifySubjectAlignment = false,
VerifyProvenanceChain = false
};
var result = await _service.VerifyPackRunIntegrationAsync(request);
// With no pack run ID and verification disabled, should pass
Assert.True(result.IsValid);
Assert.Null(result.Attestation);
}
[Fact]
public async Task VerifyPackRunIntegrationAsync_WithProvenanceChainVerificationEnabled_RequiresPackRun()
{
var exportRunId = Guid.NewGuid();
_artifactStore.AddRun(new RunMetadata
{
RunId = exportRunId,
TenantId = _tenantId,
ProfileId = _profileId
});
_artifactStore.SetManifest(exportRunId, "{}");
// Don't specify PackRunId but enable provenance chain verification
var request = new PackRunVerificationRequest
{
ExportRunId = exportRunId,
TenantId = _tenantId,
VerifySubjectAlignment = false,
VerifyProvenanceChain = true
};
var result = await _service.VerifyPackRunIntegrationAsync(request);
// With provenance verification enabled but no pack run, should have incomplete chain
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Code == VerificationErrorCodes.ProvenanceChainBroken);
}
}
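// A minimal sketch of the alignment check the VerifySubjectAlignment tests above pin
// down (an assumption: subjects are joined by Name and compared on their sha256 digest;
// any unmatched subject on either side breaks alignment):
internal static class SubjectAlignmentSketch
{
public static bool IsAligned(
IReadOnlyList<AttestationSubject> exportSubjects,
IReadOnlyList<AttestationSubject> packRunSubjects)
{
if (exportSubjects.Count != packRunSubjects.Count)
{
return false; // export-only or pack-run-only subjects exist
}
var byName = packRunSubjects.ToDictionary(s => s.Name, StringComparer.Ordinal);
foreach (var subject in exportSubjects)
{
if (!byName.TryGetValue(subject.Name, out var other) ||
!subject.Digest.TryGetValue("sha256", out var digest) ||
!other.Digest.TryGetValue("sha256", out var otherDigest) ||
!string.Equals(digest, otherDigest, StringComparison.Ordinal))
{
return false; // missing counterpart or digest mismatch
}
}
return true;
}
}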


@@ -0,0 +1,81 @@
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Interface for Trivy DB adapter that transforms StellaOps advisories to Trivy format.
/// </summary>
public interface ITrivyDbAdapter
{
/// <summary>
/// Adapter name.
/// </summary>
string Name { get; }
/// <summary>
/// Adapter ID for export metadata.
/// </summary>
string AdapterId { get; }
/// <summary>
/// Current schema version.
/// </summary>
TrivySchemaVersion SchemaVersion { get; }
/// <summary>
/// Validates the adapter configuration and schema version.
/// </summary>
/// <exception cref="TrivyAdapterException">Thrown when validation fails.</exception>
void ValidateConfiguration();
/// <summary>
/// Transforms a collection of StellaOps advisories to Trivy format.
/// </summary>
/// <param name="advisories">Input advisories.</param>
/// <param name="context">Adapter execution context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Transformation result with Trivy records and metadata.</returns>
Task<TrivyAdapterResult> TransformAsync(
IAsyncEnumerable<TrivyAdapterInputAdvisory> advisories,
TrivyAdapterContext context,
CancellationToken cancellationToken = default);
/// <summary>
/// Transforms a single advisory to Trivy format.
/// </summary>
/// <param name="advisory">Input advisory.</param>
/// <returns>Transformed records or empty if advisory is unsupported.</returns>
IReadOnlyList<TrivyVulnerabilityRecord> TransformAdvisory(TrivyAdapterInputAdvisory advisory);
/// <summary>
/// Validates a single advisory.
/// </summary>
/// <param name="advisory">Advisory to validate.</param>
/// <returns>Validation result.</returns>
TrivyAdvisoryValidationResult ValidateAdvisory(TrivyAdapterInputAdvisory advisory);
/// <summary>
/// Checks if a namespace is supported.
/// </summary>
bool IsNamespaceSupported(string? vendor, string? product);
/// <summary>
/// Checks if an ecosystem is supported.
/// </summary>
bool IsEcosystemSupported(string? ecosystem);
}
/// <summary>
/// Result of advisory validation.
/// </summary>
public sealed record TrivyAdvisoryValidationResult
{
public bool IsValid { get; init; }
public string? ErrorCode { get; init; }
public string? ErrorMessage { get; init; }
public IReadOnlyList<string>? Warnings { get; init; }
public static TrivyAdvisoryValidationResult Valid(IReadOnlyList<string>? warnings = null)
=> new() { IsValid = true, Warnings = warnings };
public static TrivyAdvisoryValidationResult Invalid(string errorCode, string message)
=> new() { IsValid = false, ErrorCode = errorCode, ErrorMessage = message };
}


@@ -0,0 +1,74 @@
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Interface for Trivy Java DB adapter that transforms Java ecosystem advisories.
/// </summary>
public interface ITrivyJavaDbAdapter
{
/// <summary>
/// Adapter name.
/// </summary>
string Name { get; }
/// <summary>
/// Adapter ID for export metadata.
/// </summary>
string AdapterId { get; }
/// <summary>
/// Current schema version.
/// </summary>
TrivySchemaVersion SchemaVersion { get; }
/// <summary>
/// Supported Java ecosystems.
/// </summary>
IReadOnlySet<string> SupportedEcosystems { get; }
/// <summary>
/// Validates the adapter configuration and schema version.
/// </summary>
/// <exception cref="TrivyAdapterException">Thrown when validation fails.</exception>
void ValidateConfiguration();
/// <summary>
/// Transforms a collection of advisories to Trivy Java DB format.
/// Only processes advisories with Java ecosystem packages.
/// </summary>
/// <param name="advisories">Input advisories (filters to Java ecosystems internally).</param>
/// <param name="context">Adapter execution context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Transformation result with Java vulnerability records and metadata.</returns>
Task<TrivyJavaAdapterResult> TransformAsync(
IAsyncEnumerable<TrivyAdapterInputAdvisory> advisories,
TrivyAdapterContext context,
CancellationToken cancellationToken = default);
/// <summary>
/// Transforms a single advisory to Java DB format.
/// </summary>
/// <param name="advisory">Input advisory.</param>
/// <returns>Transformed Java records or empty if advisory has no Java packages.</returns>
IReadOnlyList<TrivyJavaVulnerabilityRecord> TransformAdvisory(TrivyAdapterInputAdvisory advisory);
/// <summary>
/// Checks if an advisory contains Java ecosystem packages.
/// </summary>
bool HasJavaPackages(TrivyAdapterInputAdvisory advisory);
/// <summary>
/// Parses Maven coordinates from package name or PURL.
/// </summary>
/// <param name="packageName">Package name (may contain group:artifact format).</param>
/// <param name="purl">Package URL (optional).</param>
/// <returns>Parsed coordinates or null if unable to parse.</returns>
MavenCoordinates? ParseMavenCoordinates(string? packageName, string? purl);
}
/// <summary>
/// Maven coordinates (group:artifact:version).
/// </summary>
public sealed record MavenCoordinates(
string GroupId,
string ArtifactId,
string? Version = null);
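// A minimal sketch of coordinate parsing (assumptions: "group:artifact[:version]"
// package names and "pkg:maven/<group>/<artifact>@<version>" PURLs; the real adapter
// may handle qualifiers, subpaths, and other PURL variants):
internal static class MavenCoordinatesSketch
{
public static MavenCoordinates? Parse(string? packageName, string? purl)
{
if (!string.IsNullOrWhiteSpace(packageName))
{
var parts = packageName.Split(':');
if (parts.Length >= 2)
{
return new MavenCoordinates(parts[0], parts[1], parts.Length > 2 ? parts[2] : null);
}
}
if (purl is not null && purl.StartsWith("pkg:maven/", StringComparison.OrdinalIgnoreCase))
{
var rest = purl["pkg:maven/".Length..];
var at = rest.IndexOf('@');
var version = at >= 0 ? rest[(at + 1)..] : null;
var path = at >= 0 ? rest[..at] : rest;
var segments = path.Split('/');
if (segments.Length >= 2)
{
return new MavenCoordinates(segments[0], segments[1], version);
}
}
return null;
}
}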


@@ -0,0 +1,97 @@
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Error codes for Trivy adapter operations.
/// </summary>
public static class TrivyAdapterErrors
{
/// <summary>
/// Schema version is not supported.
/// </summary>
public const string UnsupportedSchemaVersion = "ERR_EXPORT_UNSUPPORTED_SCHEMA";
/// <summary>
/// Namespace is not in the allowlist.
/// </summary>
public const string UnsupportedNamespace = "ERR_EXPORT_UNSUPPORTED_NAMESPACE";
/// <summary>
/// Export produced no records and AllowEmpty is false.
/// </summary>
public const string EmptyExport = "ERR_EXPORT_EMPTY";
/// <summary>
/// Invalid advisory data.
/// </summary>
public const string InvalidAdvisory = "ERR_EXPORT_INVALID_ADVISORY";
/// <summary>
/// General adapter error.
/// </summary>
public const string AdapterError = "ERR_EXPORT_ADAPTER_TRIVY";
}
/// <summary>
/// Exception thrown when Trivy adapter encounters an error.
/// </summary>
public sealed class TrivyAdapterException : Exception
{
/// <summary>
/// Error code from TrivyAdapterErrors.
/// </summary>
public string ErrorCode { get; }
/// <summary>
/// Additional error details.
/// </summary>
public IReadOnlyDictionary<string, string>? Details { get; }
public TrivyAdapterException(string errorCode, string message)
: base(message)
{
ErrorCode = errorCode;
}
public TrivyAdapterException(string errorCode, string message, IReadOnlyDictionary<string, string>? details)
: base(message)
{
ErrorCode = errorCode;
Details = details;
}
public TrivyAdapterException(string errorCode, string message, Exception innerException)
: base(message, innerException)
{
ErrorCode = errorCode;
}
/// <summary>
/// Creates an unsupported schema version exception.
/// </summary>
public static TrivyAdapterException UnsupportedSchema(int requestedVersion)
=> new(
TrivyAdapterErrors.UnsupportedSchemaVersion,
$"Schema version {requestedVersion} is not supported. Only schema version 2 is currently implemented.",
new Dictionary<string, string>
{
["requestedVersion"] = requestedVersion.ToString(),
["supportedVersions"] = "2"
});
/// <summary>
/// Creates an unsupported namespace exception.
/// </summary>
public static TrivyAdapterException UnsupportedNamespace(string @namespace)
=> new(
TrivyAdapterErrors.UnsupportedNamespace,
$"Namespace '{@namespace}' is not supported by the Trivy adapter.",
new Dictionary<string, string> { ["namespace"] = @namespace });
/// <summary>
/// Creates an empty export exception.
/// </summary>
public static TrivyAdapterException EmptyExport()
=> new(
TrivyAdapterErrors.EmptyExport,
"Export produced no records and AllowEmpty is disabled.");
}


@@ -0,0 +1,278 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Input advisory record for Trivy adapter (StellaOps normalized format).
/// </summary>
public sealed record TrivyAdapterInputAdvisory
{
/// <summary>
/// Source information.
/// </summary>
public required TrivyAdapterSource Source { get; init; }
/// <summary>
/// Identifiers (CVE, CWE, aliases).
/// </summary>
public required TrivyAdapterIdentifiers Identifiers { get; init; }
/// <summary>
/// Advisory summary/title.
/// </summary>
public string? Summary { get; init; }
/// <summary>
/// Full description.
/// </summary>
public string? Description { get; init; }
/// <summary>
/// Severity information.
/// </summary>
public TrivyAdapterSeverity? Severity { get; init; }
/// <summary>
/// CVSS scores.
/// </summary>
public IReadOnlyList<TrivyAdapterCvss>? Cvss { get; init; }
/// <summary>
/// Affected packages.
/// </summary>
public IReadOnlyList<TrivyAdapterAffected>? Affects { get; init; }
/// <summary>
/// Publication date.
/// </summary>
public DateTimeOffset? Published { get; init; }
/// <summary>
/// Last modification date.
/// </summary>
public DateTimeOffset? Modified { get; init; }
/// <summary>
/// Vendor-specific statement.
/// </summary>
public string? VendorStatement { get; init; }
/// <summary>
/// Reference URLs.
/// </summary>
public IReadOnlyList<string>? References { get; init; }
}
/// <summary>
/// Advisory source.
/// </summary>
public sealed record TrivyAdapterSource
{
/// <summary>
/// Vendor name (e.g., "Ubuntu", "Red Hat").
/// </summary>
public required string Vendor { get; init; }
/// <summary>
/// Product/version (e.g., "22.04").
/// </summary>
public string? Product { get; init; }
}
/// <summary>
/// Advisory identifiers.
/// </summary>
public sealed record TrivyAdapterIdentifiers
{
/// <summary>
/// CVE identifiers.
/// </summary>
public IReadOnlyList<string>? Cve { get; init; }
/// <summary>
/// CWE identifiers.
/// </summary>
public IReadOnlyList<string>? Cwe { get; init; }
/// <summary>
/// Other aliases.
/// </summary>
public IReadOnlyList<string>? Aliases { get; init; }
}
/// <summary>
/// Severity information.
/// </summary>
public sealed record TrivyAdapterSeverity
{
/// <summary>
/// Normalized severity (critical, high, medium, low, none, info).
/// </summary>
public string? Normalized { get; init; }
/// <summary>
/// Original vendor severity.
/// </summary>
public string? Vendor { get; init; }
}
/// <summary>
/// CVSS score entry.
/// </summary>
public sealed record TrivyAdapterCvss
{
/// <summary>
/// CVSS vector string.
/// </summary>
public required string Vector { get; init; }
/// <summary>
/// CVSS base score.
/// </summary>
public double Score { get; init; }
/// <summary>
/// Score source (e.g., "NVD", "vendor").
/// </summary>
public string? Source { get; init; }
/// <summary>
/// CVSS version (e.g., "2.0", "3.0", "3.1").
/// </summary>
public string? Version { get; init; }
}
/// <summary>
/// Affected package entry.
/// </summary>
public sealed record TrivyAdapterAffected
{
/// <summary>
/// Package information.
/// </summary>
public required TrivyAdapterPackage Package { get; init; }
/// <summary>
/// Vulnerable version range (e.g., "< 1.2.3").
/// </summary>
public string? VulnerableRange { get; init; }
/// <summary>
/// Remediation information.
/// </summary>
public IReadOnlyList<TrivyAdapterRemediation>? Remediations { get; init; }
/// <summary>
/// Affected states (for CPE-based advisories).
/// </summary>
public TrivyAdapterStates? States { get; init; }
}
/// <summary>
/// Package information.
/// </summary>
public sealed record TrivyAdapterPackage
{
/// <summary>
/// Package name.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Package ecosystem (npm, pip, maven, etc.).
/// </summary>
public string? Ecosystem { get; init; }
/// <summary>
/// NEVRA for RPM packages.
/// </summary>
public string? Nevra { get; init; }
/// <summary>
/// EVR for Debian packages.
/// </summary>
public string? Evr { get; init; }
/// <summary>
/// Package URL (PURL).
/// </summary>
public string? Purl { get; init; }
/// <summary>
/// Maven group ID (for Java packages).
/// </summary>
public string? Group { get; init; }
/// <summary>
/// Maven artifact ID (for Java packages).
/// </summary>
public string? Artifact { get; init; }
/// <summary>
/// Package version.
/// </summary>
public string? Version { get; init; }
}
/// <summary>
/// Remediation information.
/// </summary>
public sealed record TrivyAdapterRemediation
{
/// <summary>
/// Fixed version.
/// </summary>
public string? FixedVersion { get; init; }
/// <summary>
/// Remediation URLs.
/// </summary>
public IReadOnlyList<string>? Urls { get; init; }
}
/// <summary>
/// State information for CPE-based advisories.
/// </summary>
public sealed record TrivyAdapterStates
{
/// <summary>
/// CPE strings.
/// </summary>
public IReadOnlyList<string>? Cpes { get; init; }
}
/// <summary>
/// Context for adapter execution.
/// </summary>
public sealed record TrivyAdapterContext
{
/// <summary>
/// Export run ID.
/// </summary>
public required string RunId { get; init; }
/// <summary>
/// Export profile ID.
/// </summary>
public required string ProfileId { get; init; }
/// <summary>
/// Tenant identifier.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Policy snapshot ID (optional).
/// </summary>
public string? PolicySnapshotId { get; init; }
/// <summary>
/// Target Trivy version (for compatibility).
/// </summary>
public string TrivyVersion { get; init; } = "0.50.1";
/// <summary>
/// Timestamp for generated metadata.
/// </summary>
public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
}


@@ -0,0 +1,87 @@
using System.ComponentModel.DataAnnotations;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Configuration options for the Trivy DB adapter.
/// </summary>
public sealed class TrivyAdapterOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "ExportCenter:Adapters:Trivy";
/// <summary>
/// Schema version to enforce. Only version 2 is currently supported; requesting version 3+ throws an unsupported-schema error until implemented.
/// </summary>
[Range(2, 2, ErrorMessage = "Only schema version 2 is currently supported.")]
public int SchemaVersion { get; set; } = 2;
/// <summary>
/// Enable Java DB bundle generation for Maven, Gradle, and SBT ecosystems.
/// </summary>
public bool IncludeJavaDb { get; set; }
/// <summary>
/// Allow the export to produce an empty bundle when no records match. If false, the adapter fails with an empty-export error.
/// </summary>
public bool AllowEmpty { get; set; }
/// <summary>
/// Maximum CVSS vectors per vulnerability entry to avoid oversized payloads.
/// </summary>
[Range(1, 100)]
public int MaxCvssVectorsPerEntry { get; set; } = 5;
/// <summary>
/// Maximum title length; overflowing text is truncated from the title and prepended to the description.
/// </summary>
[Range(64, 1024)]
public int MaxTitleLength { get; set; } = 256;
/// <summary>
/// Supported namespaces allowlist. If empty, all namespaces are allowed.
/// </summary>
public HashSet<string> SupportedNamespaces { get; set; } = new(StringComparer.OrdinalIgnoreCase)
{
"alpine",
"amazon",
"debian",
"oracle",
"redhat",
"rocky",
"suse",
"ubuntu",
"photon",
"mariner",
"wolfi",
"chainguard"
};
/// <summary>
/// Supported OSS ecosystems.
/// </summary>
public HashSet<string> SupportedEcosystems { get; set; } = new(StringComparer.OrdinalIgnoreCase)
{
"npm",
"pip",
"pypi",
"nuget",
"go",
"cargo",
"composer",
"gem",
"rubygems"
};
/// <summary>
/// Java ecosystems supported by the Java DB extension.
/// </summary>
public HashSet<string> JavaEcosystems { get; set; } = new(StringComparer.OrdinalIgnoreCase)
{
"maven",
"gradle",
"sbt"
};
}
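// A minimal sketch of registering these options at startup (hypothetical wiring, not
// part of this commit; assumes the Microsoft.Extensions.DependencyInjection and
// Microsoft.Extensions.Options packages). ValidateDataAnnotations enforces the [Range]
// attributes above, so an unsupported SchemaVersion fails fast:
//
//   services.AddOptions<TrivyAdapterOptions>()
//       .BindConfiguration(TrivyAdapterOptions.SectionName)
//       .ValidateDataAnnotations()
//       .ValidateOnStart();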


@@ -0,0 +1,385 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Core Trivy DB adapter implementation.
/// </summary>
public sealed class TrivyDbAdapter : ITrivyDbAdapter
{
private readonly TrivyAdapterOptions _options;
private readonly TrivyNamespaceMapper _namespaceMapper;
private readonly ILogger<TrivyDbAdapter> _logger;
public string Name => "trivy:db";
public string AdapterId => "adapter:trivy:db";
public TrivySchemaVersion SchemaVersion => (TrivySchemaVersion)_options.SchemaVersion;
public TrivyDbAdapter(
IOptions<TrivyAdapterOptions> options,
ILogger<TrivyDbAdapter> logger)
{
_options = options.Value;
_namespaceMapper = new TrivyNamespaceMapper(_options);
_logger = logger;
}
/// <inheritdoc />
public void ValidateConfiguration()
{
if (_options.SchemaVersion != 2)
{
throw TrivyAdapterException.UnsupportedSchema(_options.SchemaVersion);
}
}
/// <inheritdoc />
public async Task<TrivyAdapterResult> TransformAsync(
IAsyncEnumerable<TrivyAdapterInputAdvisory> advisories,
TrivyAdapterContext context,
CancellationToken cancellationToken = default)
{
ValidateConfiguration();
var records = new List<TrivyVulnerabilityRecord>();
var seenKeys = new HashSet<string>(StringComparer.Ordinal);
var stats = new TransformStats();
await foreach (var advisory in advisories.WithCancellation(cancellationToken))
{
stats.TotalInput++;
var validation = ValidateAdvisory(advisory);
if (!validation.IsValid)
{
stats.SkippedInvalid++;
_logger.LogDebug(
"Skipping advisory {AdvisoryId}: {Error}",
advisory.Identifiers.Cve?.FirstOrDefault() ?? "unknown",
validation.ErrorMessage);
continue;
}
var transformed = TransformAdvisory(advisory);
foreach (var record in transformed)
{
var key = GetDeduplicationKey(record);
if (seenKeys.Add(key))
{
records.Add(record);
}
else
{
stats.Duplicates++;
}
}
}
if (records.Count == 0 && !_options.AllowEmpty)
{
throw TrivyAdapterException.EmptyExport();
}
// Sort records for deterministic output
var sortedRecords = records
.OrderBy(r => r.Namespace, StringComparer.Ordinal)
.ThenBy(r => r.Package.Name, StringComparer.Ordinal)
.ThenBy(r => r.Vulnerability.Id, StringComparer.Ordinal)
.ToList();
var metadata = CreateMetadata(context);
_logger.LogInformation(
"Trivy adapter transformed {OutputCount} records from {InputCount} advisories " +
"(skipped: {SkippedInvalid} invalid, {Duplicates} duplicates)",
sortedRecords.Count,
stats.TotalInput,
stats.SkippedInvalid,
stats.Duplicates);
return new TrivyAdapterResult
{
Records = sortedRecords,
Metadata = metadata,
TotalInputRecords = stats.TotalInput,
SkippedUnsupportedNamespace = 0, // namespace rejections are reported as invalid advisories by ValidateAdvisory
SkippedInvalidData = stats.SkippedInvalid,
DuplicatesRemoved = stats.Duplicates
};
}
/// <inheritdoc />
public IReadOnlyList<TrivyVulnerabilityRecord> TransformAdvisory(TrivyAdapterInputAdvisory advisory)
{
var records = new List<TrivyVulnerabilityRecord>();
// If no affects, create a single record from the advisory
if (advisory.Affects is null || advisory.Affects.Count == 0)
{
var namespaceResult = _namespaceMapper.MapNamespace(
advisory.Source.Vendor,
advisory.Source.Product);
if (namespaceResult is null)
{
return records;
}
var vulnerability = CreateVulnerability(advisory);
var package = new TrivyPackage { Name = "unknown" };
records.Add(new TrivyVulnerabilityRecord
{
Namespace = TrivyNamespaceMapper.FormatNamespace(namespaceResult),
Package = package,
Vulnerability = vulnerability
});
return records;
}
// Create records for each affected package
foreach (var affected in advisory.Affects)
{
var namespaceResult = ResolveNamespace(advisory.Source, affected.Package);
if (namespaceResult is null)
{
continue;
}
var vulnerability = CreateVulnerability(advisory);
var package = CreatePackage(affected);
records.Add(new TrivyVulnerabilityRecord
{
Namespace = TrivyNamespaceMapper.FormatNamespace(namespaceResult),
Package = package,
Vulnerability = vulnerability
});
}
return records;
}
/// <inheritdoc />
public TrivyAdvisoryValidationResult ValidateAdvisory(TrivyAdapterInputAdvisory advisory)
{
var warnings = new List<string>();
// Must have at least one identifier
if ((advisory.Identifiers.Cve is null || advisory.Identifiers.Cve.Count == 0) &&
(advisory.Identifiers.Aliases is null || advisory.Identifiers.Aliases.Count == 0))
{
return TrivyAdvisoryValidationResult.Invalid(
TrivyAdapterErrors.InvalidAdvisory,
"Advisory must have at least one CVE or alias identifier.");
}
// Validate source
if (string.IsNullOrWhiteSpace(advisory.Source.Vendor))
{
return TrivyAdvisoryValidationResult.Invalid(
TrivyAdapterErrors.InvalidAdvisory,
"Advisory source vendor is required.");
}
// Check namespace support
if (!IsNamespaceSupported(advisory.Source.Vendor, advisory.Source.Product))
{
// Check if any affected package has a supported ecosystem
var hasSupported = advisory.Affects?.Any(a =>
IsEcosystemSupported(a.Package.Ecosystem)) ?? false;
if (!hasSupported)
{
return TrivyAdvisoryValidationResult.Invalid(
TrivyAdapterErrors.UnsupportedNamespace,
$"Namespace '{advisory.Source.Vendor}' is not supported.");
}
}
// Warn about missing severity
if (advisory.Severity?.Normalized is null && (advisory.Cvss is null || advisory.Cvss.Count == 0))
{
warnings.Add("Advisory has no severity or CVSS; will use UNKNOWN severity.");
}
return TrivyAdvisoryValidationResult.Valid(warnings.Count > 0 ? warnings : null);
}
/// <inheritdoc />
public bool IsNamespaceSupported(string? vendor, string? product)
{
var result = _namespaceMapper.MapNamespace(vendor, product);
return result is not null;
}
/// <inheritdoc />
public bool IsEcosystemSupported(string? ecosystem)
{
var result = _namespaceMapper.MapEcosystem(ecosystem);
return result is not null;
}
private TrivyNamespaceResult? ResolveNamespace(TrivyAdapterSource source, TrivyAdapterPackage package)
{
// Try ecosystem first for language packages
if (!string.IsNullOrWhiteSpace(package.Ecosystem))
{
var ecosystemResult = _namespaceMapper.MapEcosystem(package.Ecosystem);
if (ecosystemResult is not null)
{
return ecosystemResult;
}
}
// Fall back to source vendor/product
return _namespaceMapper.MapNamespace(source.Vendor, source.Product);
}
private TrivyVulnerability CreateVulnerability(TrivyAdapterInputAdvisory advisory)
{
var primaryId = advisory.Identifiers.Cve?.FirstOrDefault()
?? advisory.Identifiers.Aliases?.FirstOrDefault()
?? "UNKNOWN";
var title = advisory.Summary;
var description = advisory.Description;
// Truncate title and move excess to description
if (title is not null && title.Length > _options.MaxTitleLength)
{
var overflow = title[_options.MaxTitleLength..];
title = title[.._options.MaxTitleLength];
description = string.IsNullOrEmpty(description)
? overflow
: $"{overflow}\n\n{description}";
}
// Normalize line endings
if (description is not null)
{
description = description.Replace("\r\n", "\n").Replace("\r", "\n");
}
// Map severity
var severity = TrivySeverityMapper.MapSeverity(advisory.Severity?.Normalized);
// If no severity, try to derive from CVSS
if (severity == TrivySeverities.Unknown && advisory.Cvss?.Count > 0)
{
var maxScore = advisory.Cvss.Max(c => c.Score);
severity = TrivySeverityMapper.SeverityFromCvssScore(maxScore);
}
// Build CVSS list (with truncation)
var cvss = advisory.Cvss?
.Take(_options.MaxCvssVectorsPerEntry)
.Select(c => new TrivyCvss
{
Vector = c.Vector,
Score = c.Score,
Source = c.Source,
Version = c.Version
})
.ToList();
// Build references
var references = new List<string>();
if (advisory.References is not null)
{
references.AddRange(advisory.References);
}
// Add non-CVE aliases to references
if (advisory.Identifiers.Aliases is not null)
{
foreach (var alias in advisory.Identifiers.Aliases)
{
if (!alias.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase) &&
!references.Contains(alias))
{
references.Add(alias);
}
}
}
return new TrivyVulnerability
{
Id = primaryId,
CveIds = advisory.Identifiers.Cve?.ToList(),
CweIds = advisory.Identifiers.Cwe?.ToList(),
Title = title,
Description = description,
Severity = severity,
Cvss = cvss?.Count > 0 ? cvss : null,
References = references.Count > 0 ? references.Distinct().ToList() : null,
PublishedDate = advisory.Published,
LastModifiedDate = advisory.Modified,
VendorSeverity = advisory.Severity?.Vendor,
VendorVectors = advisory.VendorStatement
};
}
private static TrivyPackage CreatePackage(TrivyAdapterAffected affected)
{
var pkg = affected.Package;
var fixedVersion = affected.Remediations?.FirstOrDefault()?.FixedVersion;
var links = affected.Remediations?
.SelectMany(r => r.Urls ?? Enumerable.Empty<string>())
.Distinct()
.ToList();
// Determine version string
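        // Prefer the most specific form available: RPM NEVRA
        // (name-epoch:version-release.arch), then Debian-style EVR (epoch:version-revision),
        // then the raw vulnerable range, then the plain version string.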
var version = pkg.Nevra ?? pkg.Evr ?? affected.VulnerableRange ?? pkg.Version;
return new TrivyPackage
{
Name = pkg.Name,
Version = version,
FixedVersion = fixedVersion,
Ecosystem = pkg.Ecosystem,
VulnerableVersionRange = affected.VulnerableRange,
Purl = pkg.Purl,
Cpes = affected.States?.Cpes?.ToList(),
Links = links?.Count > 0 ? links : null
};
}
private TrivyDbMetadata CreateMetadata(TrivyAdapterContext context)
{
return new TrivyDbMetadata
{
SchemaVersion = _options.SchemaVersion,
BuildInfo = new TrivyBuildInfo
{
TrivyVersion = context.TrivyVersion,
                // Normalize to UTC so the literal 'Z' suffix stays truthful for non-UTC offsets.
                VulnerabilityDbVersion = context.GeneratedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
},
UpdatedAt = context.GeneratedAt,
Stella = new TrivyStellaBlock
{
RunId = context.RunId,
ProfileId = context.ProfileId,
Tenant = context.TenantId,
PolicySnapshotId = context.PolicySnapshotId,
SchemaVersion = _options.SchemaVersion,
GeneratedAt = context.GeneratedAt
}
};
}
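    // Illustrative key (example only): "ubuntu 22.04|openssl|<3.0.2-0ubuntu1.9|CVE-2023-0464".
    // Records collide only when namespace, package name, version/range, and
    // vulnerability ID all match.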
private static string GetDeduplicationKey(TrivyVulnerabilityRecord record)
{
return $"{record.Namespace}|{record.Package.Name}|{record.Package.VulnerableVersionRange ?? record.Package.Version ?? ""}|{record.Vulnerability.Id}";
}
private sealed class TransformStats
{
public int TotalInput;
public int SkippedInvalid;
public int Duplicates;
}
}

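A minimal driving sketch for the adapter above. This is illustrative only: it assumes the core adapter's constructor mirrors the Java DB adapter's (IOptions<TrivyAdapterOptions> plus ILogger) and that TrivyAdapterContext is an init-settable record; `advisories` stands in for any IAsyncEnumerable<TrivyAdapterInputAdvisory> produced by the caller.

    // Sketch only - constructor and context shapes are assumptions, not the verified API.
    ITrivyDbAdapter adapter = new TrivyDbAdapter(
        Options.Create(new TrivyAdapterOptions()),
        LoggerFactory.Create(b => b.AddConsole()).CreateLogger<TrivyDbAdapter>());

    var context = new TrivyAdapterContext
    {
        RunId = "run-001",
        ProfileId = "default",
        TenantId = "tenant-a",
        TrivyVersion = "0.50.0",
        GeneratedAt = DateTimeOffset.UtcNow
    };

    var result = await adapter.TransformAsync(advisories, context);
    Console.WriteLine($"{result.Records.Count} records ({result.DuplicatesRemoved} duplicates removed)");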
View File

@@ -0,0 +1,70 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Dependency injection extensions for Trivy DB adapters.
/// </summary>
public static class TrivyDbAdapterServiceCollectionExtensions
{
/// <summary>
/// Adds Trivy DB adapter services (core and Java DB).
/// </summary>
public static IServiceCollection AddTrivyDbAdapters(
this IServiceCollection services,
IConfiguration configuration)
{
services.Configure<TrivyAdapterOptions>(
configuration.GetSection(TrivyAdapterOptions.SectionName));
services.AddSingleton<ITrivyDbAdapter, TrivyDbAdapter>();
services.AddSingleton<ITrivyJavaDbAdapter, TrivyJavaDbAdapter>();
return services;
}
/// <summary>
/// Adds Trivy DB adapter services with custom options.
/// </summary>
public static IServiceCollection AddTrivyDbAdapters(
this IServiceCollection services,
Action<TrivyAdapterOptions> configureOptions)
{
services.Configure(configureOptions);
services.AddSingleton<ITrivyDbAdapter, TrivyDbAdapter>();
services.AddSingleton<ITrivyJavaDbAdapter, TrivyJavaDbAdapter>();
return services;
}
/// <summary>
/// Adds only the core Trivy DB adapter (without Java DB).
/// </summary>
public static IServiceCollection AddTrivyDbAdapter(
this IServiceCollection services,
IConfiguration configuration)
{
services.Configure<TrivyAdapterOptions>(
configuration.GetSection(TrivyAdapterOptions.SectionName));
services.AddSingleton<ITrivyDbAdapter, TrivyDbAdapter>();
return services;
}
/// <summary>
/// Adds only the Java DB adapter.
/// </summary>
public static IServiceCollection AddTrivyJavaDbAdapter(
this IServiceCollection services,
IConfiguration configuration)
{
services.Configure<TrivyAdapterOptions>(
configuration.GetSection(TrivyAdapterOptions.SectionName));
services.AddSingleton<ITrivyJavaDbAdapter, TrivyJavaDbAdapter>();
return services;
}
}

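A hedged wiring example for the extensions above, assuming a generic host and a configuration section named by TrivyAdapterOptions.SectionName:

    var builder = Host.CreateApplicationBuilder(args);

    // Bind TrivyAdapterOptions from configuration and register both adapters.
    builder.Services.AddTrivyDbAdapters(builder.Configuration);

    // Alternatively, configure options in code (property names taken from usages
    // elsewhere in this changeset: IncludeJavaDb, MaxTitleLength, SchemaVersion).
    builder.Services.AddTrivyDbAdapters(options =>
    {
        options.IncludeJavaDb = true;
        options.MaxTitleLength = 120;
    });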
View File

@@ -0,0 +1,198 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Trivy database metadata.json structure.
/// </summary>
public sealed record TrivyDbMetadata
{
[JsonPropertyName("schemaVersion")]
public required int SchemaVersion { get; init; }
[JsonPropertyName("buildInfo")]
public required TrivyBuildInfo BuildInfo { get; init; }
[JsonPropertyName("updatedAt")]
public required DateTimeOffset UpdatedAt { get; init; }
[JsonPropertyName("stella")]
public TrivyStellaBlock? Stella { get; init; }
}
/// <summary>
/// Build information block.
/// </summary>
public sealed record TrivyBuildInfo
{
[JsonPropertyName("trivyVersion")]
public required string TrivyVersion { get; init; }
[JsonPropertyName("vulnerabilityDBVersion")]
public required string VulnerabilityDbVersion { get; init; }
}
/// <summary>
/// StellaOps metadata block embedded in metadata.json.
/// </summary>
public sealed record TrivyStellaBlock
{
[JsonPropertyName("runId")]
public required string RunId { get; init; }
[JsonPropertyName("profileId")]
public required string ProfileId { get; init; }
[JsonPropertyName("tenant")]
public required string Tenant { get; init; }
[JsonPropertyName("policySnapshotId")]
public string? PolicySnapshotId { get; init; }
[JsonPropertyName("schemaVersion")]
public int SchemaVersion { get; init; }
[JsonPropertyName("generatedAt")]
public DateTimeOffset GeneratedAt { get; init; }
}
/// <summary>
/// Trivy vulnerability entry.
/// </summary>
public sealed record TrivyVulnerability
{
[JsonPropertyName("ID")]
public required string Id { get; init; }
[JsonPropertyName("CVEIDs")]
public IReadOnlyList<string>? CveIds { get; init; }
[JsonPropertyName("CWEIDs")]
public IReadOnlyList<string>? CweIds { get; init; }
[JsonPropertyName("Title")]
public string? Title { get; init; }
[JsonPropertyName("Description")]
public string? Description { get; init; }
[JsonPropertyName("Severity")]
public required string Severity { get; init; }
[JsonPropertyName("CVSS")]
public IReadOnlyList<TrivyCvss>? Cvss { get; init; }
[JsonPropertyName("References")]
public IReadOnlyList<string>? References { get; init; }
[JsonPropertyName("PublishedDate")]
public DateTimeOffset? PublishedDate { get; init; }
[JsonPropertyName("LastModifiedDate")]
public DateTimeOffset? LastModifiedDate { get; init; }
[JsonPropertyName("VendorSeverity")]
public string? VendorSeverity { get; init; }
[JsonPropertyName("VendorVectors")]
public string? VendorVectors { get; init; }
}
/// <summary>
/// Trivy CVSS score entry.
/// </summary>
public sealed record TrivyCvss
{
[JsonPropertyName("vector")]
public required string Vector { get; init; }
[JsonPropertyName("score")]
public double Score { get; init; }
[JsonPropertyName("source")]
public string? Source { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
}
/// <summary>
/// Trivy package entry.
/// </summary>
public sealed record TrivyPackage
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
[JsonPropertyName("fixedVersion")]
public string? FixedVersion { get; init; }
[JsonPropertyName("ecosystem")]
public string? Ecosystem { get; init; }
[JsonPropertyName("vulnerableVersionRange")]
public string? VulnerableVersionRange { get; init; }
[JsonPropertyName("PURL")]
public string? Purl { get; init; }
[JsonPropertyName("cpes")]
public IReadOnlyList<string>? Cpes { get; init; }
[JsonPropertyName("links")]
public IReadOnlyList<string>? Links { get; init; }
}
/// <summary>
/// Complete Trivy vulnerability record with package context.
/// </summary>
public sealed record TrivyVulnerabilityRecord
{
[JsonPropertyName("namespace")]
public required string Namespace { get; init; }
[JsonPropertyName("package")]
public required TrivyPackage Package { get; init; }
[JsonPropertyName("vulnerability")]
public required TrivyVulnerability Vulnerability { get; init; }
}
/// <summary>
/// Result of Trivy adapter transformation.
/// </summary>
public sealed record TrivyAdapterResult
{
/// <summary>
/// Transformed vulnerability records.
/// </summary>
public required IReadOnlyList<TrivyVulnerabilityRecord> Records { get; init; }
/// <summary>
/// Metadata for the export.
/// </summary>
public required TrivyDbMetadata Metadata { get; init; }
/// <summary>
/// Number of records skipped due to unsupported namespace.
/// </summary>
public int SkippedUnsupportedNamespace { get; init; }
/// <summary>
/// Number of records skipped due to invalid data.
/// </summary>
public int SkippedInvalidData { get; init; }
/// <summary>
/// Number of duplicate records removed.
/// </summary>
public int DuplicatesRemoved { get; init; }
/// <summary>
/// Total input records processed.
/// </summary>
public int TotalInputRecords { get; init; }
}

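To make the metadata.json shape concrete, a small serialization sketch (the pretty-printing options here are illustrative; the export pipeline's actual serializer settings are not shown in this diff):

    var metadata = new TrivyDbMetadata
    {
        SchemaVersion = 2,
        BuildInfo = new TrivyBuildInfo
        {
            TrivyVersion = "0.50.0",
            VulnerabilityDbVersion = "2025-12-11T00:00:00Z"
        },
        UpdatedAt = DateTimeOffset.Parse("2025-12-11T00:00:00+00:00")
    };

    var json = System.Text.Json.JsonSerializer.Serialize(
        metadata,
        new System.Text.Json.JsonSerializerOptions { WriteIndented = true });
    // Keys follow the [JsonPropertyName] attributes above:
    // { "schemaVersion": 2, "buildInfo": { "trivyVersion": "0.50.0", ... }, ... }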
View File

@@ -0,0 +1,433 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.WebService.Adapters.Trivy;
/// <summary>
/// Trivy Java DB adapter implementation for Maven, Gradle, and SBT ecosystems.
/// </summary>
public sealed partial class TrivyJavaDbAdapter : ITrivyJavaDbAdapter
{
private readonly TrivyAdapterOptions _options;
private readonly ILogger<TrivyJavaDbAdapter> _logger;
public string Name => "trivy:java-db";
public string AdapterId => "adapter:trivy:java-db";
public TrivySchemaVersion SchemaVersion => (TrivySchemaVersion)_options.SchemaVersion;
public IReadOnlySet<string> SupportedEcosystems { get; }
public TrivyJavaDbAdapter(
IOptions<TrivyAdapterOptions> options,
ILogger<TrivyJavaDbAdapter> logger)
{
_options = options.Value;
_logger = logger;
SupportedEcosystems = _options.JavaEcosystems;
}
/// <inheritdoc />
public void ValidateConfiguration()
{
if (_options.SchemaVersion != 2)
{
throw TrivyAdapterException.UnsupportedSchema(_options.SchemaVersion);
}
if (!_options.IncludeJavaDb)
{
_logger.LogWarning("Java DB adapter called but IncludeJavaDb is disabled in configuration");
}
}
/// <inheritdoc />
public async Task<TrivyJavaAdapterResult> TransformAsync(
IAsyncEnumerable<TrivyAdapterInputAdvisory> advisories,
TrivyAdapterContext context,
CancellationToken cancellationToken = default)
{
ValidateConfiguration();
var records = new List<TrivyJavaVulnerabilityRecord>();
var seenKeys = new HashSet<string>(StringComparer.Ordinal);
var stats = new TransformStats();
await foreach (var advisory in advisories.WithCancellation(cancellationToken))
{
stats.TotalInput++;
if (!HasJavaPackages(advisory))
{
stats.SkippedNonJava++;
continue;
}
var transformed = TransformAdvisory(advisory);
foreach (var record in transformed)
{
if (string.IsNullOrEmpty(record.Package.GroupId) ||
string.IsNullOrEmpty(record.Package.ArtifactId))
{
stats.SkippedMissingCoordinates++;
continue;
}
var key = GetDeduplicationKey(record);
if (seenKeys.Add(key))
{
records.Add(record);
}
else
{
stats.Duplicates++;
}
}
}
// Sort records for deterministic output
var sortedRecords = records
.OrderBy(r => r.Namespace, StringComparer.Ordinal)
.ThenBy(r => r.Package.GroupId, StringComparer.Ordinal)
.ThenBy(r => r.Package.ArtifactId, StringComparer.Ordinal)
.ThenBy(r => r.Vulnerability.Id, StringComparer.Ordinal)
.ToList();
var metadata = CreateMetadata(context);
_logger.LogInformation(
"Trivy Java DB adapter transformed {OutputCount} records from {InputCount} advisories " +
"(skipped: {SkippedNonJava} non-Java, {SkippedMissingCoords} missing coordinates, {Duplicates} duplicates)",
sortedRecords.Count,
stats.TotalInput,
stats.SkippedNonJava,
stats.SkippedMissingCoordinates,
stats.Duplicates);
return new TrivyJavaAdapterResult
{
Records = sortedRecords,
Metadata = metadata,
TotalInputRecords = stats.TotalInput,
SkippedNonJavaEcosystem = stats.SkippedNonJava,
SkippedMissingCoordinates = stats.SkippedMissingCoordinates,
DuplicatesRemoved = stats.Duplicates
};
}
/// <inheritdoc />
public IReadOnlyList<TrivyJavaVulnerabilityRecord> TransformAdvisory(TrivyAdapterInputAdvisory advisory)
{
var records = new List<TrivyJavaVulnerabilityRecord>();
if (advisory.Affects is null || advisory.Affects.Count == 0)
{
return records;
}
foreach (var affected in advisory.Affects)
{
var ecosystem = affected.Package.Ecosystem?.ToLowerInvariant();
if (ecosystem is null || !SupportedEcosystems.Contains(ecosystem))
{
continue;
}
var coordinates = ParseMavenCoordinates(affected.Package.Name, affected.Package.Purl);
if (coordinates is null)
{
// Try to use group/artifact directly if provided
if (!string.IsNullOrEmpty(affected.Package.Group) &&
!string.IsNullOrEmpty(affected.Package.Artifact))
{
coordinates = new MavenCoordinates(
affected.Package.Group,
affected.Package.Artifact,
affected.Package.Version);
}
else
{
_logger.LogDebug(
"Could not parse Maven coordinates for package {PackageName}",
affected.Package.Name);
continue;
}
}
var vulnerability = CreateVulnerability(advisory);
var package = CreateJavaPackage(affected, coordinates);
records.Add(new TrivyJavaVulnerabilityRecord
{
Namespace = ecosystem,
Package = package,
Vulnerability = vulnerability
});
}
return records;
}
/// <inheritdoc />
public bool HasJavaPackages(TrivyAdapterInputAdvisory advisory)
{
if (advisory.Affects is null || advisory.Affects.Count == 0)
{
return false;
}
return advisory.Affects.Any(a =>
a.Package.Ecosystem is not null &&
SupportedEcosystems.Contains(a.Package.Ecosystem.ToLowerInvariant()));
}
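    // Illustrative parses (examples only, not from the original source):
    //   ParseMavenCoordinates(null, "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1")
    //     -> group "org.apache.logging.log4j", artifact "log4j-core", version "2.14.1"
    //   ParseMavenCoordinates("com.google.guava:guava", null)
    //     -> group "com.google.guava", artifact "guava", version null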
/// <inheritdoc />
public MavenCoordinates? ParseMavenCoordinates(string? packageName, string? purl)
{
// Try PURL first (most reliable)
if (!string.IsNullOrWhiteSpace(purl))
{
var purlCoords = ParsePurl(purl);
if (purlCoords is not null)
{
return purlCoords;
}
}
// Try package name in group:artifact format
if (!string.IsNullOrWhiteSpace(packageName))
{
var parts = packageName.Split(':');
if (parts.Length >= 2)
{
return new MavenCoordinates(
parts[0].Trim(),
parts[1].Trim(),
parts.Length > 2 ? parts[2].Trim() : null);
}
// Try package name in group/artifact format (Gradle style)
parts = packageName.Split('/');
if (parts.Length >= 2)
{
return new MavenCoordinates(
parts[0].Trim(),
parts[1].Trim(),
parts.Length > 2 ? parts[2].Trim() : null);
}
}
return null;
}
private MavenCoordinates? ParsePurl(string purl)
{
// PURL format: pkg:maven/group/artifact@version
// or: pkg:maven/group%2Fsubgroup/artifact@version
var match = MavenPurlPattern().Match(purl);
if (!match.Success)
{
return null;
}
var groupId = Uri.UnescapeDataString(match.Groups["group"].Value);
var artifactId = Uri.UnescapeDataString(match.Groups["artifact"].Value);
var version = match.Groups["version"].Success
? Uri.UnescapeDataString(match.Groups["version"].Value)
: null;
return new MavenCoordinates(groupId, artifactId, version);
}
private TrivyVulnerability CreateVulnerability(TrivyAdapterInputAdvisory advisory)
{
var primaryId = advisory.Identifiers.Cve?.FirstOrDefault()
?? advisory.Identifiers.Aliases?.FirstOrDefault()
?? "UNKNOWN";
var title = advisory.Summary;
var description = advisory.Description;
// Truncate title
if (title is not null && title.Length > _options.MaxTitleLength)
{
var overflow = title[_options.MaxTitleLength..];
title = title[.._options.MaxTitleLength];
description = string.IsNullOrEmpty(description)
? overflow
: $"{overflow}\n\n{description}";
}
// Normalize line endings
if (description is not null)
{
description = description.Replace("\r\n", "\n").Replace("\r", "\n");
}
// Map severity
var severity = TrivySeverityMapper.MapSeverity(advisory.Severity?.Normalized);
// If no severity, try to derive from CVSS
if (severity == TrivySeverities.Unknown && advisory.Cvss?.Count > 0)
{
var maxScore = advisory.Cvss.Max(c => c.Score);
severity = TrivySeverityMapper.SeverityFromCvssScore(maxScore);
}
// Build CVSS list (with truncation)
var cvss = advisory.Cvss?
.Take(_options.MaxCvssVectorsPerEntry)
.Select(c => new TrivyCvss
{
Vector = c.Vector,
Score = c.Score,
Source = c.Source,
Version = c.Version
})
.ToList();
// Build references
var references = advisory.References?.ToList() ?? [];
return new TrivyVulnerability
{
Id = primaryId,
CveIds = advisory.Identifiers.Cve?.ToList(),
CweIds = advisory.Identifiers.Cwe?.ToList(),
Title = title,
Description = description,
Severity = severity,
Cvss = cvss?.Count > 0 ? cvss : null,
References = references.Count > 0 ? references.Distinct().ToList() : null,
PublishedDate = advisory.Published,
LastModifiedDate = advisory.Modified,
VendorSeverity = advisory.Severity?.Vendor
};
}
private static TrivyJavaPackage CreateJavaPackage(
TrivyAdapterAffected affected,
MavenCoordinates coordinates)
{
var fixedVersion = affected.Remediations?.FirstOrDefault()?.FixedVersion;
var links = affected.Remediations?
.SelectMany(r => r.Urls ?? Enumerable.Empty<string>())
.Distinct()
.ToList();
// Convert vulnerable range to Maven version range format
var vulnerableVersions = new List<string>();
if (!string.IsNullOrEmpty(affected.VulnerableRange))
{
vulnerableVersions.Add(ConvertToMavenVersionRange(affected.VulnerableRange));
}
return new TrivyJavaPackage
{
Name = $"{coordinates.GroupId}:{coordinates.ArtifactId}",
GroupId = coordinates.GroupId,
ArtifactId = coordinates.ArtifactId,
Version = coordinates.Version ?? affected.Package.Version,
FixedVersion = fixedVersion,
VulnerableVersions = vulnerableVersions.Count > 0 ? vulnerableVersions : null,
Ecosystem = affected.Package.Ecosystem?.ToLowerInvariant() ?? "maven",
Purl = affected.Package.Purl,
Links = links?.Count > 0 ? links : null
};
}
    private static string ConvertToMavenVersionRange(string range)
    {
        // Convert common version range formats to Maven format
        // Examples: "< 1.2.3"           -> "(,1.2.3)"
        //           ">= 1.0.0, < 2.0.0" -> "[1.0.0,2.0.0)"
        //           "= 1.5.0"           -> "[1.5.0]"
        range = range.Trim();
        // Already in Maven format (covers compound ranges such as "[1.0,2.0)")
        if (range.StartsWith('[') || range.StartsWith('('))
        {
            return range;
        }
        // Compound range with one lower and one upper bound: ">= 1.0.0, < 2.0.0"
        var parts = range.Split(',', StringSplitOptions.TrimEntries);
        if (parts.Length == 2 && parts[0].StartsWith('>') && parts[1].StartsWith('<'))
        {
            var lowerBracket = parts[0].StartsWith(">=") ? "[" : "(";
            var upperBracket = parts[1].StartsWith("<=") ? "]" : ")";
            var lowerVersion = parts[0].TrimStart('>', '=', ' ');
            var upperVersion = parts[1].TrimStart('<', '=', ' ');
            return $"{lowerBracket}{lowerVersion},{upperVersion}{upperBracket}";
        }
        // "<=" must be tested before "<" (and ">=" before ">"); otherwise the
        // shorter prefix matches first and leaves a stray '=' in the version.
        if (range.StartsWith("<="))
        {
            return $"(,{range.TrimStart('<', '=', ' ')}]";
        }
        if (range.StartsWith('<'))
        {
            return $"(,{range.TrimStart('<', ' ')})";
        }
        if (range.StartsWith(">="))
        {
            return $"[{range.TrimStart('>', '=', ' ')},)";
        }
        if (range.StartsWith('>'))
        {
            return $"({range.TrimStart('>', ' ')},)";
        }
        // Exact version ("= 1.5.0", "== 1.5.0", or "=1.5.0")
        if (range.StartsWith('='))
        {
            return $"[{range.TrimStart('=', ' ')}]";
        }
        // Return as-is if no conversion applies
        return range;
    }
private TrivyJavaDbMetadata CreateMetadata(TrivyAdapterContext context)
{
return new TrivyJavaDbMetadata
{
SchemaVersion = _options.SchemaVersion,
BuildInfo = new TrivyBuildInfo
{
TrivyVersion = context.TrivyVersion,
                // Normalize to UTC so the literal 'Z' suffix stays truthful for non-UTC offsets.
                VulnerabilityDbVersion = context.GeneratedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
},
UpdatedAt = context.GeneratedAt,
Ecosystems = ["maven", "gradle", "sbt"],
Stella = new TrivyStellaBlock
{
RunId = context.RunId,
ProfileId = context.ProfileId,
Tenant = context.TenantId,
PolicySnapshotId = context.PolicySnapshotId,
SchemaVersion = _options.SchemaVersion,
GeneratedAt = context.GeneratedAt
}
};
}
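    // Illustrative key: "maven|org.apache.logging.log4j|log4j-core|CVE-2021-44228".
    // Note that version is not part of the key, so one record survives per
    // (namespace, group, artifact, vulnerability) combination.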
private static string GetDeduplicationKey(TrivyJavaVulnerabilityRecord record)
{
return $"{record.Namespace}|{record.Package.GroupId}|{record.Package.ArtifactId}|{record.Vulnerability.Id}";
}
[GeneratedRegex(@"^pkg:maven/(?<group>[^/]+)/(?<artifact>[^@]+)(?:@(?<version>.+))?$")]
private static partial Regex MavenPurlPattern();
private sealed class TransformStats
{
public int TotalInput;
public int SkippedNonJava;
public int SkippedMissingCoordinates;
public int Duplicates;
}
}

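A minimal exercise of the Java adapter's public parsing surface (hedged sketch; assumes TrivyAdapterOptions defaults pass validation and that MavenCoordinates exposes GroupId/ArtifactId/Version, matching its construction above):

    var adapter = new TrivyJavaDbAdapter(
        Options.Create(new TrivyAdapterOptions()),
        LoggerFactory.Create(b => b.AddConsole()).CreateLogger<TrivyJavaDbAdapter>());

    var coords = adapter.ParseMavenCoordinates(
        packageName: null,
        purl: "pkg:maven/com.fasterxml.jackson.core/jackson-databind@2.15.0");
    // coords: GroupId=com.fasterxml.jackson.core, ArtifactId=jackson-databind, Version=2.15.0

    Console.WriteLine(string.Join(", ", adapter.SupportedEcosystems)); // e.g. maven, gradle, sbt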
Some files were not shown because too many files have changed in this diff.