doctor enhancements, setup, UI functionality and design consolidation, test project fixes, product advisory attestation/Rekor and delta verification enhancements
This commit is contained in:
@@ -0,0 +1,277 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactIndexRepository.cs
|
||||
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
|
||||
// Task: AS-003 - Create ArtifactStore PostgreSQL index
|
||||
// Description: PostgreSQL-backed artifact index for efficient querying
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Artifact.Infrastructure;
|
||||
|
||||
/// <summary>
/// PostgreSQL repository for artifact index.
/// Provides efficient bom-ref based querying.
/// Implementations in this file (in-memory and PostgreSQL) share two
/// contracts: <c>IndexAsync</c> upserts on the (bom-ref, serial number,
/// artifact id) key, and all read methods exclude soft-deleted entries.
/// </summary>
public interface IArtifactIndexRepository
{
    /// <summary>
    /// Indexes a stored artifact. Upsert semantics: an existing entry with the
    /// same key is replaced rather than duplicated.
    /// </summary>
    Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default);

    /// <summary>
    /// Finds artifacts by bom-ref. Soft-deleted entries are excluded.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default);

    /// <summary>
    /// Finds artifacts by bom-ref and serial number. Soft-deleted entries are excluded.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
        string bomRef,
        string serialNumber,
        CancellationToken ct = default);

    /// <summary>
    /// Gets a specific artifact index entry, or null when no live (non-deleted)
    /// entry matches the full key.
    /// </summary>
    Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);

    /// <summary>
    /// Removes an artifact from the index (soft delete in the provided
    /// implementations). Returns false when no live entry matched.
    /// </summary>
    Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);

    /// <summary>
    /// Finds artifacts by SHA-256 hash (used for content deduplication).
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default);

    /// <summary>
    /// Finds artifacts by type within a tenant, capped at <paramref name="limit"/> results.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
        ArtifactType type,
        Guid tenantId,
        int limit = 100,
        CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Artifact index entry for PostgreSQL storage.
/// Immutable record mirroring the evidence.artifact_index table columns.
/// </summary>
public sealed record ArtifactIndexEntry
{
    /// <summary>Primary key. Defaults to a fresh GUID when not supplied.</summary>
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>Tenant ID. Part of the unique index key.</summary>
    public required Guid TenantId { get; init; }

    /// <summary>Package URL or bom-ref.</summary>
    public required string BomRef { get; init; }

    /// <summary>CycloneDX serialNumber.</summary>
    public required string SerialNumber { get; init; }

    /// <summary>Artifact ID. Together with BomRef/SerialNumber identifies the artifact.</summary>
    public required string ArtifactId { get; init; }

    /// <summary>Full storage key/path.</summary>
    public required string StorageKey { get; init; }

    /// <summary>Artifact type.</summary>
    public required ArtifactType Type { get; init; }

    /// <summary>Content type (MIME).</summary>
    public required string ContentType { get; init; }

    /// <summary>SHA-256 hash. Used by FindBySha256Async for deduplication lookups.</summary>
    public required string Sha256 { get; init; }

    /// <summary>Size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the artifact was stored.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>When the index entry was last updated.</summary>
    public DateTimeOffset? UpdatedAt { get; init; }

    /// <summary>
    /// Whether the artifact has been deleted (soft delete: the row is kept but
    /// excluded from all read paths).
    /// </summary>
    public bool IsDeleted { get; init; }

    /// <summary>Deletion timestamp; set when IsDeleted becomes true.</summary>
    public DateTimeOffset? DeletedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// In-memory implementation for testing.
/// Mirrors the PostgreSQL index semantics: per-tenant upsert keyed on
/// (tenant, bom-ref, serial number, artifact id), soft delete, and read
/// paths that exclude soft-deleted entries. All operations are guarded by
/// a single lock and complete synchronously.
/// </summary>
public sealed class InMemoryArtifactIndexRepository : IArtifactIndexRepository
{
    private readonly List<ArtifactIndexEntry> _entries = new();
    private readonly object _lock = new();

    /// <inheritdoc />
    public Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
    {
        lock (_lock)
        {
            // Upsert: drop any existing entry for the same key first.
            // FIX: the key now includes TenantId, matching the PostgreSQL
            // unique constraint uq_artifact_index_key
            // (tenant_id, bom_ref, serial_number, artifact_id). Previously
            // one tenant's IndexAsync could silently remove another tenant's
            // entry that shared the same bom-ref/serial/artifact id.
            _entries.RemoveAll(e =>
                e.TenantId == entry.TenantId &&
                e.BomRef == entry.BomRef &&
                e.SerialNumber == entry.SerialNumber &&
                e.ArtifactId == entry.ArtifactId);

            _entries.Add(entry);
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
    {
        lock (_lock)
        {
            IReadOnlyList<ArtifactIndexEntry> result = _entries
                .Where(e => e.BomRef == bomRef && !e.IsDeleted)
                .ToList();
            return Task.FromResult(result);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
        string bomRef,
        string serialNumber,
        CancellationToken ct = default)
    {
        lock (_lock)
        {
            IReadOnlyList<ArtifactIndexEntry> result = _entries
                .Where(e => e.BomRef == bomRef && e.SerialNumber == serialNumber && !e.IsDeleted)
                .ToList();
            return Task.FromResult(result);
        }
    }

    /// <inheritdoc />
    public Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var entry = _entries.FirstOrDefault(e =>
                e.BomRef == bomRef &&
                e.SerialNumber == serialNumber &&
                e.ArtifactId == artifactId &&
                !e.IsDeleted);
            return Task.FromResult(entry);
        }
    }

    /// <inheritdoc />
    public Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var entry = _entries.FirstOrDefault(e =>
                e.BomRef == bomRef &&
                e.SerialNumber == serialNumber &&
                e.ArtifactId == artifactId &&
                !e.IsDeleted);

            if (entry != null)
            {
                // Soft delete: replace the record in place with a flagged copy,
                // matching the PostgreSQL is_deleted/deleted_at columns.
                var index = _entries.IndexOf(entry);
                _entries[index] = entry with
                {
                    IsDeleted = true,
                    DeletedAt = DateTimeOffset.UtcNow
                };
                return Task.FromResult(true);
            }

            return Task.FromResult(false);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
    {
        lock (_lock)
        {
            IReadOnlyList<ArtifactIndexEntry> result = _entries
                .Where(e => e.Sha256 == sha256 && !e.IsDeleted)
                .ToList();
            return Task.FromResult(result);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
        ArtifactType type,
        Guid tenantId,
        int limit = 100,
        CancellationToken ct = default)
    {
        lock (_lock)
        {
            // Insertion order, capped at limit (no ordering guarantee, matching
            // the original in-memory behavior).
            IReadOnlyList<ArtifactIndexEntry> result = _entries
                .Where(e => e.Type == type && e.TenantId == tenantId && !e.IsDeleted)
                .Take(limit)
                .ToList();
            return Task.FromResult(result);
        }
    }
}
|
||||
|
||||
/// <summary>
/// PostgreSQL artifact index table schema.
/// NOTE(review): this DDL duplicates migration 001 of the artifact index
/// (minus the created_at index and the RLS setup) — confirm which one is
/// authoritative and keep the two in sync.
/// </summary>
public static class ArtifactIndexSchema
{
    /// <summary>
    /// SQL migration to create the artifact index table.
    /// Idempotent: uses IF NOT EXISTS for the table and every index.
    /// Partial indexes (WHERE NOT is_deleted) cover only live rows.
    /// </summary>
    public const string CreateTableSql = """
        CREATE TABLE IF NOT EXISTS evidence.artifact_index (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            tenant_id UUID NOT NULL,
            bom_ref TEXT NOT NULL,
            serial_number TEXT NOT NULL,
            artifact_id TEXT NOT NULL,
            storage_key TEXT NOT NULL,
            artifact_type TEXT NOT NULL,
            content_type TEXT NOT NULL,
            sha256 TEXT NOT NULL,
            size_bytes BIGINT NOT NULL,
            created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
            updated_at TIMESTAMPTZ,
            is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
            deleted_at TIMESTAMPTZ,

            CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
        );

        -- Index for bom-ref queries (most common)
        CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
            ON evidence.artifact_index (tenant_id, bom_ref)
            WHERE NOT is_deleted;

        -- Index for SHA-256 lookups (deduplication)
        CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
            ON evidence.artifact_index (sha256)
            WHERE NOT is_deleted;

        -- Index for type-based queries
        CREATE INDEX IF NOT EXISTS idx_artifact_index_type
            ON evidence.artifact_index (tenant_id, artifact_type)
            WHERE NOT is_deleted;

        -- Index for serial number + bom-ref compound queries
        CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
            ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
            WHERE NOT is_deleted;
        """;
}
|
||||
@@ -0,0 +1,407 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactMigrationService.cs
|
||||
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
|
||||
// Task: AS-006 - Migrate existing evidence to unified store
|
||||
// Description: Migrates existing evidence from legacy paths to unified store
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Runtime.CompilerServices;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Artifact.Core;
|
||||
|
||||
namespace StellaOps.Artifact.Infrastructure;
|
||||
|
||||
/// <summary>
/// Migration options.
/// Mutable settings object consumed by <see cref="ArtifactMigrationService"/>.
/// </summary>
public sealed class ArtifactMigrationOptions
{
    /// <summary>
    /// Maximum number of parallel migrations (bounds the service's semaphore).
    /// </summary>
    public int MaxParallelism { get; set; } = 4;

    /// <summary>
    /// Batch size for processing; results are drained to the caller once a
    /// batch of this size has been scheduled.
    /// </summary>
    public int BatchSize { get; set; } = 100;

    /// <summary>
    /// Whether to copy (preserve original) or move.
    /// NOTE(review): not referenced by the visible migration code — the
    /// service never deletes legacy content, so behavior is effectively
    /// copy-only; confirm whether move mode is implemented elsewhere.
    /// </summary>
    public bool CopyMode { get; set; } = true;

    /// <summary>
    /// Skip artifacts that already exist in the unified store
    /// (checked via IArtifactStore.ExistsAsync before storing).
    /// </summary>
    public bool SkipExisting { get; set; } = true;

    /// <summary>
    /// Whether to write a migration log.
    /// NOTE(review): not referenced by the visible migration code — confirm
    /// whether a consumer elsewhere honors this flag.
    /// </summary>
    public bool EnableLogging { get; set; } = true;
}
|
||||
|
||||
/// <summary>
/// Progress report for migration.
/// Immutable snapshot; a new instance is published per processed item.
/// </summary>
public sealed record MigrationProgress
{
    /// <summary>Total number of artifacts to process.</summary>
    public int TotalItems { get; init; }

    /// <summary>Items processed so far (success + failure + skipped).</summary>
    public int ProcessedItems { get; init; }

    /// <summary>Items migrated successfully (excluding skips).</summary>
    public int SuccessCount { get; init; }

    /// <summary>Items that failed to migrate.</summary>
    public int FailureCount { get; init; }

    /// <summary>Items skipped (e.g. already present in the target store).</summary>
    public int SkippedCount { get; init; }

    /// <summary>When the migration run started.</summary>
    public DateTimeOffset StartedAt { get; init; }

    /// <summary>When this snapshot was produced.</summary>
    public DateTimeOffset LastUpdateAt { get; init; }

    /// <summary>Path of the most recently processed item.</summary>
    public string CurrentItem { get; init; } = string.Empty;

    /// <summary>
    /// Linear extrapolation of remaining time from the average per-item
    /// elapsed time; <see cref="TimeSpan.Zero"/> until the first item completes.
    /// </summary>
    public TimeSpan EstimatedRemaining => ProcessedItems > 0
        ? TimeSpan.FromSeconds((TotalItems - ProcessedItems) * (LastUpdateAt - StartedAt).TotalSeconds / ProcessedItems)
        : TimeSpan.Zero;
}
|
||||
|
||||
/// <summary>
/// Result of migrating a single artifact.
/// </summary>
public sealed record ArtifactMigrationResult
{
    /// <summary>Path of the artifact in the legacy store.</summary>
    public required string OriginalPath { get; init; }

    /// <summary>Storage key in the unified store; null on failure or skip.</summary>
    public required string? NewPath { get; init; }

    /// <summary>Whether the migration succeeded (skips also report true).</summary>
    public required bool Success { get; init; }

    /// <summary>Whether the artifact was skipped because it already existed.</summary>
    public required bool Skipped { get; init; }

    /// <summary>Resolved bom-ref (extracted or fallback); null on early failure.</summary>
    public string? BomRef { get; init; }

    /// <summary>Resolved serial number (extracted or fallback); null on early failure.</summary>
    public string? SerialNumber { get; init; }

    /// <summary>Failure detail; null when the migration succeeded.</summary>
    public string? ErrorMessage { get; init; }
}
|
||||
|
||||
/// <summary>
/// Legacy artifact source for migration.
/// Abstracts enumeration and content access over the pre-unification store.
/// </summary>
public interface ILegacyArtifactSource
{
    /// <summary>
    /// Enumerates all artifacts in the legacy store.
    /// </summary>
    IAsyncEnumerable<LegacyArtifact> EnumerateAsync(CancellationToken ct = default);

    /// <summary>
    /// Gets the total count of artifacts (used for progress reporting).
    /// </summary>
    Task<int> CountAsync(CancellationToken ct = default);

    /// <summary>
    /// Reads content from a legacy path.
    /// Returns null when the content cannot be found; the caller treats that
    /// as a failed migration rather than an exception.
    /// </summary>
    Task<Stream?> ReadAsync(string legacyPath, CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Legacy artifact descriptor.
/// Metadata only — content is fetched separately via
/// <see cref="ILegacyArtifactSource.ReadAsync"/>.
/// </summary>
public sealed record LegacyArtifact
{
    /// <summary>Path of the artifact in the legacy store.</summary>
    public required string LegacyPath { get; init; }

    /// <summary>MIME content type; drives SBOM detection and type inference.</summary>
    public required string ContentType { get; init; }

    /// <summary>Size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the artifact was originally stored.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Owning tenant; defaults to Guid.Empty when unknown.</summary>
    public Guid TenantId { get; init; }

    /// <summary>Legacy bundle identifier, when available.</summary>
    public string? BundleId { get; init; }
}
|
||||
|
||||
/// <summary>
/// Service for migrating legacy evidence to unified artifact store.
/// Enumerates the legacy source, extracts CycloneDX identity where possible
/// (falling back to deterministic path-derived identity), and stores each
/// artifact with bounded parallelism, streaming per-item results to the caller.
/// </summary>
public sealed class ArtifactMigrationService
{
    private readonly IArtifactStore _targetStore;
    private readonly ILegacyArtifactSource _source;
    private readonly ICycloneDxExtractor _extractor;
    private readonly ArtifactMigrationOptions _options;
    private readonly ILogger<ArtifactMigrationService> _logger;

    public ArtifactMigrationService(
        IArtifactStore targetStore,
        ILegacyArtifactSource source,
        ICycloneDxExtractor extractor,
        ArtifactMigrationOptions options,
        ILogger<ArtifactMigrationService> logger)
    {
        _targetStore = targetStore ?? throw new ArgumentNullException(nameof(targetStore));
        _source = source ?? throw new ArgumentNullException(nameof(source));
        _extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs the migration asynchronously, yielding one result per legacy
    /// artifact and reporting aggregate progress after each completed item.
    /// Work is scheduled with at most <see cref="ArtifactMigrationOptions.MaxParallelism"/>
    /// concurrent migrations and drained in batches of
    /// <see cref="ArtifactMigrationOptions.BatchSize"/>.
    /// </summary>
    /// <param name="progress">Optional sink; receives one snapshot per processed item.</param>
    /// <param name="ct">Cancels enumeration and in-flight migrations.</param>
    public async IAsyncEnumerable<ArtifactMigrationResult> MigrateAsync(
        IProgress<MigrationProgress>? progress = null,
        [EnumeratorCancellation] CancellationToken ct = default)
    {
        var totalCount = await _source.CountAsync(ct).ConfigureAwait(false);
        var startedAt = DateTimeOffset.UtcNow;
        var processed = 0;
        var succeeded = 0;
        var failed = 0;
        var skipped = 0;

        _logger.LogInformation("Starting migration of {Count} artifacts", totalCount);

        // FIX: dispose the semaphore when the enumerator completes
        // (previously it was never disposed).
        using var semaphore = new SemaphoreSlim(_options.MaxParallelism);
        var batch = new List<Task<ArtifactMigrationResult>>(_options.BatchSize);

        // Tallies one finished result and publishes a progress snapshot;
        // shared by the in-loop drain and the final partial-batch drain
        // (previously duplicated inline).
        ArtifactMigrationResult Record(ArtifactMigrationResult result)
        {
            processed++;
            if (result.Skipped) skipped++;
            else if (result.Success) succeeded++;
            else failed++;

            progress?.Report(new MigrationProgress
            {
                TotalItems = totalCount,
                ProcessedItems = processed,
                SuccessCount = succeeded,
                FailureCount = failed,
                SkippedCount = skipped,
                StartedAt = startedAt,
                LastUpdateAt = DateTimeOffset.UtcNow,
                CurrentItem = result.OriginalPath
            });

            return result;
        }

        await foreach (var legacy in _source.EnumerateAsync(ct).ConfigureAwait(false))
        {
            // Bound parallelism before scheduling the next migration task.
            await semaphore.WaitAsync(ct).ConfigureAwait(false);

            batch.Add(Task.Run(async () =>
            {
                try
                {
                    return await MigrateOneAsync(legacy, ct).ConfigureAwait(false);
                }
                finally
                {
                    semaphore.Release();
                }
            }, ct));

            // Drain the batch when full so results stream to the caller.
            if (batch.Count >= _options.BatchSize)
            {
                foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
                {
                    yield return Record(result);
                }

                batch.Clear();
            }
        }

        // Drain any partially-filled final batch.
        if (batch.Count > 0)
        {
            foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
            {
                yield return Record(result);
            }
        }

        _logger.LogInformation(
            "Migration completed: {Succeeded} succeeded, {Failed} failed, {Skipped} skipped out of {Total}",
            succeeded, failed, skipped, totalCount);
    }

    /// <summary>
    /// Awaits all tasks in a batch and returns their results in submission
    /// order. MigrateOneAsync catches its own exceptions, so these tasks only
    /// fault on truly unexpected errors, which Task.WhenAll then surfaces.
    /// </summary>
    private static async Task<IReadOnlyList<ArtifactMigrationResult>> ProcessBatchAsync(
        List<Task<ArtifactMigrationResult>> batch)
        => await Task.WhenAll(batch).ConfigureAwait(false);

    /// <summary>
    /// Migrates a single legacy artifact: reads it, resolves its bom-ref /
    /// serial identity, optionally skips existing targets, and stores it.
    /// Never throws; all failures are reported through the returned result.
    /// </summary>
    private async Task<ArtifactMigrationResult> MigrateOneAsync(LegacyArtifact legacy, CancellationToken ct)
    {
        try
        {
            // Read content from legacy store
            var stream = await _source.ReadAsync(legacy.LegacyPath, ct).ConfigureAwait(false);
            if (stream == null)
            {
                return new ArtifactMigrationResult
                {
                    OriginalPath = legacy.LegacyPath,
                    NewPath = null,
                    Success = false,
                    Skipped = false,
                    ErrorMessage = "Content not found"
                };
            }

            // Buffer the stream so it can be read twice (extraction + store).
            // FIX: await using guarantees the source stream is disposed even
            // when CopyToAsync throws (previously it leaked on copy failure).
            using var memoryStream = new MemoryStream();
            await using (stream.ConfigureAwait(false))
            {
                await stream.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
            }

            memoryStream.Position = 0;

            // Resolve identity: prefer metadata extracted from SBOM content,
            // fall back to deterministic values derived from the legacy path.
            string bomRef;
            string serialNumber;

            if (IsSbomContent(legacy.ContentType))
            {
                var metadata = await _extractor.ExtractAsync(memoryStream, ct).ConfigureAwait(false);
                memoryStream.Position = 0;

                if (metadata.Success)
                {
                    // Prefer purl, then bom-ref, then a synthetic reference.
                    bomRef = metadata.PrimaryPurl ?? metadata.PrimaryBomRef ?? GenerateFallbackBomRef(legacy);
                    serialNumber = metadata.SerialNumber ?? GenerateFallbackSerial(legacy);
                }
                else
                {
                    // Malformed SBOM: derive identity from the path instead.
                    bomRef = GenerateFallbackBomRef(legacy);
                    serialNumber = GenerateFallbackSerial(legacy);
                }
            }
            else
            {
                // Non-SBOM content: use legacy path to generate bom-ref.
                bomRef = GenerateFallbackBomRef(legacy);
                serialNumber = GenerateFallbackSerial(legacy);
            }

            var artifactId = GenerateArtifactId(legacy);

            // Honor SkipExisting: report success without re-storing.
            if (_options.SkipExisting)
            {
                var exists = await _targetStore.ExistsAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
                if (exists)
                {
                    return new ArtifactMigrationResult
                    {
                        OriginalPath = legacy.LegacyPath,
                        NewPath = null,
                        Success = true,
                        Skipped = true,
                        BomRef = bomRef,
                        SerialNumber = serialNumber
                    };
                }
            }

            // Store in unified store, preserving provenance in metadata.
            var storeRequest = new ArtifactStoreRequest
            {
                BomRef = bomRef,
                SerialNumber = serialNumber,
                ArtifactId = artifactId,
                Content = memoryStream,
                ContentType = legacy.ContentType,
                Type = InferArtifactType(legacy.ContentType, legacy.LegacyPath),
                TenantId = legacy.TenantId,
                Overwrite = false,
                Metadata = new Dictionary<string, string>
                {
                    ["legacy_path"] = legacy.LegacyPath,
                    ["migrated_at"] = DateTimeOffset.UtcNow.ToString("O")
                }
            };

            var result = await _targetStore.StoreAsync(storeRequest, ct).ConfigureAwait(false);

            return new ArtifactMigrationResult
            {
                OriginalPath = legacy.LegacyPath,
                NewPath = result.StorageKey,
                Success = result.Success,
                Skipped = false,
                BomRef = bomRef,
                SerialNumber = serialNumber,
                ErrorMessage = result.ErrorMessage
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to migrate {Path}", legacy.LegacyPath);

            return new ArtifactMigrationResult
            {
                OriginalPath = legacy.LegacyPath,
                NewPath = null,
                Success = false,
                Skipped = false,
                ErrorMessage = ex.Message
            };
        }
    }

    /// <summary>
    /// Heuristic: content types mentioning CycloneDX/SPDX are SBOMs; bare
    /// JSON is treated as a possible SBOM so extraction gets a chance to
    /// confirm or fall back.
    /// </summary>
    private static bool IsSbomContent(string contentType)
    {
        return contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)
            || contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase)
            || contentType == "application/json"; // Assume JSON might be SBOM
    }

    /// <summary>
    /// Generates a purl-like bom-ref from the legacy path for artifacts whose
    /// identity cannot be extracted from content.
    /// </summary>
    private static string GenerateFallbackBomRef(LegacyArtifact legacy)
    {
        // Strip well-known path segments before escaping.
        var sanitized = legacy.LegacyPath
            .Replace("\\", "/")
            .Replace("tenants/", "")
            .Replace("bundles/", "");

        return $"pkg:stella/legacy/{Uri.EscapeDataString(sanitized)}";
    }

    /// <summary>
    /// Generates a deterministic serial URN from the legacy path: the same
    /// path always yields the same urn:uuid, so re-runs are idempotent.
    /// </summary>
    private static string GenerateFallbackSerial(LegacyArtifact legacy)
    {
        // SHA256.HashData avoids allocating a hasher instance per call.
        var hash = System.Security.Cryptography.SHA256.HashData(
            System.Text.Encoding.UTF8.GetBytes(legacy.LegacyPath));
        var guid = new Guid(hash.AsSpan(0, 16));
        return $"urn:uuid:{guid}";
    }

    /// <summary>
    /// Derives an artifact id from the legacy file name, falling back to a
    /// new GUID when the path has no file-name component.
    /// NOTE(review): distinct paths sharing a file name map to the same id and
    /// are disambiguated only by bom-ref/serial — confirm this is acceptable.
    /// </summary>
    private static string GenerateArtifactId(LegacyArtifact legacy)
    {
        var fileName = Path.GetFileNameWithoutExtension(legacy.LegacyPath);
        return !string.IsNullOrEmpty(fileName) ? fileName : Guid.NewGuid().ToString();
    }

    /// <summary>
    /// Classifies an artifact from its content type and path.
    /// FIX: matching is now case-insensitive, consistent with IsSbomContent —
    /// the previous ordinal Contains would miss e.g. "CycloneDX" in a
    /// mixed-case content type.
    /// </summary>
    private static ArtifactType InferArtifactType(string contentType, string path)
    {
        if (Has(contentType, "cyclonedx") || Has(contentType, "spdx"))
            return ArtifactType.Sbom;
        if (Has(contentType, "vex") || Has(contentType, "openvex"))
            return ArtifactType.Vex;
        if (Has(contentType, "dsse") || Has(path, "dsse"))
            return ArtifactType.DsseEnvelope;
        if (Has(path, "rekor"))
            return ArtifactType.RekorProof;
        if (Has(path, "verdict"))
            return ArtifactType.Verdict;

        return ArtifactType.Unknown;

        static bool Has(string value, string token)
            => value.Contains(token, StringComparison.OrdinalIgnoreCase);
    }
}
|
||||
@@ -0,0 +1,123 @@
|
||||
-- Artifact Index Schema Migration 001: Initial Schema
-- Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
-- Tasks: AS-003 - Create ArtifactStore PostgreSQL index
-- Description: Creates the artifact index table for unified artifact storage

-- ============================================================================
-- Schema Creation
-- ============================================================================

CREATE SCHEMA IF NOT EXISTS evidence;

-- ============================================================================
-- Artifact Index Table
-- ============================================================================
-- Indexes S3-stored artifacts for efficient bom-ref based querying.
-- Supports content deduplication via SHA-256 and soft-delete for retention.

CREATE TABLE IF NOT EXISTS evidence.artifact_index (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    bom_ref TEXT NOT NULL,
    serial_number TEXT NOT NULL,
    artifact_id TEXT NOT NULL,
    storage_key TEXT NOT NULL,
    artifact_type TEXT NOT NULL,
    content_type TEXT NOT NULL,
    sha256 TEXT NOT NULL,
    size_bytes BIGINT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ,
    is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
    deleted_at TIMESTAMPTZ,

    -- Unique constraint per tenant for artifact key
    CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
);

-- ============================================================================
-- Indexes (all partial: cover only live, non-deleted rows)
-- ============================================================================

-- Index for bom-ref queries (most common query pattern)
CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
    ON evidence.artifact_index (tenant_id, bom_ref)
    WHERE NOT is_deleted;

-- Index for SHA-256 lookups (content deduplication)
CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
    ON evidence.artifact_index (sha256)
    WHERE NOT is_deleted;

-- Index for type-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_type
    ON evidence.artifact_index (tenant_id, artifact_type)
    WHERE NOT is_deleted;

-- Index for serial number + bom-ref compound queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
    ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
    WHERE NOT is_deleted;

-- Index for time-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_created
    ON evidence.artifact_index (tenant_id, created_at DESC)
    WHERE NOT is_deleted;

-- ============================================================================
-- Row Level Security (RLS)
-- ============================================================================

ALTER TABLE evidence.artifact_index ENABLE ROW LEVEL SECURITY;

-- Tenant isolation helper: reads app.tenant_id from the session and fails
-- loudly when it is unset, so no query silently runs tenant-unscoped.
-- FIX: pin search_path. SECURITY DEFINER functions must not resolve names
-- through the caller's search_path (see PostgreSQL docs, "Writing SECURITY
-- DEFINER Functions Safely"); without this, a caller-controlled schema could
-- shadow referenced objects.
CREATE OR REPLACE FUNCTION evidence.require_current_tenant()
RETURNS UUID
LANGUAGE plpgsql STABLE SECURITY DEFINER
SET search_path = pg_catalog
AS $$
DECLARE
    v_tenant TEXT;
BEGIN
    v_tenant := current_setting('app.tenant_id', true);
    IF v_tenant IS NULL OR v_tenant = '' THEN
        RAISE EXCEPTION 'app.tenant_id session variable not set'
            USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
                  ERRCODE = 'P0001';
    END IF;
    RETURN v_tenant::UUID;
END;
$$;

-- RLS policies.
-- NOTE(review): CREATE POLICY has no IF NOT EXISTS, so re-running this script
-- fails here even though the DDL above is idempotent — confirm the migration
-- runner guarantees run-once semantics.
-- For UPDATE, PostgreSQL reuses the USING expression as the WITH CHECK when
-- the latter is omitted, so rows cannot be re-assigned to another tenant.
CREATE POLICY artifact_index_tenant_isolation ON evidence.artifact_index
    USING (tenant_id = evidence.require_current_tenant());

CREATE POLICY artifact_index_insert_tenant ON evidence.artifact_index
    FOR INSERT
    WITH CHECK (tenant_id = evidence.require_current_tenant());

CREATE POLICY artifact_index_update_tenant ON evidence.artifact_index
    FOR UPDATE
    USING (tenant_id = evidence.require_current_tenant());

-- ============================================================================
-- Comments
-- ============================================================================

COMMENT ON TABLE evidence.artifact_index IS
    'Index of artifacts stored in S3 for efficient bom-ref based querying';

COMMENT ON COLUMN evidence.artifact_index.bom_ref IS
    'Package URL (purl) or CycloneDX bom-ref';

COMMENT ON COLUMN evidence.artifact_index.serial_number IS
    'CycloneDX serialNumber URN (urn:uuid:...)';

COMMENT ON COLUMN evidence.artifact_index.storage_key IS
    'Full S3 object key/path';

COMMENT ON COLUMN evidence.artifact_index.sha256 IS
    'SHA-256 hash for content deduplication';

COMMENT ON COLUMN evidence.artifact_index.artifact_type IS
    'Type classification: Sbom, Vex, DsseEnvelope, RekorProof, Verdict, etc.';
||||
@@ -0,0 +1,310 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresArtifactIndexRepository.cs
|
||||
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
|
||||
// Task: AS-003 - Create ArtifactStore PostgreSQL index
|
||||
// Description: PostgreSQL implementation of artifact index repository
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Artifact.Core;
|
||||
using StellaOps.Infrastructure.Postgres.Connections;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
|
||||
namespace StellaOps.Artifact.Infrastructure;
|
||||
|
||||
/// <summary>
/// PostgreSQL data source for the Artifact module.
/// Applies the module's default schema ("evidence") when the shared Postgres
/// options do not specify one.
/// </summary>
public sealed class ArtifactDataSource : DataSourceBase
{
    // Default PostgreSQL schema used by the Artifact module.
    public const string DefaultSchemaName = "evidence";

    public ArtifactDataSource(
        Microsoft.Extensions.Options.IOptions<StellaOps.Infrastructure.Postgres.Options.PostgresOptions> options,
        ILogger<ArtifactDataSource> logger)
        : base(CreateOptions(options.Value), logger)
    {
    }

    protected override string ModuleName => "Artifact";

    // Fills in the module default schema when none was configured.
    // NOTE(review): this mutates the shared instance returned by
    // IOptions<T>.Value rather than a copy, so the "evidence" default leaks
    // to every other consumer of the same options object — confirm intended.
    private static StellaOps.Infrastructure.Postgres.Options.PostgresOptions CreateOptions(
        StellaOps.Infrastructure.Postgres.Options.PostgresOptions baseOptions)
    {
        if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
        {
            baseOptions.SchemaName = DefaultSchemaName;
        }
        return baseOptions;
    }
}
|
||||
|
||||
/// <summary>
/// PostgreSQL implementation of <see cref="IArtifactIndexRepository"/>.
/// Connections are opened for the tenant supplied at construction time.
/// </summary>
public sealed class PostgresArtifactIndexRepository : RepositoryBase<ArtifactDataSource>, IArtifactIndexRepository
{
    // Tenant used when opening connections via the data source.
    private readonly string _tenantId;

    /// <summary>
    /// Creates a repository bound to a single tenant.
    /// </summary>
    /// <param name="dataSource">Artifact module data source.</param>
    /// <param name="logger">Logger for repository diagnostics.</param>
    /// <param name="tenantId">
    /// Tenant identifier used for connection scoping.
    /// NOTE(review): defaults to the literal "default" — if DI registration
    /// forgets to pass a real tenant, queries run under that fallback tenant;
    /// confirm this cannot bypass tenant isolation.
    /// </param>
    public PostgresArtifactIndexRepository(
        ArtifactDataSource dataSource,
        ILogger<PostgresArtifactIndexRepository> logger,
        string tenantId = "default")
        : base(dataSource, logger)
    {
        _tenantId = tenantId;
    }
|
||||
|
||||
    /// <inheritdoc />
    /// <remarks>
    /// Upserts on the (tenant_id, bom_ref, serial_number, artifact_id) unique
    /// key: a conflicting row is overwritten and any soft delete is reverted
    /// (is_deleted reset to FALSE, deleted_at cleared).
    /// </remarks>
    public async Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO evidence.artifact_index (
                id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                artifact_type, content_type, sha256, size_bytes, created_at
            ) VALUES (
                @id, @tenant_id, @bom_ref, @serial_number, @artifact_id, @storage_key,
                @artifact_type, @content_type, @sha256, @size_bytes, @created_at
            )
            ON CONFLICT (tenant_id, bom_ref, serial_number, artifact_id)
            DO UPDATE SET
                storage_key = EXCLUDED.storage_key,
                artifact_type = EXCLUDED.artifact_type,
                content_type = EXCLUDED.content_type,
                sha256 = EXCLUDED.sha256,
                size_bytes = EXCLUDED.size_bytes,
                updated_at = NOW(),
                is_deleted = FALSE,
                deleted_at = NULL
            """;

        // "writer" role: this statement mutates the index.
        await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        AddParameter(command, "id", entry.Id);
        AddParameter(command, "tenant_id", entry.TenantId);
        AddParameter(command, "bom_ref", entry.BomRef);
        AddParameter(command, "serial_number", entry.SerialNumber);
        AddParameter(command, "artifact_id", entry.ArtifactId);
        AddParameter(command, "storage_key", entry.StorageKey);
        // Enum persisted by name, matching the TEXT artifact_type column.
        AddParameter(command, "artifact_type", entry.Type.ToString());
        AddParameter(command, "content_type", entry.ContentType);
        AddParameter(command, "sha256", entry.Sha256);
        AddParameter(command, "size_bytes", entry.SizeBytes);
        AddParameter(command, "created_at", entry.CreatedAt);

        await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
    }
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND NOT is_deleted
|
||||
ORDER BY created_at DESC
|
||||
""";
|
||||
|
||||
return await QueryAsync(_tenantId, sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
|
||||
AddParameter(cmd, "bom_ref", bomRef);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
|
||||
string bomRef,
|
||||
string serialNumber,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number AND NOT is_deleted
|
||||
ORDER BY created_at DESC
|
||||
""";
|
||||
|
||||
return await QueryAsync(_tenantId, sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
|
||||
AddParameter(cmd, "bom_ref", bomRef);
|
||||
AddParameter(cmd, "serial_number", serialNumber);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<ArtifactIndexEntry?> GetAsync(
|
||||
string bomRef,
|
||||
string serialNumber,
|
||||
string artifactId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
|
||||
AND artifact_id = @artifact_id AND NOT is_deleted
|
||||
""";
|
||||
|
||||
var results = await QueryAsync(_tenantId, sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
|
||||
AddParameter(cmd, "bom_ref", bomRef);
|
||||
AddParameter(cmd, "serial_number", serialNumber);
|
||||
AddParameter(cmd, "artifact_id", artifactId);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
|
||||
return results.Count > 0 ? results[0] : null;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<bool> RemoveAsync(
|
||||
string bomRef,
|
||||
string serialNumber,
|
||||
string artifactId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
UPDATE evidence.artifact_index
|
||||
SET is_deleted = TRUE, deleted_at = NOW(), updated_at = NOW()
|
||||
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
|
||||
AND artifact_id = @artifact_id AND NOT is_deleted
|
||||
""";
|
||||
|
||||
await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
|
||||
AddParameter(command, "tenant_id", Guid.Parse(_tenantId));
|
||||
AddParameter(command, "bom_ref", bomRef);
|
||||
AddParameter(command, "serial_number", serialNumber);
|
||||
AddParameter(command, "artifact_id", artifactId);
|
||||
|
||||
var rowsAffected = await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
|
||||
return rowsAffected > 0;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE sha256 = @sha256 AND NOT is_deleted
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 100
|
||||
""";
|
||||
|
||||
return await QueryAsync(_tenantId, sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "sha256", sha256);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
|
||||
ArtifactType type,
|
||||
Guid tenantId,
|
||||
int limit = 100,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND artifact_type = @artifact_type AND NOT is_deleted
|
||||
ORDER BY created_at DESC
|
||||
LIMIT @limit
|
||||
""";
|
||||
|
||||
return await QueryAsync(tenantId.ToString(), sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", tenantId);
|
||||
AddParameter(cmd, "artifact_type", type.ToString());
|
||||
AddParameter(cmd, "limit", limit);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Finds artifacts within a time range.
|
||||
/// </summary>
|
||||
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTimeRangeAsync(
|
||||
Guid tenantId,
|
||||
DateTimeOffset from,
|
||||
DateTimeOffset to,
|
||||
int limit = 1000,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
|
||||
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
|
||||
is_deleted, deleted_at
|
||||
FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND created_at >= @from AND created_at < @to AND NOT is_deleted
|
||||
ORDER BY created_at DESC
|
||||
LIMIT @limit
|
||||
""";
|
||||
|
||||
return await QueryAsync(tenantId.ToString(), sql, cmd =>
|
||||
{
|
||||
AddParameter(cmd, "tenant_id", tenantId);
|
||||
AddParameter(cmd, "from", from);
|
||||
AddParameter(cmd, "to", to);
|
||||
AddParameter(cmd, "limit", limit);
|
||||
}, MapEntry, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Counts artifacts for a tenant.
|
||||
/// </summary>
|
||||
public async Task<int> CountAsync(Guid tenantId, CancellationToken ct = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT COUNT(*) FROM evidence.artifact_index
|
||||
WHERE tenant_id = @tenant_id AND NOT is_deleted
|
||||
""";
|
||||
|
||||
await using var connection = await DataSource.OpenConnectionAsync(tenantId.ToString(), "reader", ct).ConfigureAwait(false);
|
||||
await using var command = CreateCommand(sql, connection);
|
||||
|
||||
AddParameter(command, "tenant_id", tenantId);
|
||||
|
||||
var result = await command.ExecuteScalarAsync(ct).ConfigureAwait(false);
|
||||
return Convert.ToInt32(result);
|
||||
}
|
||||
|
||||
private static ArtifactIndexEntry MapEntry(NpgsqlDataReader reader)
|
||||
{
|
||||
var artifactTypeString = reader.GetString(6);
|
||||
var artifactType = Enum.TryParse<ArtifactType>(artifactTypeString, out var at) ? at : ArtifactType.Unknown;
|
||||
|
||||
return new ArtifactIndexEntry
|
||||
{
|
||||
Id = reader.GetGuid(0),
|
||||
TenantId = reader.GetGuid(1),
|
||||
BomRef = reader.GetString(2),
|
||||
SerialNumber = reader.GetString(3),
|
||||
ArtifactId = reader.GetString(4),
|
||||
StorageKey = reader.GetString(5),
|
||||
Type = artifactType,
|
||||
ContentType = reader.GetString(7),
|
||||
Sha256 = reader.GetString(8),
|
||||
SizeBytes = reader.GetInt64(9),
|
||||
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
|
||||
UpdatedAt = reader.IsDBNull(11) ? null : reader.GetFieldValue<DateTimeOffset>(11),
|
||||
IsDeleted = reader.GetBoolean(12),
|
||||
DeletedAt = reader.IsDBNull(13) ? null : reader.GetFieldValue<DateTimeOffset>(13)
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,429 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// S3ArtifactStore.cs
|
||||
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
|
||||
// Task: AS-002 - Implement S3-backed ArtifactStore
|
||||
// Description: S3-backed implementation of unified artifact store
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Artifact.Core;
|
||||
|
||||
namespace StellaOps.Artifact.Infrastructure;
|
||||
|
||||
/// <summary>
/// Settings for the S3-backed unified artifact store.
/// </summary>
public sealed class S3UnifiedArtifactStoreOptions
{
    /// <summary>Name of the target S3 bucket. Required; validated by the store.</summary>
    public string BucketName { get; set; } = string.Empty;

    /// <summary>Key prefix under which all artifacts are written.</summary>
    public string Prefix { get; set; } = "artifacts";

    /// <summary>When true, content with an identical SHA-256 is stored only once.</summary>
    public bool EnableDeduplication { get; set; } = true;

    /// <summary>When true, metadata is written as sidecar JSON objects.</summary>
    public bool UseSidecarMetadata { get; set; } = false;

    /// <summary>When true, existing objects may be overwritten on store.</summary>
    public bool AllowOverwrite { get; set; } = false;

    /// <summary>Upper bound on a single artifact's size in bytes (default 100 MB).</summary>
    public long MaxArtifactSizeBytes { get; set; } = 100 * 1024 * 1024; // 100MB

    /// <summary>
    /// Per-type retention policies, keyed by ArtifactType enum name.
    /// Sprint: SPRINT_20260118_017 (AS-002)
    /// </summary>
    public Dictionary<string, RetentionPolicy> RetentionPolicies { get; set; } = new Dictionary<string, RetentionPolicy>
    {
        { "Sbom", new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false } },       // 7 years
        { "Vex", new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false } },
        { "Dsse", new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false } },
        { "RekorProof", new RetentionPolicy { RetentionDays = 365 * 10, DeleteAfterExpiry = false } }, // 10 years
        { "Attestation", new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false } },
        { "BuildLog", new RetentionPolicy { RetentionDays = 365, DeleteAfterExpiry = true } },         // 1 year
        { "ScanResult", new RetentionPolicy { RetentionDays = 365 * 2, DeleteAfterExpiry = true } },   // 2 years
        { "Temporary", new RetentionPolicy { RetentionDays = 30, DeleteAfterExpiry = true } }
    };

    /// <summary>Retention applied to artifact types without an explicit policy (5 years, keep on expiry).</summary>
    public RetentionPolicy DefaultRetentionPolicy { get; set; } = new RetentionPolicy
    {
        RetentionDays = 365 * 5, // 5 years default
        DeleteAfterExpiry = false
    };
}
||||
|
||||
/// <summary>
/// Describes how long artifacts of a given type are kept, and what happens when
/// the retention window elapses.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
public sealed class RetentionPolicy
{
    /// <summary>Retention period in days. Defaults to five years.</summary>
    public int RetentionDays { get; set; } = 365 * 5;

    /// <summary>True to delete on expiry; false to merely mark the artifact expired.</summary>
    public bool DeleteAfterExpiry { get; set; } = false;

    /// <summary>S3 storage class to transition objects into, if any.</summary>
    public string? TransitionStorageClass { get; set; }

    /// <summary>Days after creation at which the storage-class transition happens.</summary>
    public int? TransitionAfterDays { get; set; }
}
|
||||
|
||||
/// <summary>
/// S3 client interface for dependency injection.
/// Abstracts the minimal object operations the unified artifact store needs,
/// so tests can substitute an in-memory fake.
/// </summary>
public interface IS3UnifiedClient
{
    /// <summary>Returns true if an object exists at the given bucket/key.</summary>
    Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken ct);

    /// <summary>Uploads <paramref name="content"/> with the given content type and user metadata.</summary>
    Task PutObjectAsync(string bucketName, string key, Stream content, string contentType, IDictionary<string, string> metadata, CancellationToken ct);

    /// <summary>Downloads an object's content, or null if it does not exist.</summary>
    Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken ct);

    /// <summary>Fetches an object's user metadata, or null if it does not exist.</summary>
    Task<IDictionary<string, string>?> GetObjectMetadataAsync(string bucketName, string key, CancellationToken ct);

    /// <summary>Deletes an object (no-op semantics for missing keys are implementation-defined).</summary>
    Task DeleteObjectAsync(string bucketName, string key, CancellationToken ct);

    /// <summary>Lists object keys under the given prefix.</summary>
    Task<IReadOnlyList<string>> ListObjectsAsync(string bucketName, string prefix, CancellationToken ct);
}
|
||||
|
||||
/// <summary>
/// S3-backed implementation of <see cref="IArtifactStore"/>.
/// Supports content deduplication via SHA-256 and the unified path convention.
/// Object bytes live in S3; lookup metadata lives in the PostgreSQL index
/// (<see cref="IArtifactIndexRepository"/>). Deletion is soft (index only).
/// </summary>
public sealed class S3UnifiedArtifactStore : IArtifactStore
{
    private readonly IS3UnifiedClient _client;
    private readonly IArtifactIndexRepository _indexRepository;
    private readonly S3UnifiedArtifactStoreOptions _options;
    private readonly ILogger<S3UnifiedArtifactStore> _logger;

    /// <summary>
    /// Creates the store. Throws if any dependency is null or if
    /// <see cref="S3UnifiedArtifactStoreOptions.BucketName"/> is not configured.
    /// </summary>
    public S3UnifiedArtifactStore(
        IS3UnifiedClient client,
        IArtifactIndexRepository indexRepository,
        IOptions<S3UnifiedArtifactStoreOptions> options,
        ILogger<S3UnifiedArtifactStore> logger)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _indexRepository = indexRepository ?? throw new ArgumentNullException(nameof(indexRepository));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        if (string.IsNullOrWhiteSpace(_options.BucketName))
        {
            throw new ArgumentException("BucketName must be configured", nameof(options));
        }
    }

    /// <summary>
    /// Stores an artifact: buffers the content, enforces the size limit, optionally
    /// deduplicates by SHA-256, uploads to S3, and records an index entry.
    /// Failures are returned as <see cref="ArtifactStoreResult.Failed"/>, never thrown.
    /// </summary>
    /// <inheritdoc />
    public async Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        try
        {
            // Build the storage path using bom-ref convention
            var storagePath = BomRefEncoder.BuildPath(request.BomRef, request.SerialNumber, request.ArtifactId);
            var fullKey = BuildFullKey(storagePath);

            // Check if artifact already exists
            if (!request.Overwrite && !_options.AllowOverwrite)
            {
                var exists = await _client.ObjectExistsAsync(_options.BucketName, fullKey, ct).ConfigureAwait(false);
                if (exists)
                {
                    _logger.LogInformation("Artifact already exists at {Key}, skipping", fullKey);

                    // Return existing metadata
                    var existingEntry = await _indexRepository.GetAsync(
                        request.BomRef, request.SerialNumber, request.ArtifactId, ct).ConfigureAwait(false);

                    if (existingEntry != null)
                    {
                        return ArtifactStoreResult.Succeeded(fullKey, existingEntry.Sha256, existingEntry.SizeBytes, wasCreated: false);
                    }
                    // NOTE(review): if the object exists in S3 but has no index entry,
                    // execution falls through and re-uploads despite AllowOverwrite
                    // being false — confirm this self-healing behavior is intended.
                }
            }

            // Read content and compute hash.
            // NOTE(review): the entire stream is buffered in memory BEFORE the size
            // check below, so an oversized upload still consumes up to its full size
            // in RAM — consider a bounded copy if inputs are untrusted.
            using var memoryStream = new MemoryStream();
            await request.Content.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
            var contentBytes = memoryStream.ToArray();

            if (contentBytes.Length > _options.MaxArtifactSizeBytes)
            {
                return ArtifactStoreResult.Failed($"Artifact exceeds maximum size of {_options.MaxArtifactSizeBytes} bytes");
            }

            var sha256 = ComputeSha256(contentBytes);
            var sizeBytes = contentBytes.Length;

            // Check for content deduplication
            string actualStorageKey = fullKey;
            if (_options.EnableDeduplication)
            {
                var existingBySha = await _indexRepository.FindBySha256Async(sha256, ct).ConfigureAwait(false);
                if (existingBySha.Count > 0)
                {
                    // Content already exists, just create a new index entry pointing to same content.
                    // NOTE(review): this entry aliases another entry's StorageKey; safe only
                    // because DeleteAsync is a soft delete that never removes S3 objects.
                    actualStorageKey = existingBySha[0].StorageKey;
                    _logger.LogInformation("Deduplicating artifact {ArtifactId} - content matches {ExistingKey}",
                        request.ArtifactId, actualStorageKey);
                }
                else
                {
                    // Store new content
                    using var uploadStream = new MemoryStream(contentBytes);
                    var metadata = BuildS3Metadata(request);
                    await _client.PutObjectAsync(
                        _options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
                }
            }
            else
            {
                // Store without deduplication
                using var uploadStream = new MemoryStream(contentBytes);
                var metadata = BuildS3Metadata(request);
                await _client.PutObjectAsync(
                    _options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
            }

            // Index the artifact
            var indexEntry = new ArtifactIndexEntry
            {
                Id = Guid.NewGuid(),
                TenantId = request.TenantId,
                BomRef = request.BomRef,
                SerialNumber = request.SerialNumber,
                ArtifactId = request.ArtifactId,
                StorageKey = actualStorageKey,
                Type = request.Type,
                ContentType = request.ContentType,
                Sha256 = sha256,
                SizeBytes = sizeBytes,
                CreatedAt = DateTimeOffset.UtcNow
            };

            await _indexRepository.IndexAsync(indexEntry, ct).ConfigureAwait(false);

            _logger.LogInformation(
                "Stored artifact {ArtifactId} for bom-ref {BomRef} at {Key} ({Size} bytes)",
                request.ArtifactId, request.BomRef, actualStorageKey, sizeBytes);

            return ArtifactStoreResult.Succeeded(actualStorageKey, sha256, sizeBytes, wasCreated: true);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to store artifact {ArtifactId}", request.ArtifactId);
            return ArtifactStoreResult.Failed(ex.Message);
        }
    }

    /// <summary>
    /// Reads an artifact. When serialNumber/artifactId are omitted, the newest
    /// matching index entry for the bom-ref is used.
    /// NOTE(review): any exception (including transient S3/network errors) is
    /// reported as NotFound — callers cannot distinguish "missing" from "failed".
    /// </summary>
    /// <inheritdoc />
    public async Task<ArtifactReadResult> ReadAsync(
        string bomRef,
        string? serialNumber,
        string? artifactId,
        CancellationToken ct = default)
    {
        try
        {
            ArtifactIndexEntry? entry;

            // Resolve the most specific index entry the caller identified.
            if (serialNumber != null && artifactId != null)
            {
                entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
            }
            else if (serialNumber != null)
            {
                var entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
                entry = entries.FirstOrDefault();
            }
            else
            {
                var entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
                entry = entries.FirstOrDefault();
            }

            if (entry == null)
            {
                return ArtifactReadResult.NotFound($"No artifact found for bom-ref: {bomRef}");
            }

            var stream = await _client.GetObjectAsync(_options.BucketName, entry.StorageKey, ct).ConfigureAwait(false);
            if (stream == null)
            {
                // Index/S3 drift: the index row exists but the object is gone.
                return ArtifactReadResult.NotFound($"Object not found in S3: {entry.StorageKey}");
            }

            var metadata = new ArtifactMetadata
            {
                StorageKey = entry.StorageKey,
                BomRef = entry.BomRef,
                SerialNumber = entry.SerialNumber,
                ArtifactId = entry.ArtifactId,
                ContentType = entry.ContentType,
                SizeBytes = entry.SizeBytes,
                Sha256 = entry.Sha256,
                CreatedAt = entry.CreatedAt,
                Type = entry.Type,
                TenantId = entry.TenantId
            };

            return ArtifactReadResult.Succeeded(stream, metadata);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to read artifact for bom-ref {BomRef}", bomRef);
            return ArtifactReadResult.NotFound(ex.Message);
        }
    }

    /// <summary>
    /// Lists artifact metadata for a bom-ref (optionally narrowed by serial number),
    /// straight from the index — no S3 calls.
    /// </summary>
    /// <inheritdoc />
    public async Task<IReadOnlyList<ArtifactMetadata>> ListAsync(
        string bomRef,
        string? serialNumber = null,
        CancellationToken ct = default)
    {
        IReadOnlyList<ArtifactIndexEntry> entries;

        if (serialNumber != null)
        {
            entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
        }
        else
        {
            entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
        }

        return entries.Select(e => new ArtifactMetadata
        {
            StorageKey = e.StorageKey,
            BomRef = e.BomRef,
            SerialNumber = e.SerialNumber,
            ArtifactId = e.ArtifactId,
            ContentType = e.ContentType,
            SizeBytes = e.SizeBytes,
            Sha256 = e.Sha256,
            CreatedAt = e.CreatedAt,
            Type = e.Type,
            TenantId = e.TenantId
        }).ToList();
    }

    /// <summary>
    /// True if a non-deleted index entry exists (does not verify the S3 object).
    /// </summary>
    /// <inheritdoc />
    public async Task<bool> ExistsAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
        return entry != null;
    }

    /// <summary>
    /// Returns index-backed metadata for an artifact, or null if not indexed.
    /// </summary>
    /// <inheritdoc />
    public async Task<ArtifactMetadata?> GetMetadataAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
        if (entry == null)
        {
            return null;
        }

        return new ArtifactMetadata
        {
            StorageKey = entry.StorageKey,
            BomRef = entry.BomRef,
            SerialNumber = entry.SerialNumber,
            ArtifactId = entry.ArtifactId,
            ContentType = entry.ContentType,
            SizeBytes = entry.SizeBytes,
            Sha256 = entry.Sha256,
            CreatedAt = entry.CreatedAt,
            Type = entry.Type,
            TenantId = entry.TenantId
        };
    }

    /// <summary>
    /// Soft-deletes the index entry; the S3 object is intentionally kept for audit.
    /// </summary>
    /// <inheritdoc />
    public async Task<bool> DeleteAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        // Soft delete in index (don't delete from S3 for audit trail)
        var removed = await _indexRepository.RemoveAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);

        if (removed)
        {
            _logger.LogInformation("Soft-deleted artifact {ArtifactId} for bom-ref {BomRef}", artifactId, bomRef);
        }

        return removed;
    }

    // Prepends the configured prefix (normalized to a single trailing slash).
    private string BuildFullKey(string relativePath)
    {
        var prefix = string.IsNullOrWhiteSpace(_options.Prefix) ? "" : _options.Prefix.TrimEnd('/') + "/";
        return $"{prefix}{relativePath}";
    }

    // Lowercase hex SHA-256 of the buffered content; must match the index's sha256 column format.
    private static string ComputeSha256(byte[] content)
    {
        var hashBytes = SHA256.HashData(content);
        return Convert.ToHexStringLower(hashBytes);
    }

    // Builds S3 user metadata from the request. NOTE(review): keys already carry
    // the "x-amz-meta-" prefix here — confirm the IS3UnifiedClient implementation
    // does not prepend it a second time.
    private static Dictionary<string, string> BuildS3Metadata(ArtifactStoreRequest request)
    {
        var metadata = new Dictionary<string, string>
        {
            ["x-amz-meta-bomref"] = request.BomRef,
            ["x-amz-meta-serialnumber"] = request.SerialNumber,
            ["x-amz-meta-artifactid"] = request.ArtifactId,
            ["x-amz-meta-artifacttype"] = request.Type.ToString()
        };

        if (request.Metadata != null)
        {
            foreach (var kvp in request.Metadata)
            {
                metadata[$"x-amz-meta-{kvp.Key.ToLowerInvariant()}"] = kvp.Value;
            }
        }

        return metadata;
    }
}
|
||||
@@ -0,0 +1,201 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ServiceCollectionExtensions.cs
|
||||
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
|
||||
// Tasks: AS-002, AS-003 - Service registration
|
||||
// Description: DI registration for artifact store services
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Artifact.Core;
|
||||
using StellaOps.Infrastructure.Postgres.Options;
|
||||
|
||||
namespace StellaOps.Artifact.Infrastructure;
|
||||
|
||||
/// <summary>
/// Extension methods for registering artifact store services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds unified artifact store services with S3 backend.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root.</param>
    /// <param name="sectionName">Configuration section for options.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddUnifiedArtifactStore(
        this IServiceCollection services,
        IConfiguration configuration,
        string sectionName = "ArtifactStore")
    {
        // Configure S3 store options
        services.Configure<S3UnifiedArtifactStoreOptions>(configuration.GetSection($"{sectionName}:S3"));

        // Configure named PostgreSQL options for the index
        services.Configure<PostgresOptions>("Artifact", configuration.GetSection($"{sectionName}:Postgres"));

        // Register data source.
        // FIX: previously this factory resolved IOptionsSnapshot<PostgresOptions>
        // from the root provider inside an AddSingleton lambda. IOptionsSnapshot
        // is a SCOPED service, so that throws at startup when scope validation is
        // enabled (and is incorrect from a singleton regardless). IOptionsMonitor
        // is a singleton and supports named options, so use it instead.
        services.AddSingleton<ArtifactDataSource>(sp =>
        {
            var options = sp.GetRequiredService<IOptionsMonitor<PostgresOptions>>().Get("Artifact");
            var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<ArtifactDataSource>>();
            return new ArtifactDataSource(Options.Create(options), logger);
        });

        // Register core services
        services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();

        // Register index repository
        services.AddScoped<IArtifactIndexRepository>(sp =>
        {
            var dataSource = sp.GetRequiredService<ArtifactDataSource>();
            var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<PostgresArtifactIndexRepository>>();
            // TODO: Get tenant ID from context
            return new PostgresArtifactIndexRepository(dataSource, logger, "default");
        });

        // Register S3 artifact store
        services.AddScoped<IArtifactStore, S3UnifiedArtifactStore>();

        return services;
    }

    /// <summary>
    /// Adds unified artifact store with in-memory backend (for testing).
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddInMemoryArtifactStore(this IServiceCollection services)
    {
        services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();
        services.AddSingleton<IArtifactIndexRepository, InMemoryArtifactIndexRepository>();
        services.AddSingleton<IArtifactStore, InMemoryArtifactStore>();

        return services;
    }

    /// <summary>
    /// Adds artifact migration services.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configure">Options configuration.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddArtifactMigration(
        this IServiceCollection services,
        Action<ArtifactMigrationOptions>? configure = null)
    {
        // Options are materialized eagerly and registered as a plain singleton
        // (no IOptions wrapper), matching ArtifactMigrationService's constructor.
        var options = new ArtifactMigrationOptions();
        configure?.Invoke(options);
        services.AddSingleton(options);

        services.AddScoped<ArtifactMigrationService>();

        return services;
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// In-memory artifact store for testing.
|
||||
/// </summary>
|
||||
public sealed class InMemoryArtifactStore : IArtifactStore
|
||||
{
|
||||
private readonly Dictionary<string, (byte[] Content, ArtifactMetadata Metadata)> _artifacts = new();
|
||||
private readonly object _lock = new();
|
||||
|
||||
public Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
|
||||
{
|
||||
var key = $"{request.BomRef}/{request.SerialNumber}/{request.ArtifactId}";
|
||||
using var ms = new MemoryStream();
|
||||
request.Content.CopyTo(ms);
|
||||
var content = ms.ToArray();
|
||||
|
||||
using var sha = System.Security.Cryptography.SHA256.Create();
|
||||
var hash = sha.ComputeHash(content);
|
||||
var sha256 = Convert.ToHexStringLower(hash);
|
||||
|
||||
var metadata = new ArtifactMetadata
|
||||
{
|
||||
StorageKey = key,
|
||||
BomRef = request.BomRef,
|
||||
SerialNumber = request.SerialNumber,
|
||||
ArtifactId = request.ArtifactId,
|
||||
ContentType = request.ContentType,
|
||||
SizeBytes = content.Length,
|
||||
Sha256 = sha256,
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
Type = request.Type,
|
||||
TenantId = request.TenantId
|
||||
};
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
var wasCreated = !_artifacts.ContainsKey(key);
|
||||
_artifacts[key] = (content, metadata);
|
||||
return Task.FromResult(ArtifactStoreResult.Succeeded(key, sha256, content.Length, wasCreated));
|
||||
}
|
||||
}
|
||||
|
||||
public Task<ArtifactReadResult> ReadAsync(string bomRef, string? serialNumber, string? artifactId, CancellationToken ct = default)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
var matching = _artifacts
|
||||
.Where(kvp => kvp.Value.Metadata.BomRef == bomRef)
|
||||
.Where(kvp => serialNumber == null || kvp.Value.Metadata.SerialNumber == serialNumber)
|
||||
.Where(kvp => artifactId == null || kvp.Value.Metadata.ArtifactId == artifactId)
|
||||
.FirstOrDefault();
|
||||
|
||||
if (matching.Value.Content == null)
|
||||
{
|
||||
return Task.FromResult(ArtifactReadResult.NotFound());
|
||||
}
|
||||
|
||||
return Task.FromResult(ArtifactReadResult.Succeeded(
|
||||
new MemoryStream(matching.Value.Content),
|
||||
matching.Value.Metadata));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Lists metadata for all stored artifacts with the given bom-ref, optionally
/// narrowed to a specific serial number. Returns an empty list when none match.
/// </summary>
/// <param name="bomRef">Required bom-ref to match.</param>
/// <param name="serialNumber">Optional serial-number filter; null matches any.</param>
/// <param name="ct">Unused; the scan is synchronous and in-memory.</param>
public Task<IReadOnlyList<ArtifactMetadata>> ListAsync(string bomRef, string? serialNumber = null, CancellationToken ct = default)
{
    lock (_lock)
    {
        var entries = new List<ArtifactMetadata>();

        foreach (var (_, metadata) in _artifacts.Values)
        {
            var serialMatches = serialNumber == null || metadata.SerialNumber == serialNumber;
            if (metadata.BomRef == bomRef && serialMatches)
            {
                entries.Add(metadata);
            }
        }

        return Task.FromResult<IReadOnlyList<ArtifactMetadata>>(entries);
    }
}
|
||||
|
||||
/// <summary>
/// Checks whether an artifact exists for the exact bomRef/serial/artifactId triple.
/// </summary>
public Task<bool> ExistsAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
    // Same "a/b/c" storage key shape used everywhere else in this store.
    var storageKey = string.Join('/', bomRef, serialNumber, artifactId);

    lock (_lock)
    {
        var found = _artifacts.ContainsKey(storageKey);
        return Task.FromResult(found);
    }
}
|
||||
|
||||
/// <summary>
/// Returns the stored metadata for the exact bomRef/serial/artifactId triple,
/// or null when no such artifact exists.
/// </summary>
public Task<ArtifactMetadata?> GetMetadataAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
    var storageKey = $"{bomRef}/{serialNumber}/{artifactId}";

    lock (_lock)
    {
        if (_artifacts.TryGetValue(storageKey, out var stored))
        {
            return Task.FromResult<ArtifactMetadata?>(stored.Metadata);
        }

        return Task.FromResult<ArtifactMetadata?>(null);
    }
}
|
||||
|
||||
/// <summary>
/// Removes the artifact stored under the exact bomRef/serial/artifactId triple.
/// </summary>
/// <returns>True when an entry was removed; false when no such key existed.</returns>
public Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
    lock (_lock)
    {
        var removed = _artifacts.Remove($"{bomRef}/{serialNumber}/{artifactId}");
        return Task.FromResult(removed);
    }
}
|
||||
}
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Artifact.Infrastructure</RootNamespace>
    <AssemblyName>StellaOps.Artifact.Infrastructure</AssemblyName>
    <Description>Unified artifact storage infrastructure implementations for StellaOps</Description>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="AWSSDK.S3" />
    <PackageReference Include="Npgsql" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Artifact.Core\StellaOps.Artifact.Core.csproj" />
    <ProjectReference Include="..\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
  </ItemGroup>

  <ItemGroup>
    <EmbeddedResource Include="Migrations\**\*.sql" LogicalName="%(RecursiveDir)%(Filename)%(Extension)" />
  </ItemGroup>

</Project>