stabilizaiton work - projects rework for maintenanceability and ui livening

This commit is contained in:
master
2026-02-03 23:40:04 +02:00
parent 074ce117ba
commit 557feefdc3
3305 changed files with 186813 additions and 107843 deletions

View File

@@ -0,0 +1,25 @@
# Artifact Core Tests Charter
## Mission
Own test coverage for artifact core and infrastructure behaviors. Keep tests deterministic and offline-friendly.
## Responsibilities
- Maintain `StellaOps.Artifact.Core.Tests` coverage.
- Validate store, index, and bom-ref encoding behavior with stable fixtures.
- Record remediation status updates in the active sprint tracker.
## Key Paths
- `ArtifactStore*Tests*.cs`
- `CycloneDxExtractor*Tests*.cs`
- `S3UnifiedArtifactStore*Tests*.cs`
## Required Reading
- `docs/operations/artifact-migration-runbook.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/technical/testing/TEST_SUITE_OVERVIEW.md`
- `docs/code-of-conduct/TESTING_PRACTICES.md`
## Working Agreement
- Use fixed IDs/timestamps and stable ordering in fixtures.
- Keep tests offline; no runtime network calls.
- Update sprint status when work starts or finishes.

View File

@@ -0,0 +1,68 @@
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Unit")]
public sealed class ArtifactIndexRepositoryTests
{
[Fact]
public async Task InMemoryIndex_IndexAndFind_SucceedsAsync()
{
var repo = new InMemoryArtifactIndexRepository(ArtifactTestFixtures.TimeProvider);
var entry = CreateEntry(1, "artifact-001", "abc123");
await repo.IndexAsync(entry);
var found = await repo.FindByBomRefAsync(entry.BomRef);
Assert.Single(found);
Assert.Equal(entry.ArtifactId, found[0].ArtifactId);
}
[Fact]
public async Task InMemoryIndex_Remove_SoftDeletesAsync()
{
var repo = new InMemoryArtifactIndexRepository(ArtifactTestFixtures.TimeProvider);
var entry = CreateEntry(2, "artifact-001", "abc123");
await repo.IndexAsync(entry);
await repo.RemoveAsync(entry.BomRef, entry.SerialNumber, entry.ArtifactId);
var found = await repo.FindByBomRefAsync(entry.BomRef);
Assert.Empty(found);
}
[Fact]
public async Task InMemoryIndex_FindBySha256_ReturnsMatchesAsync()
{
var repo = new InMemoryArtifactIndexRepository(ArtifactTestFixtures.TimeProvider);
const string sha256 = "abc123def456";
await repo.IndexAsync(CreateEntry(3, "artifact-1", sha256));
await repo.IndexAsync(CreateEntry(4, "artifact-2", sha256));
var found = await repo.FindBySha256Async(sha256);
Assert.Equal(2, found.Count);
}
private static ArtifactIndexEntry CreateEntry(int id, string artifactId, string sha256)
{
return new ArtifactIndexEntry
{
Id = CreateGuid(id),
TenantId = ArtifactTestFixtures.TenantId,
BomRef = ArtifactTestFixtures.DefaultBomRef,
SerialNumber = ArtifactTestFixtures.DefaultSerialNumber,
ArtifactId = artifactId,
StorageKey = $"artifacts/{artifactId}.json",
Type = ArtifactType.Sbom,
ContentType = "application/json",
Sha256 = sha256,
SizeBytes = 1024,
CreatedAt = ArtifactTestFixtures.FixedNow
};
}
private static Guid CreateGuid(int value) => Guid.Parse($"00000000-0000-0000-0000-{value:D12}");
}

View File

@@ -0,0 +1,53 @@
using System.Diagnostics;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStorePerformanceTests
{
[Fact]
public async Task ListByBomRef_1000Artifacts_Under100msAsync()
{
const int artifactCount = 1000;
const int maxDurationMs = 100;
var store = CreateStore();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, ArtifactTestFixtures.TenantId, bomRef);
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
}
var sw = Stopwatch.StartNew();
var list = await store.ListAsync(bomRef);
sw.Stop();
Assert.Equal(artifactCount, list.Count);
Assert.True(sw.ElapsedMilliseconds < maxDurationMs);
}
[Fact]
public async Task ParallelStore_1000Artifacts_UnderThresholdAsync()
{
const int artifactCount = 1000;
const int maxDurationMs = 10000;
var store = CreateStore();
var bomRef = "pkg:docker/perf-parallel/app@sha256:abc123";
var artifacts = GenerateTestArtifacts(artifactCount, ArtifactTestFixtures.TenantId, bomRef);
var sw = Stopwatch.StartNew();
await Parallel.ForEachAsync(artifacts, new ParallelOptions { MaxDegreeOfParallelism = 10 },
async (artifact, ct) =>
{
await store.StoreAsync(artifact, ct);
});
sw.Stop();
var stored = await store.ListAsync(bomRef);
Assert.Equal(artifactCount, stored.Count);
Assert.True(sw.ElapsedMilliseconds < maxDurationMs);
}
}

View File

@@ -0,0 +1,62 @@
using System.Diagnostics;
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStorePerformanceTests
{
[Fact]
public async Task MixedOperations_CompletesSuccessfullyAsync()
{
const int operationCount = 1000;
var store = CreateStore();
var bomRef = "pkg:docker/mixed-test/app@sha256:abc123";
var preloadArtifacts = GenerateTestArtifacts(500, ArtifactTestFixtures.TenantId, bomRef);
foreach (var artifact in preloadArtifacts)
{
await store.StoreAsync(artifact);
}
var random = new Random(42);
var sw = Stopwatch.StartNew();
for (var i = 0; i < operationCount; i++)
{
var op = random.Next(4);
switch (op)
{
case 0:
using (var stream = new MemoryStream(new byte[] { (byte)(i % 256) }))
{
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = $"urn:uuid:{CreateGuid(10000 + i)}",
ArtifactId = $"mixed-artifact-{i}",
Content = stream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = ArtifactTestFixtures.TenantId
});
}
break;
case 1:
var idx = random.Next(preloadArtifacts.Count);
await store.ReadAsync(bomRef, preloadArtifacts[idx].SerialNumber, preloadArtifacts[idx].ArtifactId);
break;
case 2:
await store.ListAsync(bomRef);
break;
case 3:
var checkIdx = random.Next(preloadArtifacts.Count);
await store.ExistsAsync(bomRef, preloadArtifacts[checkIdx].SerialNumber, preloadArtifacts[checkIdx].ArtifactId);
break;
}
}
sw.Stop();
}
}

View File

@@ -0,0 +1,57 @@
using System.Diagnostics;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStorePerformanceTests
{
[Fact]
public async Task Store1000Artifacts_CompletesUnderThresholdAsync()
{
const int artifactCount = 1000;
const int maxDurationMs = 30000;
var store = CreateStore();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, ArtifactTestFixtures.TenantId, bomRef);
var sw = Stopwatch.StartNew();
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
}
sw.Stop();
Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
$"Store operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
}
[Fact]
public async Task Retrieve1000Artifacts_CompletesUnderThresholdAsync()
{
const int artifactCount = 1000;
const int maxDurationMs = 10000;
var store = CreateStore();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, ArtifactTestFixtures.TenantId, bomRef);
var stored = new List<(string BomRef, string Serial, string Id)>();
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
stored.Add((artifact.BomRef, artifact.SerialNumber, artifact.ArtifactId));
}
var sw = Stopwatch.StartNew();
foreach (var (bRef, serial, id) in stored)
{
var result = await store.ReadAsync(bRef, serial, id);
Assert.True(result.Found);
}
sw.Stop();
Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
$"Retrieve operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
}
}

View File

@@ -1,11 +1,4 @@
// -----------------------------------------------------------------------------
// ArtifactStorePerformanceTests.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Performance test: 1000 artifacts store/retrieve
// Description: Performance benchmarks for artifact store operations
// -----------------------------------------------------------------------------
using System.Diagnostics;
using System.Text;
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using Xunit;
@@ -13,230 +6,30 @@ using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Performance")]
public sealed class ArtifactStorePerformanceTests
public sealed partial class ArtifactStorePerformanceTests
{
private readonly ITestOutputHelper _output;
public ArtifactStorePerformanceTests(ITestOutputHelper output)
{
_output = output;
}
[Fact]
public async Task Store1000Artifacts_CompletesUnderThreshold()
{
// Arrange
const int artifactCount = 1000;
const int maxDurationMs = 30000; // 30 seconds for 1000 artifacts (30ms each avg)
var store = new InMemoryArtifactStore();
var tenantId = Guid.NewGuid();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);
// Act
var sw = Stopwatch.StartNew();
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
}
sw.Stop();
// Assert
_output.WriteLine($"Stored {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
_output.WriteLine($"Average: {sw.ElapsedMilliseconds / (double)artifactCount:F2}ms per artifact");
_output.WriteLine($"Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
$"Store operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
}
[Fact]
public async Task Retrieve1000Artifacts_CompletesUnderThreshold()
{
// Arrange
const int artifactCount = 1000;
const int maxDurationMs = 10000; // 10 seconds for 1000 reads (10ms each avg)
var store = new InMemoryArtifactStore();
var tenantId = Guid.NewGuid();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);
// Store all artifacts first
var storedArtifacts = new List<(string bomRef, string serial, string id)>();
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
storedArtifacts.Add((artifact.BomRef, artifact.SerialNumber!, artifact.ArtifactId));
}
// Act - Read them all back
var sw = Stopwatch.StartNew();
foreach (var (bRef, serial, id) in storedArtifacts)
{
var result = await store.ReadAsync(bRef, serial, id);
Assert.True(result.Found);
}
sw.Stop();
// Assert
_output.WriteLine($"Retrieved {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
_output.WriteLine($"Average: {sw.ElapsedMilliseconds / (double)artifactCount:F2}ms per artifact");
_output.WriteLine($"Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
$"Retrieve operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
}
[Fact]
public async Task ListByBomRef_1000Artifacts_Under100ms()
{
// Arrange
const int artifactCount = 1000;
const int maxDurationMs = 100; // 100ms as per completion criteria
var store = new InMemoryArtifactStore();
var tenantId = Guid.NewGuid();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);
foreach (var artifact in artifacts)
{
await store.StoreAsync(artifact);
}
// Act
var sw = Stopwatch.StartNew();
var results = await store.ListAsync(bomRef);
sw.Stop();
// Assert
_output.WriteLine($"Listed {results.Count} artifacts in {sw.ElapsedMilliseconds}ms");
Assert.Equal(artifactCount, results.Count);
Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
$"List operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
}
[Fact]
public async Task ParallelStore_1000Artifacts_HandlesContention()
{
// Arrange
const int artifactCount = 1000;
const int maxDurationMs = 60000; // 60 seconds with contention
var store = new InMemoryArtifactStore();
var tenantId = Guid.NewGuid();
var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);
// Act - Store in parallel
var sw = Stopwatch.StartNew();
await Parallel.ForEachAsync(
artifacts,
new ParallelOptions { MaxDegreeOfParallelism = 10 },
async (artifact, ct) =>
{
await store.StoreAsync(artifact, ct);
});
sw.Stop();
// Assert
_output.WriteLine($"Parallel stored {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
_output.WriteLine($"Parallelism: 10, Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
var stored = await store.ListAsync(bomRef);
Assert.Equal(artifactCount, stored.Count);
Assert.True(sw.ElapsedMilliseconds < maxDurationMs);
}
[Fact]
public async Task MixedOperations_CompletesSuccessfully()
{
// Arrange
const int operationCount = 1000;
var store = new InMemoryArtifactStore();
var tenantId = Guid.NewGuid();
var bomRef = "pkg:docker/mixed-test/app@sha256:abc123";
// Pre-populate with 500 artifacts
var preloadArtifacts = GenerateTestArtifacts(500, tenantId, bomRef);
foreach (var artifact in preloadArtifacts)
{
await store.StoreAsync(artifact);
}
var random = new Random(42); // Deterministic seed for reproducibility
var sw = Stopwatch.StartNew();
// Act - Mix of operations
for (var i = 0; i < operationCount; i++)
{
var op = random.Next(4);
switch (op)
{
case 0: // Store
using (var stream = new MemoryStream(new byte[] { (byte)(i % 256) }))
{
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = $"urn:uuid:mixed-{i}",
ArtifactId = $"mixed-artifact-{i}",
Content = stream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = tenantId
});
}
break;
case 1: // Read existing
var idx = random.Next(preloadArtifacts.Count);
await store.ReadAsync(bomRef, preloadArtifacts[idx].SerialNumber, preloadArtifacts[idx].ArtifactId);
break;
case 2: // List
await store.ListAsync(bomRef);
break;
case 3: // Exists check
var checkIdx = random.Next(preloadArtifacts.Count);
await store.ExistsAsync(bomRef, preloadArtifacts[checkIdx].SerialNumber!, preloadArtifacts[checkIdx].ArtifactId);
break;
}
}
sw.Stop();
// Assert
_output.WriteLine($"Completed {operationCount} mixed operations in {sw.ElapsedMilliseconds}ms");
_output.WriteLine($"Operations/second: {operationCount / sw.Elapsed.TotalSeconds:F2}");
}
private static InMemoryArtifactStore CreateStore() => new(ArtifactTestFixtures.TimeProvider);
private static List<ArtifactStoreRequest> GenerateTestArtifacts(int count, Guid tenantId, string bomRef)
{
var artifacts = new List<ArtifactStoreRequest>();
var artifacts = new List<ArtifactStoreRequest>(count);
for (var i = 0; i < count; i++)
{
var content = System.Text.Encoding.UTF8.GetBytes($"{{\"index\": {i}, \"data\": \"test-{Guid.NewGuid()}\"}}");
var content = Encoding.UTF8.GetBytes($"{{\"index\":{i},\"data\":\"test-{i:D4}\"}}");
artifacts.Add(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = $"urn:uuid:{Guid.NewGuid()}",
SerialNumber = $"urn:uuid:{CreateGuid(i + 1)}",
ArtifactId = $"artifact-{i:D5}",
Content = new MemoryStream(content),
ContentType = "application/json",
Type = (ArtifactType)(i % 5), // Rotate through types
Type = (ArtifactType)(i % 5),
TenantId = tenantId
});
}
return artifacts;
}
private static Guid CreateGuid(int value) => Guid.Parse($"00000000-0000-0000-0000-{value:D12}");
}

View File

@@ -0,0 +1,43 @@
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStoreTests
{
[Fact]
public async Task InMemoryStore_Exists_ReturnsTrueForExistingAsync()
{
var store = ArtifactTestFixtures.CreateStore();
var bomRef = ArtifactTestFixtures.DefaultBomRef;
var serial = ArtifactTestFixtures.DefaultSerialNumber;
var artifactId = "artifact-001";
using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
var request = ArtifactTestFixtures.CreateRequest(artifactId, contentStream, bomRef, serial);
await store.StoreAsync(request);
Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
Assert.False(await store.ExistsAsync(bomRef, serial, "nonexistent"));
}
[Fact]
public async Task InMemoryStore_Delete_RemovesArtifactAsync()
{
var store = ArtifactTestFixtures.CreateStore();
var bomRef = ArtifactTestFixtures.DefaultBomRef;
var serial = ArtifactTestFixtures.DefaultSerialNumber;
var artifactId = "artifact-001";
using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
var request = ArtifactTestFixtures.CreateRequest(artifactId, contentStream, bomRef, serial);
await store.StoreAsync(request);
Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
var deleted = await store.DeleteAsync(bomRef, serial, artifactId);
Assert.True(deleted);
Assert.False(await store.ExistsAsync(bomRef, serial, artifactId));
}
}

View File

@@ -0,0 +1,30 @@
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStoreTests
{
[Fact]
public async Task InMemoryStore_List_ReturnsMatchingArtifactsAsync()
{
var store = ArtifactTestFixtures.CreateStore();
var bomRef = ArtifactTestFixtures.DefaultBomRef;
for (var i = 0; i < 2; i++)
{
using var contentStream = new MemoryStream(new byte[] { (byte)i });
var request = ArtifactTestFixtures.CreateRequest($"artifact-{i}", contentStream, bomRef);
await store.StoreAsync(request);
}
using var otherStream = new MemoryStream(new byte[] { 99 });
var otherRequest = ArtifactTestFixtures.CreateRequest("artifact-other", otherStream, "pkg:docker/other/app@sha256:xyz");
await store.StoreAsync(otherRequest);
var list = await store.ListAsync(bomRef);
Assert.Equal(2, list.Count);
Assert.All(list, a => Assert.Equal(bomRef, a.BomRef));
}
}

View File

@@ -0,0 +1,24 @@
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class ArtifactStoreTests
{
[Fact]
public async Task InMemoryStore_StoreExisting_ReturnsWasCreatedFalseAsync()
{
var store = ArtifactTestFixtures.CreateStore();
using var firstStream = new MemoryStream(new byte[] { 1 });
var request = ArtifactTestFixtures.CreateRequest("artifact-001", firstStream);
var first = await store.StoreAsync(request);
Assert.True(first.WasCreated);
using var secondStream = new MemoryStream(new byte[] { 2 });
request = request with { Content = secondStream };
var second = await store.StoreAsync(request);
Assert.False(second.WasCreated);
}
}

View File

@@ -0,0 +1,31 @@
using System.Text;
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Unit")]
public sealed partial class ArtifactStoreTests
{
[Fact]
public async Task InMemoryStore_StoreAndRead_SucceedsAsync()
{
var store = ArtifactTestFixtures.CreateStore();
using var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("{\"test\": true}"));
var request = ArtifactTestFixtures.CreateRequest("artifact-001", contentStream);
var storeResult = await store.StoreAsync(request);
Assert.True(storeResult.Success);
Assert.True(storeResult.WasCreated);
Assert.NotNull(storeResult.Sha256);
Assert.Equal(contentStream.Length, storeResult.SizeBytes);
var readResult = await store.ReadAsync(request.BomRef, request.SerialNumber, request.ArtifactId);
Assert.True(readResult.Found);
Assert.NotNull(readResult.Content);
Assert.NotNull(readResult.Metadata);
Assert.Equal(request.BomRef, readResult.Metadata.BomRef);
}
}

View File

@@ -1,429 +0,0 @@
// -----------------------------------------------------------------------------
// ArtifactStoreTests.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Tasks: AS-001, AS-002, AS-003 - Unit tests
// Description: Unit tests for unified artifact store
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Unit")]
public sealed class ArtifactStoreTests
{
[Fact]
public async Task InMemoryStore_StoreAndRead_Succeeds()
{
var store = new InMemoryArtifactStore();
var content = System.Text.Encoding.UTF8.GetBytes("{\"test\": true}");
using var contentStream = new MemoryStream(content);
var request = new ArtifactStoreRequest
{
BomRef = "pkg:docker/test/app@sha256:abc123",
SerialNumber = "urn:uuid:12345678-1234-1234-1234-123456789012",
ArtifactId = "artifact-001",
Content = contentStream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = Guid.NewGuid()
};
var storeResult = await store.StoreAsync(request);
Assert.True(storeResult.Success);
Assert.True(storeResult.WasCreated);
Assert.NotNull(storeResult.Sha256);
Assert.Equal(content.Length, storeResult.SizeBytes);
// Read it back
var readResult = await store.ReadAsync(
request.BomRef,
request.SerialNumber,
request.ArtifactId);
Assert.True(readResult.Found);
Assert.NotNull(readResult.Content);
Assert.NotNull(readResult.Metadata);
Assert.Equal(request.BomRef, readResult.Metadata.BomRef);
}
[Fact]
public async Task InMemoryStore_List_ReturnsMatchingArtifacts()
{
var store = new InMemoryArtifactStore();
var bomRef = "pkg:docker/test/app@sha256:abc123";
var tenantId = Guid.NewGuid();
// Store two artifacts with same bom-ref
for (var i = 0; i < 2; i++)
{
using var contentStream = new MemoryStream(new byte[] { (byte)i });
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = $"urn:uuid:serial-{i}",
ArtifactId = $"artifact-{i}",
Content = contentStream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = tenantId
});
}
// Store one with different bom-ref
using var otherStream = new MemoryStream(new byte[] { 99 });
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = "pkg:docker/other/app@sha256:xyz",
SerialNumber = "urn:uuid:other",
ArtifactId = "artifact-other",
Content = otherStream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = tenantId
});
var list = await store.ListAsync(bomRef);
Assert.Equal(2, list.Count);
Assert.All(list, a => Assert.Equal(bomRef, a.BomRef));
}
[Fact]
public async Task InMemoryStore_Exists_ReturnsTrueForExisting()
{
var store = new InMemoryArtifactStore();
var bomRef = "pkg:docker/test/app@sha256:abc123";
var serial = "urn:uuid:12345678-1234-1234-1234-123456789012";
var artifactId = "artifact-001";
using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = serial,
ArtifactId = artifactId,
Content = contentStream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = Guid.NewGuid()
});
Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
Assert.False(await store.ExistsAsync(bomRef, serial, "nonexistent"));
}
[Fact]
public async Task InMemoryStore_Delete_RemovesArtifact()
{
var store = new InMemoryArtifactStore();
var bomRef = "pkg:docker/test/app@sha256:abc123";
var serial = "urn:uuid:12345678-1234-1234-1234-123456789012";
var artifactId = "artifact-001";
using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
await store.StoreAsync(new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = serial,
ArtifactId = artifactId,
Content = contentStream,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = Guid.NewGuid()
});
Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
var deleted = await store.DeleteAsync(bomRef, serial, artifactId);
Assert.True(deleted);
Assert.False(await store.ExistsAsync(bomRef, serial, artifactId));
}
[Fact]
public async Task InMemoryStore_StoreExisting_ReturnsWasCreatedFalse()
{
var store = new InMemoryArtifactStore();
var request = new ArtifactStoreRequest
{
BomRef = "pkg:docker/test/app@sha256:abc123",
SerialNumber = "urn:uuid:12345678-1234-1234-1234-123456789012",
ArtifactId = "artifact-001",
Content = new MemoryStream(new byte[] { 1 }),
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = Guid.NewGuid()
};
var first = await store.StoreAsync(request);
Assert.True(first.WasCreated);
// Store again (with new stream)
request = request with { Content = new MemoryStream(new byte[] { 2 }) };
var second = await store.StoreAsync(request);
Assert.False(second.WasCreated);
}
}
[Trait("Category", "Unit")]
public sealed class BomRefEncoderTests
{
[Theory]
[InlineData("pkg:docker/acme/api@sha256:abc", "pkg_docker_acme_api_at_sha256_abc")]
[InlineData("simple-ref", "simple-ref")]
[InlineData("ref/with/slashes", "ref_with_slashes")]
[InlineData("pkg:npm/@scope/pkg", "pkg_npm__at_scope_pkg")]
public void Encode_HandlesSpecialCharacters(string input, string expected)
{
var result = BomRefEncoder.Encode(input);
Assert.Equal(expected, result);
}
[Fact]
public void BuildPath_CreatesCorrectStructure()
{
var bomRef = "pkg:docker/acme/api@sha256:abc";
var serial = "urn:uuid:12345";
var artifactId = "envelope-001";
var path = BomRefEncoder.BuildPath(bomRef, serial, artifactId);
Assert.StartsWith("artifacts/", path);
Assert.EndsWith(".json", path);
Assert.Contains("envelope-001", path);
}
[Fact]
public void Encode_EmptyInput_ReturnsUnknown()
{
Assert.Equal("unknown", BomRefEncoder.Encode(""));
Assert.Equal("unknown", BomRefEncoder.Encode(" "));
}
}
[Trait("Category", "Unit")]
public sealed class CycloneDxExtractorTests
{
private readonly CycloneDxExtractor _extractor = new();
[Fact]
public async Task ExtractAsync_ValidCycloneDx_ExtractsMetadata()
{
var sbom = """
{
"bomFormat": "CycloneDX",
"specVersion": "1.5",
"serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79",
"version": 1,
"metadata": {
"timestamp": "2026-01-18T12:00:00Z",
"component": {
"type": "application",
"bom-ref": "acme-app",
"name": "ACME Application",
"version": "1.0.0",
"purl": "pkg:docker/acme/app@1.0.0"
}
},
"components": [
{
"type": "library",
"bom-ref": "component-1",
"name": "some-lib",
"purl": "pkg:npm/some-lib@1.0.0"
}
]
}
""";
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(sbom));
var result = await _extractor.ExtractAsync(stream);
Assert.True(result.Success);
Assert.Equal("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", result.SerialNumber);
Assert.Equal("1.5", result.SpecVersion);
Assert.Equal(1, result.Version);
Assert.Equal("acme-app", result.PrimaryBomRef);
Assert.Equal("ACME Application", result.PrimaryName);
Assert.Equal("1.0.0", result.PrimaryVersion);
Assert.Equal("pkg:docker/acme/app@1.0.0", result.PrimaryPurl);
Assert.Single(result.ComponentBomRefs);
Assert.Single(result.ComponentPurls);
}
[Fact]
public async Task ExtractParsedAsync_ValidCycloneDx_ExtractsParsedSbom()
{
var sbom = """
{
"bomFormat": "CycloneDX",
"specVersion": "1.5",
"serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79",
"version": 1,
"metadata": {
"timestamp": "2026-01-18T12:00:00Z",
"component": {
"type": "application",
"bom-ref": "acme-app",
"name": "ACME Application",
"version": "1.0.0",
"purl": "pkg:docker/acme/app@1.0.0"
}
},
"components": [
{
"type": "library",
"bom-ref": "component-1",
"name": "some-lib",
"purl": "pkg:npm/some-lib@1.0.0"
}
]
}
""";
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(sbom));
var result = await _extractor.ExtractParsedAsync(stream);
Assert.Equal("cyclonedx", result.Format);
Assert.Equal("1.5", result.SpecVersion);
Assert.Equal("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", result.SerialNumber);
Assert.Equal("ACME Application", result.Metadata.Name);
Assert.Equal("acme-app", result.Metadata.RootComponentRef);
Assert.Contains(result.Components, component => component.BomRef == "acme-app");
Assert.Contains(result.Components, component => component.BomRef == "component-1");
}
[Fact]
public async Task ExtractAsync_MissingOptionalFields_Succeeds()
{
var sbom = """
{
"bomFormat": "CycloneDX",
"specVersion": "1.4",
"version": 1,
"components": []
}
""";
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(sbom));
var result = await _extractor.ExtractAsync(stream);
Assert.True(result.Success);
Assert.Null(result.SerialNumber);
Assert.Equal("1.4", result.SpecVersion);
Assert.Null(result.PrimaryBomRef);
}
[Fact]
public async Task ExtractAsync_InvalidJson_ReturnsError()
{
var invalid = "not valid json";
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(invalid));
var result = await _extractor.ExtractAsync(stream);
Assert.False(result.Success);
Assert.NotNull(result.Error);
}
}
[Trait("Category", "Unit")]
public sealed class ArtifactIndexRepositoryTests
{
[Fact]
public async Task InMemoryIndex_IndexAndFind_Succeeds()
{
var repo = new InMemoryArtifactIndexRepository();
var entry = new ArtifactIndexEntry
{
Id = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
BomRef = "pkg:docker/test/app@sha256:abc",
SerialNumber = "urn:uuid:12345",
ArtifactId = "artifact-001",
StorageKey = "artifacts/test/artifact-001.json",
Type = ArtifactType.Sbom,
ContentType = "application/json",
Sha256 = "abc123",
SizeBytes = 1024,
CreatedAt = DateTimeOffset.UtcNow
};
await repo.IndexAsync(entry);
var found = await repo.FindByBomRefAsync(entry.BomRef);
Assert.Single(found);
Assert.Equal(entry.ArtifactId, found[0].ArtifactId);
}
[Fact]
public async Task InMemoryIndex_Remove_SoftDeletes()
{
var repo = new InMemoryArtifactIndexRepository();
var entry = new ArtifactIndexEntry
{
Id = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
BomRef = "pkg:docker/test/app@sha256:abc",
SerialNumber = "urn:uuid:12345",
ArtifactId = "artifact-001",
StorageKey = "artifacts/test/artifact-001.json",
Type = ArtifactType.Sbom,
ContentType = "application/json",
Sha256 = "abc123",
SizeBytes = 1024,
CreatedAt = DateTimeOffset.UtcNow
};
await repo.IndexAsync(entry);
await repo.RemoveAsync(entry.BomRef, entry.SerialNumber, entry.ArtifactId);
var found = await repo.FindByBomRefAsync(entry.BomRef);
Assert.Empty(found);
}
[Fact]
public async Task InMemoryIndex_FindBySha256_ReturnsMatches()
{
var repo = new InMemoryArtifactIndexRepository();
var sha256 = "abc123def456";
await repo.IndexAsync(new ArtifactIndexEntry
{
Id = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
BomRef = "pkg:docker/test/app1",
SerialNumber = "urn:uuid:1",
ArtifactId = "artifact-1",
StorageKey = "artifacts/1.json",
Type = ArtifactType.Sbom,
ContentType = "application/json",
Sha256 = sha256,
SizeBytes = 1024,
CreatedAt = DateTimeOffset.UtcNow
});
await repo.IndexAsync(new ArtifactIndexEntry
{
Id = Guid.NewGuid(),
TenantId = Guid.NewGuid(),
BomRef = "pkg:docker/test/app2",
SerialNumber = "urn:uuid:2",
ArtifactId = "artifact-2",
StorageKey = "artifacts/2.json",
Type = ArtifactType.Sbom,
ContentType = "application/json",
Sha256 = sha256,
SizeBytes = 1024,
CreatedAt = DateTimeOffset.UtcNow
});
var found = await repo.FindBySha256Async(sha256);
Assert.Equal(2, found.Count);
}
}

View File

@@ -0,0 +1,29 @@
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
namespace StellaOps.Artifact.Tests;
internal static class ArtifactTestFixtures
{
internal static readonly DateTimeOffset FixedNow = new(2026, 2, 3, 0, 0, 0, TimeSpan.Zero);
internal static readonly TimeProvider TimeProvider = new FixedTimeProvider(FixedNow);
internal static readonly Guid TenantId = Guid.Parse("11111111-1111-1111-1111-111111111111");
internal const string DefaultBomRef = "pkg:docker/test/app@sha256:abc123";
internal const string DefaultSerialNumber = "urn:uuid:12345678-1234-1234-1234-123456789012";
internal static InMemoryArtifactStore CreateStore() => new(TimeProvider);
internal static ArtifactStoreRequest CreateRequest(string artifactId, Stream content, string? bomRef = null, string? serialNumber = null)
{
return new ArtifactStoreRequest
{
BomRef = bomRef ?? DefaultBomRef,
SerialNumber = serialNumber ?? DefaultSerialNumber,
ArtifactId = artifactId,
Content = content,
ContentType = "application/json",
Type = ArtifactType.Sbom,
TenantId = TenantId
};
}
}

View File

@@ -0,0 +1,40 @@
using StellaOps.Artifact.Core;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Unit")]
public sealed class BomRefEncoderTests
{
[Theory]
[InlineData("pkg:docker/acme/api@sha256:abc", "pkg_docker_acme_api_at_sha256_abc")]
[InlineData("simple-ref", "simple-ref")]
[InlineData("ref/with/slashes", "ref_with_slashes")]
[InlineData("pkg:npm/@scope/pkg", "pkg_npm__at_scope_pkg")]
public void Encode_HandlesSpecialCharacters(string input, string expected)
{
var result = BomRefEncoder.Encode(input);
Assert.Equal(expected, result);
}
[Fact]
public void BuildPath_CreatesCorrectStructure()
{
var bomRef = "pkg:docker/acme/api@sha256:abc";
var serial = "urn:uuid:12345";
var artifactId = "envelope-001";
var path = BomRefEncoder.BuildPath(bomRef, serial, artifactId);
Assert.StartsWith("artifacts/", path);
Assert.EndsWith(".json", path);
Assert.Contains("envelope-001", path);
}
[Fact]
public void Encode_EmptyInput_ReturnsUnknown()
{
Assert.Equal("unknown", BomRefEncoder.Encode(""));
Assert.Equal("unknown", BomRefEncoder.Encode(" "));
}
}

View File

@@ -0,0 +1,48 @@
using System.Text;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class CycloneDxExtractorTests
{
[Fact]
public async Task ExtractAsync_ValidCycloneDx_ExtractsMetadataAsync()
{
using var stream = new MemoryStream(Encoding.UTF8.GetBytes(ValidCycloneDx));
var result = await _extractor.ExtractAsync(stream);
Assert.True(result.Success);
Assert.Equal("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", result.SerialNumber);
Assert.Equal("1.5", result.SpecVersion);
Assert.Equal(1, result.Version);
Assert.Equal("acme-app", result.PrimaryBomRef);
Assert.Equal("ACME Application", result.PrimaryName);
Assert.Equal("1.0.0", result.PrimaryVersion);
Assert.Equal("pkg:docker/acme/app@1.0.0", result.PrimaryPurl);
Assert.Single(result.ComponentBomRefs);
Assert.Single(result.ComponentPurls);
}
[Fact]
public async Task ExtractAsync_MissingOptionalFields_SucceedsAsync()
{
var sbom = "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.4\",\"version\":1,\"components\":[]}";
using var stream = new MemoryStream(Encoding.UTF8.GetBytes(sbom));
var result = await _extractor.ExtractAsync(stream);
Assert.True(result.Success);
Assert.Null(result.SerialNumber);
Assert.Equal("1.4", result.SpecVersion);
Assert.Null(result.PrimaryBomRef);
}
[Fact]
public async Task ExtractAsync_InvalidJson_ReturnsErrorAsync()
{
using var stream = new MemoryStream(Encoding.UTF8.GetBytes("not valid json"));
var result = await _extractor.ExtractAsync(stream);
Assert.False(result.Success);
Assert.NotNull(result.Error);
}
}

View File

@@ -0,0 +1,22 @@
using System.Text;
using Xunit;
namespace StellaOps.Artifact.Tests;
public sealed partial class CycloneDxExtractorTests
{
[Fact]
public async Task ExtractParsedAsync_ValidCycloneDx_ExtractsParsedSbomAsync()
{
using var stream = new MemoryStream(Encoding.UTF8.GetBytes(ValidCycloneDx));
var result = await _extractor.ExtractParsedAsync(stream);
Assert.Equal("cyclonedx", result.Format);
Assert.Equal("1.5", result.SpecVersion);
Assert.Equal("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", result.SerialNumber);
Assert.Equal("ACME Application", result.Metadata.Name);
Assert.Equal("acme-app", result.Metadata.RootComponentRef);
Assert.Contains(result.Components, component => component.BomRef == "acme-app");
Assert.Contains(result.Components, component => component.BomRef == "component-1");
}
}

View File

@@ -0,0 +1,15 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Artifact.Core;
using StellaOps.Concelier.SbomIntegration.Parsing;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Unit")]
public sealed partial class CycloneDxExtractorTests
{
private const string ValidCycloneDx = "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\",\"serialNumber\":\"urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79\",\"version\":1,\"metadata\":{\"timestamp\":\"2026-01-18T12:00:00Z\",\"component\":{\"type\":\"application\",\"bom-ref\":\"acme-app\",\"name\":\"ACME Application\",\"version\":\"1.0.0\",\"purl\":\"pkg:docker/acme/app@1.0.0\"}},\"components\":[{\"type\":\"library\",\"bom-ref\":\"component-1\",\"name\":\"some-lib\",\"purl\":\"pkg:npm/some-lib@1.0.0\"}]}";
private readonly CycloneDxExtractor _extractor = new(
new ParsedSbomParser(NullLogger<ParsedSbomParser>.Instance));
}

View File

@@ -0,0 +1,98 @@
using StellaOps.Artifact.Infrastructure;
namespace StellaOps.Artifact.Tests;
internal sealed class FakeS3UnifiedClient : IS3UnifiedClient
{
private readonly Dictionary<string, Dictionary<string, StoredObject>> _objects = new();
private readonly object _lock = new();
public int StoredObjectCount
{
get
{
lock (_lock)
{
return _objects.Values.Sum(bucket => bucket.Count);
}
}
}
public Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken ct)
{
lock (_lock)
{
return Task.FromResult(_objects.TryGetValue(bucketName, out var bucket) && bucket.ContainsKey(key));
}
}
public Task PutObjectAsync(string bucketName, string key, Stream content, string contentType, IDictionary<string, string> metadata, CancellationToken ct)
{
using var ms = new MemoryStream();
content.CopyTo(ms);
var data = ms.ToArray();
lock (_lock)
{
var bucket = GetBucket(bucketName);
bucket[key] = new StoredObject(data, new Dictionary<string, string>(metadata));
}
return Task.CompletedTask;
}
public Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken ct)
{
lock (_lock)
{
if (!_objects.TryGetValue(bucketName, out var bucket) || !bucket.TryGetValue(key, out var stored))
{
return Task.FromResult<Stream?>(null);
}
return Task.FromResult<Stream?>(new MemoryStream(stored.Content, writable: false));
}
}
public Task<IDictionary<string, string>?> GetObjectMetadataAsync(string bucketName, string key, CancellationToken ct)
{
lock (_lock)
{
if (!_objects.TryGetValue(bucketName, out var bucket) || !bucket.TryGetValue(key, out var stored))
{
return Task.FromResult<IDictionary<string, string>?>(null);
}
return Task.FromResult<IDictionary<string, string>?>(new Dictionary<string, string>(stored.Metadata));
}
}
public Task DeleteObjectAsync(string bucketName, string key, CancellationToken ct)
{
lock (_lock)
{
if (_objects.TryGetValue(bucketName, out var bucket))
{
bucket.Remove(key);
}
}
return Task.CompletedTask;
}
public Task<IReadOnlyList<string>> ListObjectsAsync(string bucketName, string prefix, CancellationToken ct)
{
lock (_lock)
{
if (!_objects.TryGetValue(bucketName, out var bucket))
{
return Task.FromResult<IReadOnlyList<string>>(Array.Empty<string>());
}
var result = bucket.Keys
.Where(key => key.StartsWith(prefix, StringComparison.Ordinal))
.OrderBy(key => key, StringComparer.Ordinal)
.ToList();
return Task.FromResult<IReadOnlyList<string>>(result);
}
}
private Dictionary<string, StoredObject> GetBucket(string bucketName)
{
if (!_objects.TryGetValue(bucketName, out var bucket))
{
bucket = new Dictionary<string, StoredObject>(StringComparer.Ordinal);
_objects[bucketName] = bucket;
}
return bucket;
}
private sealed record StoredObject(byte[] Content, IDictionary<string, string> Metadata);
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Artifact.Tests;
internal sealed class FixedTimeProvider : TimeProvider
{
private readonly DateTimeOffset _utcNow;
public FixedTimeProvider(DateTimeOffset utcNow)
{
_utcNow = utcNow;
}
public override DateTimeOffset GetUtcNow() => _utcNow;
}

View File

@@ -0,0 +1,79 @@
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using StellaOps.Determinism;
using Xunit;
namespace StellaOps.Artifact.Tests;
[Trait("Category", "Integration")]
[Trait("Intent", "Operational")]
public sealed class S3UnifiedArtifactStoreTests
{
[Fact]
public async Task StoreAndRead_RoundTripsContentAsync()
{
var client = new FakeS3UnifiedClient();
var indexRepository = new InMemoryArtifactIndexRepository(ArtifactTestFixtures.TimeProvider);
var store = CreateStore(client, indexRepository);
using var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("{\"value\":1}"));
var request = ArtifactTestFixtures.CreateRequest("artifact-001", contentStream);
var stored = await store.StoreAsync(request);
Assert.True(stored.Success);
var read = await store.ReadAsync(request.BomRef, request.SerialNumber, request.ArtifactId);
Assert.True(read.Found);
using var reader = new StreamReader(read.Content!);
var payload = await reader.ReadToEndAsync();
Assert.Equal("{\"value\":1}", payload);
}
[Fact]
public async Task Store_DeduplicatesBySha256Async()
{
var client = new FakeS3UnifiedClient();
var indexRepository = new InMemoryArtifactIndexRepository(ArtifactTestFixtures.TimeProvider);
var store = CreateStore(client, indexRepository);
var payload = Encoding.UTF8.GetBytes("{\"value\":2}");
using var firstStream = new MemoryStream(payload);
using var secondStream = new MemoryStream(payload);
var firstRequest = ArtifactTestFixtures.CreateRequest("artifact-001", firstStream);
var secondRequest = ArtifactTestFixtures.CreateRequest("artifact-002", secondStream);
var first = await store.StoreAsync(firstRequest);
var second = await store.StoreAsync(secondRequest);
Assert.True(first.Success);
Assert.True(second.Success);
Assert.Equal(1, client.StoredObjectCount);
Assert.Equal(first.StorageKey, second.StorageKey);
}
private static S3UnifiedArtifactStore CreateStore(FakeS3UnifiedClient client, InMemoryArtifactIndexRepository indexRepository)
{
var options = new S3UnifiedArtifactStoreOptions
{
BucketName = "test-bucket",
Prefix = "artifacts",
EnableDeduplication = true,
AllowOverwrite = false,
MaxArtifactSizeBytes = 1024 * 1024
};
return new S3UnifiedArtifactStore(
client,
indexRepository,
Options.Create(options),
ArtifactTestFixtures.TimeProvider,
new SequentialGuidProvider(Guid.Parse("00000000-0000-0000-0000-000000000001")),
NullLogger<S3UnifiedArtifactStore>.Instance);
}
}

View File

@@ -0,0 +1,40 @@
// -----------------------------------------------------------------------------
// ArtifactController.Delete.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Delete endpoint for unified artifact storage
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Deletes an artifact (soft delete).
/// </summary>
[HttpDelete("{bomRef}/{serialNumber}/{artifactId}")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DeleteArtifactAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var deleted = await _artifactStore.DeleteAsync(decodedBomRef, decodedSerial, artifactId, ct)
.ConfigureAwait(false);
if (!deleted)
{
return NotFound(BuildProblemDetails(
"Not found",
$"Artifact not found: {artifactId}"));
}
return NoContent();
}
}

View File

@@ -0,0 +1,40 @@
// -----------------------------------------------------------------------------
// ArtifactController.Download.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Download endpoint for unified artifact storage content
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Downloads artifact content.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}/content")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DownloadArtifactAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var result = await _artifactStore.ReadAsync(decodedBomRef, decodedSerial, artifactId, ct)
.ConfigureAwait(false);
if (!result.Found || result.Content == null)
{
return NotFound(BuildProblemDetails(
"Not found",
result.ErrorMessage ?? $"Artifact not found: {artifactId}"));
}
return File(result.Content, result.Metadata!.ContentType, $"{artifactId}.json");
}
}

View File

@@ -0,0 +1,32 @@
// -----------------------------------------------------------------------------
// ArtifactController.Fetch.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Content fetch helpers for artifact submissions
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Fetches content from a URI (S3, HTTP, file).
/// Sprint: SPRINT_20260118_017 (AS-005) - Validates dsse_uri accessibility.
/// </summary>
private async Task<byte[]> FetchContentFromUriAsync(string uri, CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(uri);
if (!Uri.TryCreate(uri, UriKind.Absolute, out var parsedUri))
{
throw new ArgumentException($"Invalid URI format: {uri}");
}
return parsedUri.Scheme.ToLowerInvariant() switch
{
"s3" => await FetchFromS3Async(parsedUri, ct).ConfigureAwait(false),
"http" or "https" => await FetchFromHttpAsync(parsedUri, ct).ConfigureAwait(false),
"file" => await FetchFromFileAsync(parsedUri, ct).ConfigureAwait(false),
_ => throw new NotSupportedException($"URI scheme not supported: {parsedUri.Scheme}")
};
}
}

View File

@@ -0,0 +1,33 @@
// -----------------------------------------------------------------------------
// ArtifactController.FetchFile.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: File fetch for artifact submissions
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
private async Task<byte[]> FetchFromFileAsync(Uri uri, CancellationToken ct)
{
var filePath = uri.LocalPath;
_logger.LogDebug("Fetching from file: {Path}", filePath);
if (!System.IO.File.Exists(filePath))
{
throw new FileNotFoundException($"File not accessible: {filePath}");
}
var fileInfo = new FileInfo(filePath);
if (fileInfo.Length > 100 * 1024 * 1024)
{
throw new InvalidOperationException(
$"File too large: {fileInfo.Length} bytes exceeds 100MB limit");
}
return await System.IO.File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// ArtifactController.FetchHttp.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: HTTP fetch for artifact submissions
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using System.Net.Http;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
private async Task<byte[]> FetchFromHttpAsync(Uri uri, CancellationToken ct)
{
_logger.LogDebug("Fetching from HTTP: {Uri}", uri);
using var httpClient = new HttpClient();
httpClient.Timeout = TimeSpan.FromSeconds(30);
try
{
using var headRequest = new HttpRequestMessage(HttpMethod.Head, uri);
using var headResponse = await httpClient.SendAsync(headRequest, ct).ConfigureAwait(false);
if (!headResponse.IsSuccessStatusCode)
{
throw new InvalidOperationException(
$"URI not accessible: {uri} returned {headResponse.StatusCode}");
}
var contentLength = headResponse.Content.Headers.ContentLength;
if (contentLength > 100 * 1024 * 1024)
{
throw new InvalidOperationException(
$"Content too large: {contentLength} bytes exceeds 100MB limit");
}
return await httpClient.GetByteArrayAsync(uri, ct).ConfigureAwait(false);
}
catch (HttpRequestException ex)
{
throw new InvalidOperationException($"Failed to fetch from {uri}: {ex.Message}", ex);
}
}
}

View File

@@ -0,0 +1,25 @@
// -----------------------------------------------------------------------------
// ArtifactController.FetchS3.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: S3 fetch placeholder for artifact submissions
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
private Task<byte[]> FetchFromS3Async(Uri uri, CancellationToken ct)
{
ct.ThrowIfCancellationRequested();
var bucket = uri.Host;
var key = uri.AbsolutePath.TrimStart('/');
_logger.LogDebug("Fetching from S3: bucket={Bucket}, key={Key}", bucket, key);
return Task.FromException<byte[]>(new NotImplementedException(
$"S3 fetch not fully implemented. Configure S3 client. URI: s3://{bucket}/{key}"));
}
}

View File

@@ -0,0 +1,52 @@
// -----------------------------------------------------------------------------
// ArtifactController.Get.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Get endpoint for unified artifact storage metadata
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Gets a specific artifact by its composite key.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}")]
[ActionName(GetArtifactActionName)]
[ProducesResponseType(typeof(ArtifactMetadataResponse), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> GetArtifactAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var metadata = await _artifactStore.GetMetadataAsync(decodedBomRef, decodedSerial, artifactId, ct)
.ConfigureAwait(false);
if (metadata == null)
{
return NotFound(BuildProblemDetails(
"Not found",
$"Artifact not found: {artifactId}"));
}
return Ok(new ArtifactMetadataResponse
{
ArtifactId = metadata.ArtifactId,
BomRef = metadata.BomRef,
SerialNumber = metadata.SerialNumber,
StorageKey = metadata.StorageKey,
ContentType = metadata.ContentType,
Sha256 = metadata.Sha256,
SizeBytes = metadata.SizeBytes,
CreatedAt = metadata.CreatedAt,
ArtifactType = metadata.Type.ToString()
});
}
}

View File

@@ -0,0 +1,73 @@
// -----------------------------------------------------------------------------
// ArtifactController.Helpers.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Shared helpers for artifact controller operations
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
private Guid GetTenantId()
{
var tenantClaim = User.FindFirst("tenant_id")?.Value;
return Guid.TryParse(tenantClaim, out var id) ? id : Guid.Empty;
}
private static string GenerateSyntheticSerial(string bomRef)
{
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(bomRef));
var guid = new Guid(hash.Take(16).ToArray());
return $"urn:uuid:{guid}";
}
private static ArtifactType ParseArtifactType(string? type)
{
if (string.IsNullOrEmpty(type))
{
return ArtifactType.Unknown;
}
return Enum.TryParse(type, ignoreCase: true, out ArtifactType result)
? result
: ArtifactType.Unknown;
}
private static string DetermineContentType(string? artifactType)
{
return artifactType?.ToLowerInvariant() switch
{
"sbom" => "application/vnd.cyclonedx+json",
"vex" => "application/vnd.openvex+json",
"dsseenvelope" => "application/vnd.dsse+json",
"rekorproof" => "application/json",
_ => "application/json"
};
}
private static int ParseContinuationToken(string? token)
{
if (string.IsNullOrEmpty(token))
{
return 0;
}
try
{
var decoded = System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(token));
return int.TryParse(decoded, out var offset) ? offset : 0;
}
catch
{
return 0;
}
}
private static string GenerateContinuationToken(int offset)
{
return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(offset.ToString()));
}
}

View File

@@ -0,0 +1,100 @@
// -----------------------------------------------------------------------------
// ArtifactController.List.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: List endpoint for unified artifact storage
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Lists artifacts by bom-ref with optional filters.
/// </summary>
/// <param name="bomRef">Required bom-ref filter.</param>
/// <param name="serialNumber">Optional serial number filter.</param>
/// <param name="from">Optional start date filter.</param>
/// <param name="to">Optional end date filter.</param>
/// <param name="limit">Maximum results (default 100).</param>
/// <param name="continuationToken">Pagination token.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of artifact metadata.</returns>
[HttpGet]
[ProducesResponseType(typeof(ArtifactListResponse), StatusCodes.Status200OK)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
public async Task<IActionResult> ListArtifactsAsync(
[FromQuery(Name = "bom_ref"), Required] string bomRef,
[FromQuery(Name = "serial_number")] string? serialNumber,
[FromQuery] DateTimeOffset? from,
[FromQuery] DateTimeOffset? to,
[FromQuery] int limit = 100,
[FromQuery(Name = "continuation_token")] string? continuationToken = null,
CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(bomRef))
{
return BadRequest(BuildProblemDetails("Invalid request", "bom_ref query parameter is required"));
}
if (limit < 1 || limit > 1000)
{
limit = 100;
}
try
{
var artifacts = await _artifactStore.ListAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
if (from.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt >= from.Value).ToList();
}
if (to.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt < to.Value).ToList();
}
var offset = ParseContinuationToken(continuationToken);
var totalCount = artifacts.Count;
var pagedArtifacts = artifacts.Skip(offset).Take(limit).ToList();
string? nextToken = null;
if (offset + limit < totalCount)
{
nextToken = GenerateContinuationToken(offset + limit);
}
var response = new ArtifactListResponse
{
Artifacts = pagedArtifacts.Select(a => new ArtifactListItem
{
ArtifactId = a.ArtifactId,
BomRef = a.BomRef,
SerialNumber = a.SerialNumber,
StorageKey = a.StorageKey,
ContentType = a.ContentType,
Sha256 = a.Sha256,
SizeBytes = a.SizeBytes,
CreatedAt = a.CreatedAt,
ArtifactType = a.Type.ToString()
}).ToList(),
Total = totalCount,
ContinuationToken = nextToken
};
return Ok(response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to list artifacts for bom-ref {BomRef}", bomRef);
return StatusCode(
StatusCodes.Status500InternalServerError,
BuildProblemDetails("Internal error", "An unexpected error occurred while listing artifacts"));
}
}
}

View File

@@ -0,0 +1,100 @@
// -----------------------------------------------------------------------------
// ArtifactController.Submit.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Submit endpoint for unified artifact storage
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
/// <summary>
/// Submits an artifact to the unified store.
/// </summary>
/// <param name="request">Artifact submission request.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Created artifact metadata.</returns>
[HttpPost]
[ProducesResponseType(typeof(ArtifactSubmissionResponse), StatusCodes.Status201Created)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status500InternalServerError)]
public async Task<IActionResult> SubmitArtifactAsync(
[FromBody] ArtifactSubmissionRequest request,
CancellationToken ct)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
if (string.IsNullOrWhiteSpace(request.BomRef))
{
return BadRequest(BuildProblemDetails(
"Invalid bom_ref", "bom_ref is required and must be a valid Package URL or CycloneDX bom-ref"));
}
try
{
var serialNumber = request.CyclonedxSerial ?? GenerateSyntheticSerial(request.BomRef);
var (content, contentError) = await ResolveContentAsync(request, ct).ConfigureAwait(false);
if (contentError != null)
{
return BadRequest(contentError);
}
var artifactId = request.ArtifactId ?? Guid.NewGuid().ToString();
var contentType = request.ContentType ?? DetermineContentType(request.ArtifactType);
var tenantId = GetTenantId();
using var contentStream = new MemoryStream(content!);
var storeRequest = new ArtifactStoreRequest
{
BomRef = request.BomRef,
SerialNumber = serialNumber,
ArtifactId = artifactId,
Content = contentStream,
ContentType = contentType,
Type = ParseArtifactType(request.ArtifactType),
Metadata = request.Metadata,
TenantId = tenantId,
Overwrite = request.Overwrite ?? false
};
var result = await _artifactStore.StoreAsync(storeRequest, ct).ConfigureAwait(false);
if (!result.Success)
{
return StatusCode(
StatusCodes.Status500InternalServerError,
BuildProblemDetails("Storage failed", result.ErrorMessage ?? "Storage failed"));
}
var response = new ArtifactSubmissionResponse
{
ArtifactId = artifactId,
BomRef = request.BomRef,
SerialNumber = serialNumber,
StorageKey = result.StorageKey!,
Sha256 = result.Sha256!,
SizeBytes = result.SizeBytes!.Value,
WasCreated = result.WasCreated,
CreatedAt = DateTimeOffset.UtcNow
};
_logger.LogInformation(
"Artifact submitted: {ArtifactId} for bom-ref {BomRef}", artifactId, request.BomRef);
return CreatedAtAction(
GetArtifactActionName, new { bomRef = request.BomRef, serialNumber, artifactId }, response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to submit artifact");
return StatusCode(
StatusCodes.Status500InternalServerError,
BuildProblemDetails("Internal error", "An unexpected error occurred while storing the artifact"));
}
}
}

View File

@@ -0,0 +1,50 @@
// -----------------------------------------------------------------------------
// ArtifactController.SubmitHelpers.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Helper utilities for artifact submission
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Mvc;
namespace StellaOps.Artifact.Api;
public sealed partial class ArtifactController
{
private static ProblemDetails BuildProblemDetails(string title, string detail)
{
return new ProblemDetails
{
Title = title,
Detail = detail
};
}
private async Task<(byte[]? Content, ProblemDetails? Error)> ResolveContentAsync(
ArtifactSubmissionRequest request,
CancellationToken ct)
{
if (!string.IsNullOrEmpty(request.ContentBase64))
{
try
{
return (Convert.FromBase64String(request.ContentBase64), null);
}
catch (FormatException)
{
return (null, BuildProblemDetails(
"Invalid content",
"content_base64 must be valid Base64-encoded data"));
}
}
if (!string.IsNullOrEmpty(request.DsseUri))
{
var content = await FetchContentFromUriAsync(request.DsseUri, ct).ConfigureAwait(false);
return (content, null);
}
return (null, BuildProblemDetails(
"Missing content",
"Either content_base64 or dsse_uri must be provided"));
}
}

View File

@@ -5,14 +5,10 @@
// AS-007 - Query endpoint for artifacts by bom-ref
// Description: API controller for unified artifact storage
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Artifact.Api;
@@ -23,589 +19,18 @@ namespace StellaOps.Artifact.Api;
[Route("api/v1/artifacts")]
[Produces("application/json")]
[Authorize]
public sealed class ArtifactController : ControllerBase
public sealed partial class ArtifactController : ControllerBase
{
private const string GetArtifactActionName = "GetArtifact";
private readonly IArtifactStore _artifactStore;
private readonly ICycloneDxExtractor _cycloneDxExtractor;
private readonly ILogger<ArtifactController> _logger;
public ArtifactController(
IArtifactStore artifactStore,
ICycloneDxExtractor cycloneDxExtractor,
ILogger<ArtifactController> logger)
{
_artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore));
_cycloneDxExtractor = cycloneDxExtractor ?? throw new ArgumentNullException(nameof(cycloneDxExtractor));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Submits an artifact to the unified store.
/// </summary>
/// <param name="request">Artifact submission request.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Created artifact metadata.</returns>
[HttpPost]
[ProducesResponseType(typeof(ArtifactSubmissionResponse), StatusCodes.Status201Created)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status500InternalServerError)]
public async Task<IActionResult> SubmitArtifact(
[FromBody] ArtifactSubmissionRequest request,
CancellationToken ct)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
try
{
// Validate bom-ref format (should be a valid purl or bom-ref)
if (string.IsNullOrWhiteSpace(request.BomRef))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid bom_ref",
Detail = "bom_ref is required and must be a valid Package URL or CycloneDX bom-ref"
});
}
// Get or generate serial number
var serialNumber = request.CyclonedxSerial ?? GenerateSyntheticSerial(request.BomRef);
// Decode base64 content if provided
byte[] content;
if (!string.IsNullOrEmpty(request.ContentBase64))
{
try
{
content = Convert.FromBase64String(request.ContentBase64);
}
catch (FormatException)
{
return BadRequest(new ProblemDetails
{
Title = "Invalid content",
Detail = "content_base64 must be valid Base64-encoded data"
});
}
}
else if (request.DsseUri != null)
{
// Fetch content from URI (S3, HTTP, etc.)
content = await FetchContentFromUri(request.DsseUri, ct);
}
else
{
return BadRequest(new ProblemDetails
{
Title = "Missing content",
Detail = "Either content_base64 or dsse_uri must be provided"
});
}
// Generate artifact ID if not provided
var artifactId = request.ArtifactId ?? Guid.NewGuid().ToString();
// Determine content type
var contentType = request.ContentType ?? DetermineContentType(request.ArtifactType);
// Get tenant from context
var tenantId = GetTenantId();
// Store the artifact
using var contentStream = new MemoryStream(content);
var storeRequest = new ArtifactStoreRequest
{
BomRef = request.BomRef,
SerialNumber = serialNumber,
ArtifactId = artifactId,
Content = contentStream,
ContentType = contentType,
Type = ParseArtifactType(request.ArtifactType),
Metadata = request.Metadata,
TenantId = tenantId,
Overwrite = request.Overwrite ?? false
};
var result = await _artifactStore.StoreAsync(storeRequest, ct);
if (!result.Success)
{
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Storage failed",
Detail = result.ErrorMessage
});
}
var response = new ArtifactSubmissionResponse
{
ArtifactId = artifactId,
BomRef = request.BomRef,
SerialNumber = serialNumber,
StorageKey = result.StorageKey!,
Sha256 = result.Sha256!,
SizeBytes = result.SizeBytes!.Value,
WasCreated = result.WasCreated,
CreatedAt = DateTimeOffset.UtcNow
};
_logger.LogInformation(
"Artifact submitted: {ArtifactId} for bom-ref {BomRef}",
artifactId, request.BomRef);
return CreatedAtAction(
nameof(GetArtifact),
new { bomRef = request.BomRef, serialNumber, artifactId },
response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to submit artifact");
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Internal error",
Detail = "An unexpected error occurred while storing the artifact"
});
}
}
/// <summary>
/// Lists artifacts by bom-ref with optional filters.
/// </summary>
/// <param name="bomRef">Required bom-ref filter.</param>
/// <param name="serialNumber">Optional serial number filter.</param>
/// <param name="from">Optional start date filter.</param>
/// <param name="to">Optional end date filter.</param>
/// <param name="limit">Maximum results (default 100).</param>
/// <param name="continuationToken">Pagination token.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of artifact metadata.</returns>
[HttpGet]
[ProducesResponseType(typeof(ArtifactListResponse), StatusCodes.Status200OK)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
public async Task<IActionResult> ListArtifacts(
[FromQuery(Name = "bom_ref"), Required] string bomRef,
[FromQuery(Name = "serial_number")] string? serialNumber,
[FromQuery] DateTimeOffset? from,
[FromQuery] DateTimeOffset? to,
[FromQuery] int limit = 100,
[FromQuery(Name = "continuation_token")] string? continuationToken = null,
CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(bomRef))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "bom_ref query parameter is required"
});
}
if (limit < 1 || limit > 1000)
{
limit = 100;
}
try
{
var artifacts = await _artifactStore.ListAsync(bomRef, serialNumber, ct);
// Apply time filters if provided
if (from.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt >= from.Value).ToList();
}
if (to.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt < to.Value).ToList();
}
// Apply pagination
var offset = ParseContinuationToken(continuationToken);
var totalCount = artifacts.Count;
var pagedArtifacts = artifacts.Skip(offset).Take(limit).ToList();
// Generate next continuation token if there are more results
string? nextToken = null;
if (offset + limit < totalCount)
{
nextToken = GenerateContinuationToken(offset + limit);
}
var response = new ArtifactListResponse
{
Artifacts = pagedArtifacts.Select(a => new ArtifactListItem
{
ArtifactId = a.ArtifactId,
BomRef = a.BomRef,
SerialNumber = a.SerialNumber,
StorageKey = a.StorageKey,
ContentType = a.ContentType,
Sha256 = a.Sha256,
SizeBytes = a.SizeBytes,
CreatedAt = a.CreatedAt,
ArtifactType = a.Type.ToString()
}).ToList(),
Total = totalCount,
ContinuationToken = nextToken
};
return Ok(response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to list artifacts for bom-ref {BomRef}", bomRef);
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Internal error",
Detail = "An unexpected error occurred while listing artifacts"
});
}
}
/// <summary>
/// Gets a specific artifact by its composite key.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}")]
[ProducesResponseType(typeof(ArtifactMetadataResponse), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> GetArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var metadata = await _artifactStore.GetMetadataAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (metadata == null)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = $"Artifact not found: {artifactId}"
});
}
return Ok(new ArtifactMetadataResponse
{
ArtifactId = metadata.ArtifactId,
BomRef = metadata.BomRef,
SerialNumber = metadata.SerialNumber,
StorageKey = metadata.StorageKey,
ContentType = metadata.ContentType,
Sha256 = metadata.Sha256,
SizeBytes = metadata.SizeBytes,
CreatedAt = metadata.CreatedAt,
ArtifactType = metadata.Type.ToString()
});
}
/// <summary>
/// Downloads artifact content.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}/content")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DownloadArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var result = await _artifactStore.ReadAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (!result.Found || result.Content == null)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = result.ErrorMessage ?? $"Artifact not found: {artifactId}"
});
}
return File(result.Content, result.Metadata!.ContentType, $"{artifactId}.json");
}
/// <summary>
/// Deletes an artifact (soft delete).
/// </summary>
[HttpDelete("{bomRef}/{serialNumber}/{artifactId}")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DeleteArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var deleted = await _artifactStore.DeleteAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (!deleted)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = $"Artifact not found: {artifactId}"
});
}
return NoContent();
}
private Guid GetTenantId()
{
// TODO: Extract tenant ID from authenticated user context
var tenantClaim = User.FindFirst("tenant_id")?.Value;
return Guid.TryParse(tenantClaim, out var id) ? id : Guid.Empty;
}
private static string GenerateSyntheticSerial(string bomRef)
{
// Generate a deterministic serial based on bom-ref SHA-256
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(bomRef));
var guid = new Guid(hash.Take(16).ToArray());
return $"urn:uuid:{guid}";
}
private static ArtifactType ParseArtifactType(string? type)
{
if (string.IsNullOrEmpty(type))
return ArtifactType.Unknown;
return Enum.TryParse<ArtifactType>(type, ignoreCase: true, out var result)
? result
: ArtifactType.Unknown;
}
private static string DetermineContentType(string? artifactType)
{
return artifactType?.ToLowerInvariant() switch
{
"sbom" => "application/vnd.cyclonedx+json",
"vex" => "application/vnd.openvex+json",
"dsseenvelope" => "application/vnd.dsse+json",
"rekorproof" => "application/json",
_ => "application/json"
};
}
/// <summary>
/// Fetches content from a URI (S3, HTTP, file).
/// Sprint: SPRINT_20260118_017 (AS-005) - Validates dsse_uri accessibility
/// </summary>
private async Task<byte[]> FetchContentFromUri(string uri, CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(uri);
// Validate URI format
if (!Uri.TryCreate(uri, UriKind.Absolute, out var parsedUri))
{
throw new ArgumentException($"Invalid URI format: {uri}");
}
return parsedUri.Scheme.ToLowerInvariant() switch
{
"s3" => await FetchFromS3Async(parsedUri, ct),
"http" or "https" => await FetchFromHttpAsync(parsedUri, ct),
"file" => await FetchFromFileAsync(parsedUri, ct),
_ => throw new NotSupportedException($"URI scheme not supported: {parsedUri.Scheme}")
};
}
private async Task<byte[]> FetchFromS3Async(Uri uri, CancellationToken ct)
{
// Parse S3 URI: s3://bucket/key
var bucket = uri.Host;
var key = uri.AbsolutePath.TrimStart('/');
_logger.LogDebug("Fetching from S3: bucket={Bucket}, key={Key}", bucket, key);
// Validate accessibility by checking existence first
// This would use the S3 client from DI in a real implementation
// For now, we document the expected behavior and throw
throw new NotImplementedException(
$"S3 fetch not fully implemented. Configure S3 client. URI: s3://{bucket}/{key}");
}
private async Task<byte[]> FetchFromHttpAsync(Uri uri, CancellationToken ct)
{
_logger.LogDebug("Fetching from HTTP: {Uri}", uri);
// Use HttpClient from DI for proper lifecycle management
using var httpClient = new HttpClient();
httpClient.Timeout = TimeSpan.FromSeconds(30);
try
{
// HEAD request first to validate accessibility
using var headRequest = new HttpRequestMessage(HttpMethod.Head, uri);
using var headResponse = await httpClient.SendAsync(headRequest, ct);
if (!headResponse.IsSuccessStatusCode)
{
throw new InvalidOperationException(
$"URI not accessible: {uri} returned {headResponse.StatusCode}");
}
// Check content length to prevent fetching huge files
var contentLength = headResponse.Content.Headers.ContentLength;
if (contentLength > 100 * 1024 * 1024) // 100MB max
{
throw new InvalidOperationException(
$"Content too large: {contentLength} bytes exceeds 100MB limit");
}
// Now fetch the actual content
return await httpClient.GetByteArrayAsync(uri, ct);
}
catch (HttpRequestException ex)
{
throw new InvalidOperationException($"Failed to fetch from {uri}: {ex.Message}", ex);
}
}
private async Task<byte[]> FetchFromFileAsync(Uri uri, CancellationToken ct)
{
var filePath = uri.LocalPath;
_logger.LogDebug("Fetching from file: {Path}", filePath);
if (!System.IO.File.Exists(filePath))
{
throw new FileNotFoundException($"File not accessible: {filePath}");
}
var fileInfo = new FileInfo(filePath);
if (fileInfo.Length > 100 * 1024 * 1024) // 100MB max
{
throw new InvalidOperationException(
$"File too large: {fileInfo.Length} bytes exceeds 100MB limit");
}
return await System.IO.File.ReadAllBytesAsync(filePath, ct);
}
private static int ParseContinuationToken(string? token)
{
if (string.IsNullOrEmpty(token))
return 0;
try
{
var decoded = System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(token));
return int.TryParse(decoded, out var offset) ? offset : 0;
}
catch
{
return 0;
}
}
private static string GenerateContinuationToken(int offset)
{
return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(offset.ToString()));
}
}
/// <summary>
/// Request to submit an artifact.
/// </summary>
public sealed record ArtifactSubmissionRequest
{
/// <summary>Package URL or CycloneDX bom-ref.</summary>
[Required]
public required string BomRef { get; init; }
/// <summary>CycloneDX serialNumber (optional, generated if missing).</summary>
public string? CyclonedxSerial { get; init; }
/// <summary>Artifact ID (optional, generated if missing).</summary>
public string? ArtifactId { get; init; }
/// <summary>Base64-encoded content.</summary>
public string? ContentBase64 { get; init; }
/// <summary>URI to fetch content from (S3, HTTP).</summary>
public string? DsseUri { get; init; }
/// <summary>Content type (optional, inferred from artifact_type).</summary>
public string? ContentType { get; init; }
/// <summary>Artifact type: Sbom, Vex, DsseEnvelope, etc.</summary>
public string? ArtifactType { get; init; }
/// <summary>Rekor transparency log UUID (optional).</summary>
public string? RekorUuid { get; init; }
/// <summary>Additional metadata.</summary>
public Dictionary<string, string>? Metadata { get; init; }
/// <summary>Whether to overwrite existing artifact.</summary>
public bool? Overwrite { get; init; }
}
/// <summary>
/// Response from artifact submission.
/// </summary>
public sealed record ArtifactSubmissionResponse
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required bool WasCreated { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Response for listing artifacts.
/// </summary>
public sealed record ArtifactListResponse
{
public required IReadOnlyList<ArtifactListItem> Artifacts { get; init; }
public required int Total { get; init; }
public string? ContinuationToken { get; init; }
}
/// <summary>
/// Item in artifact list response.
/// </summary>
public sealed record ArtifactListItem
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string ContentType { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public required string ArtifactType { get; init; }
}
/// <summary>
/// Response for artifact metadata.
/// </summary>
public sealed record ArtifactMetadataResponse
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string ContentType { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public required string ArtifactType { get; init; }
}

View File

@@ -0,0 +1,23 @@
// -----------------------------------------------------------------------------
// ArtifactListItem.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Item payload for artifact listing
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Api;
/// <summary>
/// Item in artifact list response.
/// </summary>
public sealed record ArtifactListItem
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string ContentType { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public required string ArtifactType { get; init; }
}

View File

@@ -0,0 +1,17 @@
// -----------------------------------------------------------------------------
// ArtifactListResponse.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Response payload for artifact listing
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Api;
/// <summary>
/// Response for listing artifacts.
/// </summary>
public sealed record ArtifactListResponse
{
public required IReadOnlyList<ArtifactListItem> Artifacts { get; init; }
public required int Total { get; init; }
public string? ContinuationToken { get; init; }
}

View File

@@ -0,0 +1,23 @@
// -----------------------------------------------------------------------------
// ArtifactMetadataResponse.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-007 - Query endpoint for artifacts by bom-ref
// Description: Response payload for artifact metadata
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Api;
/// <summary>
/// Response for artifact metadata.
/// </summary>
public sealed record ArtifactMetadataResponse
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string ContentType { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public required string ArtifactType { get; init; }
}

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// ArtifactSubmissionRequest.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Request payload for artifact submissions
// -----------------------------------------------------------------------------
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Artifact.Api;
/// <summary>
/// Request to submit an artifact.
/// </summary>
public sealed record ArtifactSubmissionRequest
{
/// <summary>Package URL or CycloneDX bom-ref.</summary>
[Required]
public required string BomRef { get; init; }
/// <summary>CycloneDX serialNumber (optional, generated if missing).</summary>
public string? CyclonedxSerial { get; init; }
/// <summary>Artifact ID (optional, generated if missing).</summary>
public string? ArtifactId { get; init; }
/// <summary>Base64-encoded content.</summary>
public string? ContentBase64 { get; init; }
/// <summary>URI to fetch content from (S3, HTTP).</summary>
public string? DsseUri { get; init; }
/// <summary>Content type (optional, inferred from artifact_type).</summary>
public string? ContentType { get; init; }
/// <summary>Artifact type: Sbom, Vex, DsseEnvelope, etc.</summary>
public string? ArtifactType { get; init; }
/// <summary>Rekor transparency log UUID (optional).</summary>
public string? RekorUuid { get; init; }
/// <summary>Additional metadata.</summary>
public Dictionary<string, string>? Metadata { get; init; }
/// <summary>Whether to overwrite existing artifact.</summary>
public bool? Overwrite { get; init; }
}

View File

@@ -0,0 +1,22 @@
// -----------------------------------------------------------------------------
// ArtifactSubmissionResponse.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-005 - Create artifact submission endpoint
// Description: Response payload for artifact submissions
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Api;
/// <summary>
/// Response from artifact submission.
/// </summary>
public sealed record ArtifactSubmissionResponse
{
public required string ArtifactId { get; init; }
public required string BomRef { get; init; }
public required string SerialNumber { get; init; }
public required string StorageKey { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public required bool WasCreated { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,68 @@
// -----------------------------------------------------------------------------
// ArtifactMetadata.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Metadata record for unified artifact store entries
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Artifact metadata.
/// </summary>
public sealed record ArtifactMetadata
{
/// <summary>
/// Full storage key/path.
/// </summary>
public required string StorageKey { get; init; }
/// <summary>
/// Package URL or bom-ref.
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// CycloneDX serialNumber.
/// </summary>
public required string SerialNumber { get; init; }
/// <summary>
/// Artifact ID.
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Content type (MIME).
/// </summary>
public required string ContentType { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public required long SizeBytes { get; init; }
/// <summary>
/// SHA-256 hash.
/// </summary>
public required string Sha256 { get; init; }
/// <summary>
/// Creation timestamp.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Artifact type.
/// </summary>
public ArtifactType Type { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public Guid TenantId { get; init; }
/// <summary>
/// Additional metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? ExtraMetadata { get; init; }
}

View File

@@ -0,0 +1,64 @@
// -----------------------------------------------------------------------------
// ArtifactReadResult.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Read operation result for unified artifact store operations
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Result of reading an artifact.
/// </summary>
public sealed record ArtifactReadResult : IDisposable
{
/// <summary>
/// Whether the artifact was found.
/// </summary>
public required bool Found { get; init; }
/// <summary>
/// Content stream (caller must dispose).
/// </summary>
public Stream? Content { get; init; }
/// <summary>
/// Artifact metadata.
/// </summary>
public ArtifactMetadata? Metadata { get; init; }
/// <summary>
/// Error message if not found.
/// </summary>
public string? ErrorMessage { get; init; }
/// <inheritdoc />
public void Dispose()
{
Content?.Dispose();
}
/// <summary>
/// Creates a found result.
/// </summary>
public static ArtifactReadResult Succeeded(Stream content, ArtifactMetadata metadata)
{
return new ArtifactReadResult
{
Found = true,
Content = content,
Metadata = metadata
};
}
/// <summary>
/// Creates a not found result.
/// </summary>
public static ArtifactReadResult NotFound(string? message = null)
{
return new ArtifactReadResult
{
Found = false,
ErrorMessage = message ?? "Artifact not found"
};
}
}

View File

@@ -0,0 +1,58 @@
// -----------------------------------------------------------------------------
// ArtifactStoreRequest.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Storage request for unified artifact store operations
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Request to store an artifact.
/// </summary>
public sealed record ArtifactStoreRequest
{
/// <summary>
/// Package URL (purl) or CycloneDX bom-ref.
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// CycloneDX serialNumber URN (e.g., urn:uuid:...).
/// </summary>
public required string SerialNumber { get; init; }
/// <summary>
/// Unique artifact identifier (e.g., DSSE UUID, hash).
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Artifact content stream.
/// </summary>
public required Stream Content { get; init; }
/// <summary>
/// Content type (MIME type).
/// </summary>
public required string ContentType { get; init; }
/// <summary>
/// Artifact type classification.
/// </summary>
public ArtifactType Type { get; init; } = ArtifactType.Unknown;
/// <summary>
/// Additional metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
/// <summary>
/// Tenant ID for multi-tenancy.
/// </summary>
public Guid TenantId { get; init; }
/// <summary>
/// Whether to overwrite existing artifact.
/// </summary>
public bool Overwrite { get; init; } = false;
}

View File

@@ -0,0 +1,70 @@
// -----------------------------------------------------------------------------
// ArtifactStoreResult.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Store operation result for unified artifact store operations
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Result of storing an artifact.
/// </summary>
public sealed record ArtifactStoreResult
{
/// <summary>
/// Whether storage was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Storage key (full path).
/// </summary>
public string? StorageKey { get; init; }
/// <summary>
/// SHA-256 hash of stored content.
/// </summary>
public string? Sha256 { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long? SizeBytes { get; init; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? ErrorMessage { get; init; }
/// <summary>
/// Whether this was a new artifact or an update.
/// </summary>
public bool WasCreated { get; init; }
/// <summary>
/// Creates a success result.
/// </summary>
public static ArtifactStoreResult Succeeded(string storageKey, string sha256, long sizeBytes, bool wasCreated = true)
{
return new ArtifactStoreResult
{
Success = true,
StorageKey = storageKey,
Sha256 = sha256,
SizeBytes = sizeBytes,
WasCreated = wasCreated
};
}
/// <summary>
/// Creates a failure result.
/// </summary>
public static ArtifactStoreResult Failed(string errorMessage)
{
return new ArtifactStoreResult
{
Success = false,
ErrorMessage = errorMessage
};
}
}

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// ArtifactType.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Classification enum for artifact types
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Artifact type classification.
/// </summary>
public enum ArtifactType
{
/// <summary>Unknown type.</summary>
Unknown,
/// <summary>SBOM (CycloneDX or SPDX).</summary>
Sbom,
/// <summary>VEX document.</summary>
Vex,
/// <summary>DSSE envelope/attestation.</summary>
DsseEnvelope,
/// <summary>Rekor transparency log proof.</summary>
RekorProof,
/// <summary>Verdict record.</summary>
Verdict,
/// <summary>Policy bundle.</summary>
PolicyBundle,
/// <summary>Provenance attestation.</summary>
Provenance,
/// <summary>Build log.</summary>
BuildLog,
/// <summary>Test results.</summary>
TestResults,
/// <summary>Scan results.</summary>
ScanResults
}

View File

@@ -0,0 +1,62 @@
// -----------------------------------------------------------------------------
// BomRefEncoder.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Utility for encoding bom-refs for storage paths
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Utility for encoding bom-refs for path usage.
/// </summary>
public static class BomRefEncoder
{
/// <summary>
/// Encodes a bom-ref/purl for use in storage paths.
/// Handles special characters in purls (/, @, :, etc.).
/// </summary>
public static string Encode(string bomRef)
{
if (string.IsNullOrWhiteSpace(bomRef))
{
return "unknown";
}
// Replace path-unsafe characters
return bomRef
.Replace("/", "_")
.Replace(":", "_")
.Replace("@", "_at_")
.Replace("?", "_q_")
.Replace("#", "_h_")
.Replace("%", "_p_");
}
/// <summary>
/// Decodes an encoded bom-ref back to original form.
/// </summary>
public static string Decode(string encoded)
{
if (string.IsNullOrWhiteSpace(encoded))
{
return string.Empty;
}
return encoded
.Replace("_at_", "@")
.Replace("_q_", "?")
.Replace("_h_", "#")
.Replace("_p_", "%");
// Note: / and : remain as _ since they're ambiguous
}
/// <summary>
/// Builds the storage path for an artifact.
/// </summary>
public static string BuildPath(string bomRef, string serialNumber, string artifactId)
{
var encodedBomRef = Encode(bomRef);
var encodedSerial = Encode(serialNumber);
return $"artifacts/{encodedBomRef}/{encodedSerial}/{artifactId}.json";
}
}

View File

@@ -0,0 +1,72 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.Auto.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Auto-detection for CycloneDX JSON/XML extraction
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
var buffer = new byte[1];
var bytesRead = await stream.ReadAsync(buffer, ct).ConfigureAwait(false);
if (bytesRead == 0)
{
return new CycloneDxMetadata { Success = false, Error = "Empty stream" };
}
stream.Position = 0;
var firstChar = (char)buffer[0];
if (firstChar == '\uFEFF')
{
buffer = new byte[1];
await stream.ReadExactlyAsync(buffer, ct).ConfigureAwait(false);
stream.Position = 0;
if (stream.Length >= 3)
{
var bomBuffer = new byte[3];
await stream.ReadExactlyAsync(bomBuffer, ct).ConfigureAwait(false);
if (bomBuffer[0] == 0xEF && bomBuffer[1] == 0xBB && bomBuffer[2] == 0xBF)
{
firstChar = (char)stream.ReadByte();
stream.Position = 3;
}
else
{
stream.Position = 0;
firstChar = (char)buffer[0];
}
}
}
stream.Position = 0;
if (firstChar == '{' || firstChar == '[')
{
return await ExtractAsync(stream, ct).ConfigureAwait(false);
}
if (firstChar == '<')
{
return await ExtractFromXmlAsync(stream, ct).ConfigureAwait(false);
}
try
{
return await ExtractAsync(stream, ct).ConfigureAwait(false);
}
catch
{
stream.Position = 0;
return await ExtractFromXmlAsync(stream, ct).ConfigureAwait(false);
}
}
}

View File

@@ -0,0 +1,99 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.Json.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: JSON extraction path for CycloneDX metadata
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public CycloneDxMetadata Extract(JsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
try
{
var root = document.RootElement;
var version = ExtractBomVersion(root);
var serialNumber = GetStringProperty(root, "serialNumber");
var specVersion = GetStringProperty(root, "specVersion");
string? primaryBomRef = null;
string? primaryName = null;
string? primaryVersion = null;
string? primaryPurl = null;
DateTimeOffset? timestamp = null;
if (root.TryGetProperty("metadata", out var metadata))
{
if (metadata.TryGetProperty("component", out var component))
{
primaryBomRef = GetStringProperty(component, "bom-ref");
primaryName = GetStringProperty(component, "name");
primaryVersion = GetStringProperty(component, "version");
primaryPurl = GetStringProperty(component, "purl");
}
var timestampValue = GetStringProperty(metadata, "timestamp");
if (timestampValue != null &&
DateTimeOffset.TryParse(timestampValue, out var parsedTimestamp))
{
timestamp = parsedTimestamp;
}
}
var bomRefs = new List<string>();
var purls = new List<string>();
var componentCount = 0;
if (root.TryGetProperty("components", out var components) &&
components.ValueKind == JsonValueKind.Array)
{
foreach (var component in components.EnumerateArray())
{
componentCount++;
var bomRef = GetStringProperty(component, "bom-ref");
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = GetStringProperty(component, "purl");
if (purl != null)
{
purls.Add(purl);
}
ExtractNestedComponents(component, bomRefs, purls, ref componentCount);
}
}
return new CycloneDxMetadata
{
SerialNumber = NormalizeParsedString(serialNumber),
Version = version,
SpecVersion = NormalizeParsedString(specVersion),
PrimaryBomRef = NormalizeParsedString(primaryBomRef),
PrimaryName = NormalizeParsedString(primaryName),
PrimaryVersion = NormalizeParsedString(primaryVersion),
PrimaryPurl = NormalizeParsedString(primaryPurl),
ComponentBomRefs = bomRefs,
ComponentPurls = purls,
ComponentCount = componentCount,
Timestamp = timestamp,
Success = true
};
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
}

View File

@@ -0,0 +1,33 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.JsonAsync.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Async JSON extraction path for CycloneDX metadata
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
try
{
using var document = await JsonDocument.ParseAsync(stream, cancellationToken: ct)
.ConfigureAwait(false);
return Extract(document);
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
}

View File

@@ -0,0 +1,68 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.JsonHelpers.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: JSON helper utilities for CycloneDX extraction
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
private static string? GetStringProperty(JsonElement element, string propertyName)
{
return element.TryGetProperty(propertyName, out var prop) && prop.ValueKind == JsonValueKind.String
? prop.GetString()
: null;
}
private static void ExtractNestedComponents(
JsonElement component,
List<string> bomRefs,
List<string> purls,
ref int count)
{
if (!component.TryGetProperty("components", out var nested) ||
nested.ValueKind != JsonValueKind.Array)
{
return;
}
foreach (var nestedComponent in nested.EnumerateArray())
{
count++;
var bomRef = GetStringProperty(nestedComponent, "bom-ref");
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = GetStringProperty(nestedComponent, "purl");
if (purl != null)
{
purls.Add(purl);
}
ExtractNestedComponents(nestedComponent, bomRefs, purls, ref count);
}
}
private static int ExtractBomVersion(JsonElement root)
{
if (root.TryGetProperty("version", out var versionProp) &&
versionProp.ValueKind == JsonValueKind.Number &&
versionProp.TryGetInt32(out var version))
{
return version;
}
return 1;
}
private static string? NormalizeParsedString(string? value)
{
return string.IsNullOrWhiteSpace(value) ? null : value;
}
}

View File

@@ -0,0 +1,25 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.Parsed.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Parsed SBOM extraction for CycloneDX metadata
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public async Task<ParsedSbom> ExtractParsedAsync(Stream stream, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
if (stream.CanSeek)
{
stream.Position = 0;
}
return await _parser.ParseAsync(stream, SbomFormat.CycloneDX, ct).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,99 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.Xml.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: XML extraction path for CycloneDX metadata
// -----------------------------------------------------------------------------
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public CycloneDxMetadata ExtractFromXml(XDocument document)
{
ArgumentNullException.ThrowIfNull(document);
try
{
var root = document.Root;
if (root == null)
{
return new CycloneDxMetadata { Success = false, Error = "Empty XML document" };
}
var ns = DetectNamespace(root);
string? serialNumber = root.Attribute("serialNumber")?.Value;
var version = 1;
var versionAttr = root.Attribute("version")?.Value;
if (int.TryParse(versionAttr, out var parsedVersion))
{
version = parsedVersion;
}
var specVersion = ExtractSpecVersion(ns);
string? primaryBomRef = null;
string? primaryName = null;
string? primaryVersion = null;
string? primaryPurl = null;
var metadata = root.Element(ns + "metadata");
if (metadata != null)
{
var primaryComponent = metadata.Element(ns + "component");
if (primaryComponent != null)
{
primaryBomRef = primaryComponent.Attribute("bom-ref")?.Value;
primaryName = primaryComponent.Element(ns + "name")?.Value;
primaryVersion = primaryComponent.Element(ns + "version")?.Value;
primaryPurl = primaryComponent.Element(ns + "purl")?.Value;
}
}
DateTimeOffset? timestamp = null;
var tsElement = metadata?.Element(ns + "timestamp");
if (tsElement != null && DateTimeOffset.TryParse(tsElement.Value, out var parsedTimestamp))
{
timestamp = parsedTimestamp;
}
var bomRefs = new List<string>();
var purls = new List<string>();
var componentCount = 0;
var componentsElement = root.Element(ns + "components");
if (componentsElement != null)
{
ExtractXmlComponents(componentsElement, ns, bomRefs, purls, ref componentCount);
}
return new CycloneDxMetadata
{
SerialNumber = serialNumber,
Version = version,
SpecVersion = specVersion,
PrimaryBomRef = primaryBomRef,
PrimaryName = primaryName,
PrimaryVersion = primaryVersion,
PrimaryPurl = primaryPurl,
ComponentBomRefs = bomRefs,
ComponentPurls = purls,
ComponentCount = componentCount,
Timestamp = timestamp,
Success = true
};
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
}

View File

@@ -0,0 +1,42 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.XmlAsync.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Async XML extraction path for CycloneDX metadata
// -----------------------------------------------------------------------------
using System.Xml;
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
try
{
var settings = new XmlReaderSettings
{
Async = true,
DtdProcessing = DtdProcessing.Prohibit,
XmlResolver = null
};
using var reader = XmlReader.Create(stream, settings);
var document = await XDocument.LoadAsync(reader, LoadOptions.None, ct)
.ConfigureAwait(false);
return ExtractFromXml(document);
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
}

View File

@@ -0,0 +1,89 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.XmlHelpers.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: XML helper utilities for CycloneDX extraction
// -----------------------------------------------------------------------------
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
public sealed partial class CycloneDxExtractor
{
private static readonly XNamespace _cdx14 = "http://cyclonedx.org/schema/bom/1.4";
private static readonly XNamespace _cdx15 = "http://cyclonedx.org/schema/bom/1.5";
private static readonly XNamespace _cdx16 = "http://cyclonedx.org/schema/bom/1.6";
private static XNamespace DetectNamespace(XElement root)
{
var ns = root.Name.Namespace;
if (ns == _cdx16 || ns.NamespaceName.Contains("1.6", StringComparison.Ordinal))
{
return _cdx16;
}
if (ns == _cdx15 || ns.NamespaceName.Contains("1.5", StringComparison.Ordinal))
{
return _cdx15;
}
if (ns == _cdx14 || ns.NamespaceName.Contains("1.4", StringComparison.Ordinal))
{
return _cdx14;
}
return ns;
}
private static string? ExtractSpecVersion(XNamespace ns)
{
if (ns == _cdx16 || ns.NamespaceName.Contains("1.6", StringComparison.Ordinal))
{
return "1.6";
}
if (ns == _cdx15 || ns.NamespaceName.Contains("1.5", StringComparison.Ordinal))
{
return "1.5";
}
if (ns == _cdx14 || ns.NamespaceName.Contains("1.4", StringComparison.Ordinal))
{
return "1.4";
}
return null;
}
private static void ExtractXmlComponents(
XElement componentsElement,
XNamespace ns,
List<string> bomRefs,
List<string> purls,
ref int count)
{
foreach (var component in componentsElement.Elements(ns + "component"))
{
count++;
var bomRef = component.Attribute("bom-ref")?.Value;
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = component.Element(ns + "purl")?.Value;
if (purl != null)
{
purls.Add(purl);
}
var nested = component.Element(ns + "components");
if (nested != null)
{
ExtractXmlComponents(nested, ns, bomRefs, purls, ref count);
}
}
}
}

View File

@@ -2,552 +2,21 @@
// CycloneDxExtractor.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Standalone service for extracting metadata from CycloneDX SBOMs
// Description: CycloneDX metadata extractor entry point
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Concelier.SbomIntegration.Parsing;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Xml;
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
/// <summary>
/// Extracts metadata from CycloneDX SBOM documents.
/// </summary>
public interface ICycloneDxExtractor
{
/// <summary>
/// Extracts metadata from a CycloneDX JSON document.
/// </summary>
CycloneDxMetadata Extract(JsonDocument document);
/// <summary>
/// Extracts metadata from a CycloneDX JSON stream.
/// </summary>
Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Extracts enriched SBOM data from a CycloneDX JSON document.
/// </summary>
ParsedSbom ExtractParsed(JsonDocument document);
/// <summary>
/// Extracts enriched SBOM data from a CycloneDX JSON stream.
/// </summary>
Task<ParsedSbom> ExtractParsedAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Extracts metadata from a CycloneDX XML document.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
CycloneDxMetadata ExtractFromXml(XDocument document);
/// <summary>
/// Extracts metadata from a CycloneDX XML stream.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Auto-detects format (JSON or XML) and extracts metadata.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default);
}
/// <summary>
/// Extracted metadata from a CycloneDX document.
/// </summary>
public sealed record CycloneDxMetadata
{
/// <summary>SBOM serial number (URN).</summary>
public string? SerialNumber { get; init; }
/// <summary>SBOM version.</summary>
public int Version { get; init; }
/// <summary>CycloneDX spec version.</summary>
public string? SpecVersion { get; init; }
/// <summary>Primary component bom-ref.</summary>
public string? PrimaryBomRef { get; init; }
/// <summary>Primary component name.</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>Primary component purl.</summary>
public string? PrimaryPurl { get; init; }
/// <summary>All component bom-refs.</summary>
public IReadOnlyList<string> ComponentBomRefs { get; init; } = [];
/// <summary>All component purls.</summary>
public IReadOnlyList<string> ComponentPurls { get; init; } = [];
/// <summary>Total component count.</summary>
public int ComponentCount { get; init; }
/// <summary>Timestamp from metadata.</summary>
public DateTimeOffset? Timestamp { get; init; }
/// <summary>Extraction succeeded.</summary>
public bool Success { get; init; }
/// <summary>Extraction error if failed.</summary>
public string? Error { get; init; }
}
/// <summary>
/// Default implementation of CycloneDX extractor.
/// </summary>
public sealed class CycloneDxExtractor : ICycloneDxExtractor
public sealed partial class CycloneDxExtractor : ICycloneDxExtractor
{
private readonly IParsedSbomParser _parser;
public CycloneDxExtractor()
: this(new ParsedSbomParser(NullLogger<ParsedSbomParser>.Instance))
{
}
public CycloneDxExtractor(IParsedSbomParser parser)
{
_parser = parser ?? throw new ArgumentNullException(nameof(parser));
}
/// <inheritdoc />
public ParsedSbom ExtractParsed(JsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var payload = Encoding.UTF8.GetBytes(document.RootElement.GetRawText());
using var stream = new MemoryStream(payload);
return _parser.ParseAsync(stream, SbomFormat.CycloneDX).GetAwaiter().GetResult();
}
/// <inheritdoc />
public async Task<ParsedSbom> ExtractParsedAsync(Stream stream, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
if (stream.CanSeek)
{
stream.Position = 0;
}
return await _parser.ParseAsync(stream, SbomFormat.CycloneDX, ct);
}
/// <inheritdoc />
public CycloneDxMetadata Extract(JsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
try
{
var root = document.RootElement;
var parsed = ExtractParsed(document);
var version = ExtractBomVersion(root);
var bomRefs = new List<string>();
var purls = new List<string>();
int componentCount = 0;
if (root.TryGetProperty("components", out var components) &&
components.ValueKind == JsonValueKind.Array)
{
foreach (var component in components.EnumerateArray())
{
componentCount++;
var bomRef = GetStringProperty(component, "bom-ref");
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = GetStringProperty(component, "purl");
if (purl != null)
{
purls.Add(purl);
}
// Recursively extract from nested components
ExtractNestedComponents(component, bomRefs, purls, ref componentCount);
}
}
var primaryComponent = GetPrimaryComponent(parsed);
return new CycloneDxMetadata
{
SerialNumber = NormalizeParsedString(parsed.SerialNumber),
Version = version,
SpecVersion = NormalizeParsedString(parsed.SpecVersion),
PrimaryBomRef = NormalizeParsedString(parsed.Metadata.RootComponentRef),
PrimaryName = NormalizeParsedString(parsed.Metadata.Name)
?? NormalizeParsedString(primaryComponent?.Name),
PrimaryVersion = NormalizeParsedString(parsed.Metadata.Version)
?? NormalizeParsedString(primaryComponent?.Version),
PrimaryPurl = NormalizeParsedString(primaryComponent?.Purl),
ComponentBomRefs = bomRefs,
ComponentPurls = purls,
ComponentCount = componentCount,
Timestamp = parsed.Metadata.Timestamp,
Success = true
};
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default)
{
try
{
using var document = await JsonDocument.ParseAsync(stream, cancellationToken: ct);
return Extract(document);
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
private static string? GetStringProperty(JsonElement element, string propertyName)
{
return element.TryGetProperty(propertyName, out var prop) ? prop.GetString() : null;
}
private static void ExtractNestedComponents(
JsonElement component,
List<string> bomRefs,
List<string> purls,
ref int count)
{
if (!component.TryGetProperty("components", out var nested))
return;
foreach (var child in nested.EnumerateArray())
{
count++;
var bomRef = GetStringProperty(child, "bom-ref");
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = GetStringProperty(child, "purl");
if (purl != null)
{
purls.Add(purl);
}
// Recurse
ExtractNestedComponents(child, bomRefs, purls, ref count);
}
}
// -------------------------------------------------------------------------
// XML Parsing - Sprint: SPRINT_20260118_017 (AS-004)
// -------------------------------------------------------------------------
private static readonly XNamespace Cdx14 = "http://cyclonedx.org/schema/bom/1.4";
private static readonly XNamespace Cdx15 = "http://cyclonedx.org/schema/bom/1.5";
private static readonly XNamespace Cdx16 = "http://cyclonedx.org/schema/bom/1.6";
/// <inheritdoc />
public CycloneDxMetadata ExtractFromXml(XDocument document)
{
ArgumentNullException.ThrowIfNull(document);
try
{
var root = document.Root;
if (root == null)
{
return new CycloneDxMetadata { Success = false, Error = "Empty XML document" };
}
// Detect namespace
var ns = DetectNamespace(root);
// Extract serial number (attribute on root)
string? serialNumber = root.Attribute("serialNumber")?.Value;
// Extract version
int version = 1;
var versionAttr = root.Attribute("version")?.Value;
if (int.TryParse(versionAttr, out var v))
{
version = v;
}
// Extract spec version from namespace
string? specVersion = ExtractSpecVersion(ns);
// Extract primary component from metadata
string? primaryBomRef = null;
string? primaryName = null;
string? primaryVersion = null;
string? primaryPurl = null;
var metadata = root.Element(ns + "metadata");
if (metadata != null)
{
var primaryComponent = metadata.Element(ns + "component");
if (primaryComponent != null)
{
primaryBomRef = primaryComponent.Attribute("bom-ref")?.Value;
primaryName = primaryComponent.Element(ns + "name")?.Value;
primaryVersion = primaryComponent.Element(ns + "version")?.Value;
primaryPurl = primaryComponent.Element(ns + "purl")?.Value;
}
}
// Extract timestamp
DateTimeOffset? timestamp = null;
var tsElement = metadata?.Element(ns + "timestamp");
if (tsElement != null && DateTimeOffset.TryParse(tsElement.Value, out var ts))
{
timestamp = ts;
}
// Extract all components
var bomRefs = new List<string>();
var purls = new List<string>();
int componentCount = 0;
var componentsElement = root.Element(ns + "components");
if (componentsElement != null)
{
ExtractXmlComponents(componentsElement, ns, bomRefs, purls, ref componentCount);
}
return new CycloneDxMetadata
{
SerialNumber = serialNumber,
Version = version,
SpecVersion = specVersion,
PrimaryBomRef = primaryBomRef,
PrimaryName = primaryName,
PrimaryVersion = primaryVersion,
PrimaryPurl = primaryPurl,
ComponentBomRefs = bomRefs,
ComponentPurls = purls,
ComponentCount = componentCount,
Timestamp = timestamp,
Success = true
};
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default)
{
try
{
var settings = new XmlReaderSettings
{
Async = true,
DtdProcessing = DtdProcessing.Prohibit,
XmlResolver = null
};
using var reader = XmlReader.Create(stream, settings);
var document = await XDocument.LoadAsync(reader, LoadOptions.None, ct);
return ExtractFromXml(document);
}
catch (Exception ex)
{
return new CycloneDxMetadata
{
Success = false,
Error = ex.Message
};
}
}
/// <inheritdoc />
public async Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default)
{
// Read first bytes to detect format
var buffer = new byte[1];
var bytesRead = await stream.ReadAsync(buffer, ct);
if (bytesRead == 0)
{
return new CycloneDxMetadata { Success = false, Error = "Empty stream" };
}
// Reset stream
stream.Position = 0;
// Detect format by first character
char firstChar = (char)buffer[0];
// Skip BOM if present
if (firstChar == '\uFEFF')
{
buffer = new byte[1];
await stream.ReadExactlyAsync(buffer, ct);
stream.Position = 0;
// Skip 3-byte UTF-8 BOM
if (stream.Length >= 3)
{
var bomBuffer = new byte[3];
await stream.ReadExactlyAsync(bomBuffer, ct);
if (bomBuffer[0] == 0xEF && bomBuffer[1] == 0xBB && bomBuffer[2] == 0xBF)
{
firstChar = (char)stream.ReadByte();
stream.Position = 3; // After BOM
}
else
{
stream.Position = 0;
firstChar = (char)buffer[0];
}
}
}
stream.Position = 0;
if (firstChar == '{' || firstChar == '[')
{
// JSON format
return await ExtractAsync(stream, ct);
}
else if (firstChar == '<')
{
// XML format
return await ExtractFromXmlAsync(stream, ct);
}
else
{
// Try JSON first, fallback to XML
try
{
return await ExtractAsync(stream, ct);
}
catch
{
stream.Position = 0;
return await ExtractFromXmlAsync(stream, ct);
}
}
}
private static XNamespace DetectNamespace(XElement root)
{
var ns = root.Name.Namespace;
if (ns == Cdx16 || ns.NamespaceName.Contains("1.6"))
return Cdx16;
if (ns == Cdx15 || ns.NamespaceName.Contains("1.5"))
return Cdx15;
if (ns == Cdx14 || ns.NamespaceName.Contains("1.4"))
return Cdx14;
// Default to detected namespace
return ns;
}
private static string? ExtractSpecVersion(XNamespace ns)
{
if (ns == Cdx16 || ns.NamespaceName.Contains("1.6"))
return "1.6";
if (ns == Cdx15 || ns.NamespaceName.Contains("1.5"))
return "1.5";
if (ns == Cdx14 || ns.NamespaceName.Contains("1.4"))
return "1.4";
return null;
}
private static int ExtractBomVersion(JsonElement root)
{
if (root.TryGetProperty("version", out var versionProp) &&
versionProp.ValueKind == JsonValueKind.Number &&
versionProp.TryGetInt32(out var version))
{
return version;
}
return 1;
}
private static ParsedComponent? GetPrimaryComponent(ParsedSbom parsed)
{
if (string.IsNullOrWhiteSpace(parsed.Metadata.RootComponentRef))
{
return null;
}
return parsed.Components.FirstOrDefault(component =>
string.Equals(component.BomRef, parsed.Metadata.RootComponentRef, StringComparison.Ordinal));
}
private static string? NormalizeParsedString(string? value)
{
return string.IsNullOrWhiteSpace(value) ? null : value;
}
private static void ExtractXmlComponents(
XElement componentsElement,
XNamespace ns,
List<string> bomRefs,
List<string> purls,
ref int count)
{
foreach (var component in componentsElement.Elements(ns + "component"))
{
count++;
var bomRef = component.Attribute("bom-ref")?.Value;
if (bomRef != null)
{
bomRefs.Add(bomRef);
}
var purl = component.Element(ns + "purl")?.Value;
if (purl != null)
{
purls.Add(purl);
}
// Recurse into nested components
var nested = component.Element(ns + "components");
if (nested != null)
{
ExtractXmlComponents(nested, ns, bomRefs, purls, ref count);
}
}
}
}

View File

@@ -0,0 +1,52 @@
// -----------------------------------------------------------------------------
// CycloneDxMetadata.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Metadata extracted from CycloneDX documents
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Extracted metadata from a CycloneDX document.
/// </summary>
public sealed record CycloneDxMetadata
{
/// <summary>SBOM serial number (URN).</summary>
public string? SerialNumber { get; init; }
/// <summary>SBOM version.</summary>
public int Version { get; init; }
/// <summary>CycloneDX spec version.</summary>
public string? SpecVersion { get; init; }
/// <summary>Primary component bom-ref.</summary>
public string? PrimaryBomRef { get; init; }
/// <summary>Primary component name.</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>Primary component purl.</summary>
public string? PrimaryPurl { get; init; }
/// <summary>All component bom-refs.</summary>
public IReadOnlyList<string> ComponentBomRefs { get; init; } = [];
/// <summary>All component purls.</summary>
public IReadOnlyList<string> ComponentPurls { get; init; } = [];
/// <summary>Total component count.</summary>
public int ComponentCount { get; init; }
/// <summary>Timestamp from metadata.</summary>
public DateTimeOffset? Timestamp { get; init; }
/// <summary>Extraction succeeded.</summary>
public bool Success { get; init; }
/// <summary>Extraction error if failed.</summary>
public string? Error { get; init; }
}

View File

@@ -4,7 +4,6 @@
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Unified artifact storage interface with bom-ref support
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
@@ -55,325 +54,3 @@ public interface IArtifactStore
/// </summary>
Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
}
/// <summary>
/// Request to store an artifact.
/// </summary>
public sealed record ArtifactStoreRequest
{
/// <summary>
/// Package URL (purl) or CycloneDX bom-ref.
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// CycloneDX serialNumber URN (e.g., urn:uuid:...).
/// </summary>
public required string SerialNumber { get; init; }
/// <summary>
/// Unique artifact identifier (e.g., DSSE UUID, hash).
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Artifact content stream.
/// </summary>
public required Stream Content { get; init; }
/// <summary>
/// Content type (MIME type).
/// </summary>
public required string ContentType { get; init; }
/// <summary>
/// Artifact type classification.
/// </summary>
public ArtifactType Type { get; init; } = ArtifactType.Unknown;
/// <summary>
/// Additional metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
/// <summary>
/// Tenant ID for multi-tenancy.
/// </summary>
public Guid TenantId { get; init; }
/// <summary>
/// Whether to overwrite existing artifact.
/// </summary>
public bool Overwrite { get; init; } = false;
}
/// <summary>
/// Result of storing an artifact.
/// </summary>
public sealed record ArtifactStoreResult
{
/// <summary>
/// Whether storage was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Storage key (full path).
/// </summary>
public string? StorageKey { get; init; }
/// <summary>
/// SHA-256 hash of stored content.
/// </summary>
public string? Sha256 { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public long? SizeBytes { get; init; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? ErrorMessage { get; init; }
/// <summary>
/// Whether this was a new artifact or an update.
/// </summary>
public bool WasCreated { get; init; }
/// <summary>
/// Creates a success result.
/// </summary>
public static ArtifactStoreResult Succeeded(string storageKey, string sha256, long sizeBytes, bool wasCreated = true)
{
return new ArtifactStoreResult
{
Success = true,
StorageKey = storageKey,
Sha256 = sha256,
SizeBytes = sizeBytes,
WasCreated = wasCreated
};
}
/// <summary>
/// Creates a failure result.
/// </summary>
public static ArtifactStoreResult Failed(string errorMessage)
{
return new ArtifactStoreResult
{
Success = false,
ErrorMessage = errorMessage
};
}
}
/// <summary>
/// Result of reading an artifact.
/// </summary>
public sealed record ArtifactReadResult : IDisposable
{
/// <summary>
/// Whether the artifact was found.
/// </summary>
public required bool Found { get; init; }
/// <summary>
/// Content stream (caller must dispose).
/// </summary>
public Stream? Content { get; init; }
/// <summary>
/// Artifact metadata.
/// </summary>
public ArtifactMetadata? Metadata { get; init; }
/// <summary>
/// Error message if not found.
/// </summary>
public string? ErrorMessage { get; init; }
/// <inheritdoc />
public void Dispose()
{
Content?.Dispose();
}
/// <summary>
/// Creates a found result.
/// </summary>
public static ArtifactReadResult Succeeded(Stream content, ArtifactMetadata metadata)
{
return new ArtifactReadResult
{
Found = true,
Content = content,
Metadata = metadata
};
}
/// <summary>
/// Creates a not found result.
/// </summary>
public static ArtifactReadResult NotFound(string? message = null)
{
return new ArtifactReadResult
{
Found = false,
ErrorMessage = message ?? "Artifact not found"
};
}
}
/// <summary>
/// Artifact metadata.
/// </summary>
public sealed record ArtifactMetadata
{
/// <summary>
/// Full storage key/path.
/// </summary>
public required string StorageKey { get; init; }
/// <summary>
/// Package URL or bom-ref.
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// CycloneDX serialNumber.
/// </summary>
public required string SerialNumber { get; init; }
/// <summary>
/// Artifact ID.
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Content type (MIME).
/// </summary>
public required string ContentType { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
public required long SizeBytes { get; init; }
/// <summary>
/// SHA-256 hash.
/// </summary>
public required string Sha256 { get; init; }
/// <summary>
/// Creation timestamp.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Artifact type.
/// </summary>
public ArtifactType Type { get; init; }
/// <summary>
/// Tenant ID.
/// </summary>
public Guid TenantId { get; init; }
/// <summary>
/// Additional metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? ExtraMetadata { get; init; }
}
/// <summary>
/// Artifact type classification.
/// </summary>
public enum ArtifactType
{
/// <summary>Unknown type.</summary>
Unknown,
/// <summary>SBOM (CycloneDX or SPDX).</summary>
Sbom,
/// <summary>VEX document.</summary>
Vex,
/// <summary>DSSE envelope/attestation.</summary>
DsseEnvelope,
/// <summary>Rekor transparency log proof.</summary>
RekorProof,
/// <summary>Verdict record.</summary>
Verdict,
/// <summary>Policy bundle.</summary>
PolicyBundle,
/// <summary>Provenance attestation.</summary>
Provenance,
/// <summary>Build log.</summary>
BuildLog,
/// <summary>Test results.</summary>
TestResults,
/// <summary>Scan results.</summary>
ScanResults
}
/// <summary>
/// Utility for encoding bom-refs for path usage.
/// </summary>
public static class BomRefEncoder
{
/// <summary>
/// Encodes a bom-ref/purl for use in storage paths.
/// Handles special characters in purls (/, @, :, etc.).
/// </summary>
public static string Encode(string bomRef)
{
if (string.IsNullOrWhiteSpace(bomRef))
return "unknown";
// Replace path-unsafe characters
return bomRef
.Replace("/", "_")
.Replace(":", "_")
.Replace("@", "_at_")
.Replace("?", "_q_")
.Replace("#", "_h_")
.Replace("%", "_p_");
}
/// <summary>
/// Decodes an encoded bom-ref back to original form.
/// </summary>
public static string Decode(string encoded)
{
if (string.IsNullOrWhiteSpace(encoded))
return string.Empty;
return encoded
.Replace("_at_", "@")
.Replace("_q_", "?")
.Replace("_h_", "#")
.Replace("_p_", "%");
// Note: / and : remain as _ since they're ambiguous
}
/// <summary>
/// Builds the storage path for an artifact.
/// </summary>
public static string BuildPath(string bomRef, string serialNumber, string artifactId)
{
var encodedBomRef = Encode(bomRef);
var encodedSerial = Encode(serialNumber);
return $"artifacts/{encodedBomRef}/{encodedSerial}/{artifactId}.json";
}
}

View File

@@ -0,0 +1,50 @@
// -----------------------------------------------------------------------------
// ICycloneDxExtractor.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Interface for extracting metadata from CycloneDX SBOMs
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
using System.Text.Json;
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
/// <summary>
/// Extracts metadata from CycloneDX SBOM documents.
/// </summary>
public interface ICycloneDxExtractor
{
/// <summary>
/// Extracts metadata from a CycloneDX JSON document.
/// </summary>
CycloneDxMetadata Extract(JsonDocument document);
/// <summary>
/// Extracts metadata from a CycloneDX JSON stream.
/// </summary>
Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Extracts enriched SBOM data from a CycloneDX JSON stream.
/// </summary>
Task<ParsedSbom> ExtractParsedAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Extracts metadata from a CycloneDX XML document.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
CycloneDxMetadata ExtractFromXml(XDocument document);
/// <summary>
/// Extracts metadata from a CycloneDX XML stream.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default);
/// <summary>
/// Auto-detects format (JSON or XML) and extracts metadata.
/// Sprint: SPRINT_20260118_017 (AS-004)
/// </summary>
Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default);
}

View File

@@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| Task ID | Status | Notes |
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/__Libraries/StellaOps.Artifact.Core/StellaOps.Artifact.Core.md. |
| REMED-05 | DONE | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/__Libraries/StellaOps.Artifact.Core/StellaOps.Artifact.Core.md. Tests: `dotnet test src/__Libraries/StellaOps.Artifact.Core.Tests/StellaOps.Artifact.Core.Tests.csproj` (23 tests, MTP0001 warning). |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |

View File

@@ -0,0 +1,48 @@
// -----------------------------------------------------------------------------
// ArtifactDataSource.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: PostgreSQL data source for the Artifact module
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Options;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL data source for the Artifact module.
/// </summary>
public sealed class ArtifactDataSource : DataSourceBase
{
public const string DefaultSchemaName = "evidence";
public const string OptionsName = "Artifact";
public ArtifactDataSource(IOptionsMonitor<PostgresOptions> options, ILogger<ArtifactDataSource> logger)
: base(CreateOptions(options.Get(OptionsName)), logger)
{
}
protected override string ModuleName => "Artifact";
private static PostgresOptions CreateOptions(PostgresOptions baseOptions)
{
var schemaName = string.IsNullOrWhiteSpace(baseOptions.SchemaName)
? DefaultSchemaName
: baseOptions.SchemaName;
return new PostgresOptions
{
ConnectionString = baseOptions.ConnectionString,
CommandTimeoutSeconds = baseOptions.CommandTimeoutSeconds,
MaxPoolSize = baseOptions.MaxPoolSize,
MinPoolSize = baseOptions.MinPoolSize,
ConnectionIdleLifetimeSeconds = baseOptions.ConnectionIdleLifetimeSeconds,
Pooling = baseOptions.Pooling,
SchemaName = schemaName,
AutoMigrate = baseOptions.AutoMigrate,
MigrationsPath = baseOptions.MigrationsPath
};
}
}

View File

@@ -0,0 +1,57 @@
// -----------------------------------------------------------------------------
// ArtifactIndexEntry.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Artifact index entry model for storage and querying
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Artifact index entry for PostgreSQL storage.
/// </summary>
public sealed record ArtifactIndexEntry
{
/// <summary>Primary key.</summary>
public Guid Id { get; init; } = Guid.NewGuid();
/// <summary>Tenant ID.</summary>
public required Guid TenantId { get; init; }
/// <summary>Package URL or bom-ref.</summary>
public required string BomRef { get; init; }
/// <summary>CycloneDX serialNumber.</summary>
public required string SerialNumber { get; init; }
/// <summary>Artifact ID.</summary>
public required string ArtifactId { get; init; }
/// <summary>Full storage key/path.</summary>
public required string StorageKey { get; init; }
/// <summary>Artifact type.</summary>
public required ArtifactType Type { get; init; }
/// <summary>Content type (MIME).</summary>
public required string ContentType { get; init; }
/// <summary>SHA-256 hash.</summary>
public required string Sha256 { get; init; }
/// <summary>Size in bytes.</summary>
public required long SizeBytes { get; init; }
/// <summary>When the artifact was stored.</summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>When the index entry was last updated.</summary>
public DateTimeOffset? UpdatedAt { get; init; }
/// <summary>Whether the artifact has been deleted.</summary>
public bool IsDeleted { get; init; }
/// <summary>Deletion timestamp.</summary>
public DateTimeOffset? DeletedAt { get; init; }
}

View File

@@ -1,279 +0,0 @@
// -----------------------------------------------------------------------------
// ArtifactIndexRepository.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: PostgreSQL-backed artifact index for efficient querying
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL repository for artifact index.
/// Provides efficient bom-ref based querying.
/// </summary>
public interface IArtifactIndexRepository
{
/// <summary>
/// Indexes a stored artifact.
/// </summary>
Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by bom-ref.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by bom-ref and serial number.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default);
/// <summary>
/// Gets a specific artifact index entry.
/// </summary>
Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
/// <summary>
/// Removes an artifact from the index.
/// </summary>
Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by SHA-256 hash.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by type.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default);
}
/// <summary>
/// Artifact index entry for PostgreSQL storage.
/// </summary>
public sealed record ArtifactIndexEntry
{
/// <summary>Primary key.</summary>
public Guid Id { get; init; } = Guid.NewGuid();
/// <summary>Tenant ID.</summary>
public required Guid TenantId { get; init; }
/// <summary>Package URL or bom-ref.</summary>
public required string BomRef { get; init; }
/// <summary>CycloneDX serialNumber.</summary>
public required string SerialNumber { get; init; }
/// <summary>Artifact ID.</summary>
public required string ArtifactId { get; init; }
/// <summary>Full storage key/path.</summary>
public required string StorageKey { get; init; }
/// <summary>Artifact type.</summary>
public required ArtifactType Type { get; init; }
/// <summary>Content type (MIME).</summary>
public required string ContentType { get; init; }
/// <summary>SHA-256 hash.</summary>
public required string Sha256 { get; init; }
/// <summary>Size in bytes.</summary>
public required long SizeBytes { get; init; }
/// <summary>When the artifact was stored.</summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>When the index entry was last updated.</summary>
public DateTimeOffset? UpdatedAt { get; init; }
/// <summary>Whether the artifact has been deleted.</summary>
public bool IsDeleted { get; init; }
/// <summary>Deletion timestamp.</summary>
public DateTimeOffset? DeletedAt { get; init; }
}
/// <summary>
/// In-memory implementation for testing.
/// </summary>
public sealed class InMemoryArtifactIndexRepository : IArtifactIndexRepository
{
private readonly List<ArtifactIndexEntry> _entries = new();
private readonly object _lock = new();
/// <inheritdoc />
public Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
{
lock (_lock)
{
// Remove existing entry if present
_entries.RemoveAll(e =>
e.BomRef == entry.BomRef &&
e.SerialNumber == entry.SerialNumber &&
e.ArtifactId == entry.ArtifactId);
_entries.Add(entry);
}
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.BomRef == bomRef && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.BomRef == bomRef && e.SerialNumber == serialNumber && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
lock (_lock)
{
var entry = _entries.FirstOrDefault(e =>
e.BomRef == bomRef &&
e.SerialNumber == serialNumber &&
e.ArtifactId == artifactId &&
!e.IsDeleted);
return Task.FromResult(entry);
}
}
/// <inheritdoc />
public Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
lock (_lock)
{
var entry = _entries.FirstOrDefault(e =>
e.BomRef == bomRef &&
e.SerialNumber == serialNumber &&
e.ArtifactId == artifactId &&
!e.IsDeleted);
if (entry != null)
{
var index = _entries.IndexOf(entry);
_entries[index] = entry with
{
IsDeleted = true,
DeletedAt = DateTimeOffset.UtcNow
};
return Task.FromResult(true);
}
return Task.FromResult(false);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.Sha256 == sha256 && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.Type == type && e.TenantId == tenantId && !e.IsDeleted)
.Take(limit)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
}
/// <summary>
/// PostgreSQL artifact index table schema.
/// </summary>
public static class ArtifactIndexSchema
{
/// <summary>
/// SQL migration to create the artifact index table.
/// </summary>
public const string CreateTableSql = """
CREATE TABLE IF NOT EXISTS evidence.artifact_index (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
bom_ref TEXT NOT NULL,
serial_number TEXT NOT NULL,
artifact_id TEXT NOT NULL,
storage_key TEXT NOT NULL,
artifact_type TEXT NOT NULL,
content_type TEXT NOT NULL,
sha256 TEXT NOT NULL,
size_bytes BIGINT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
deleted_at TIMESTAMPTZ,
CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
);
-- Index for bom-ref queries (most common)
CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
ON evidence.artifact_index (tenant_id, bom_ref)
WHERE NOT is_deleted;
-- Index for SHA-256 lookups (deduplication)
CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
ON evidence.artifact_index (sha256)
WHERE NOT is_deleted;
-- Index for type-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_type
ON evidence.artifact_index (tenant_id, artifact_type)
WHERE NOT is_deleted;
-- Index for serial number + bom-ref compound queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
WHERE NOT is_deleted;
""";
}

View File

@@ -0,0 +1,57 @@
// -----------------------------------------------------------------------------
// ArtifactIndexSchema.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: SQL schema for the artifact index table
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL artifact index table schema.
/// </summary>
public static class ArtifactIndexSchema
{
/// <summary>
/// SQL migration to create the artifact index table.
/// </summary>
public const string CreateTableSql = """
CREATE TABLE IF NOT EXISTS evidence.artifact_index (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
bom_ref TEXT NOT NULL,
serial_number TEXT NOT NULL,
artifact_id TEXT NOT NULL,
storage_key TEXT NOT NULL,
artifact_type TEXT NOT NULL,
content_type TEXT NOT NULL,
sha256 TEXT NOT NULL,
size_bytes BIGINT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ,
is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
deleted_at TIMESTAMPTZ,
CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
);
-- Index for bom-ref queries (most common)
CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
ON evidence.artifact_index (tenant_id, bom_ref)
WHERE NOT is_deleted;
-- Index for SHA-256 lookups (deduplication)
CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
ON evidence.artifact_index (sha256)
WHERE NOT is_deleted;
-- Index for type-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_type
ON evidence.artifact_index (tenant_id, artifact_type)
WHERE NOT is_deleted;
-- Index for serial number + bom-ref compound queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
WHERE NOT is_deleted;
""";
}

View File

@@ -0,0 +1,99 @@
// -----------------------------------------------------------------------------
// ArtifactIndexSql.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: SQL statements for artifact index repository
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
internal static class ArtifactIndexSql
{
public const string Insert = """
INSERT INTO evidence.artifact_index (
id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at
) VALUES (
@id, @tenant_id, @bom_ref, @serial_number, @artifact_id, @storage_key,
@artifact_type, @content_type, @sha256, @size_bytes, @created_at
)
ON CONFLICT (tenant_id, bom_ref, serial_number, artifact_id)
DO UPDATE SET
storage_key = EXCLUDED.storage_key,
artifact_type = EXCLUDED.artifact_type,
content_type = EXCLUDED.content_type,
sha256 = EXCLUDED.sha256,
size_bytes = EXCLUDED.size_bytes,
updated_at = NOW(),
is_deleted = FALSE,
deleted_at = NULL
""";
public const string SelectByBomRef = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND NOT is_deleted
ORDER BY created_at DESC
""";
public const string SelectByBomRefAndSerial = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number AND NOT is_deleted
ORDER BY created_at DESC
""";
public const string SelectByKey = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
AND artifact_id = @artifact_id AND NOT is_deleted
""";
public const string UpdateSoftDelete = """
UPDATE evidence.artifact_index
SET is_deleted = TRUE, deleted_at = NOW(), updated_at = NOW()
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
AND artifact_id = @artifact_id AND NOT is_deleted
""";
public const string SelectBySha256 = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE sha256 = @sha256 AND NOT is_deleted
ORDER BY created_at DESC
LIMIT 100
""";
public const string SelectByType = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND artifact_type = @artifact_type AND NOT is_deleted
ORDER BY created_at DESC
LIMIT @limit
""";
public const string SelectByTimeRange = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND created_at >= @from AND created_at < @to AND NOT is_deleted
ORDER BY created_at DESC
LIMIT @limit
""";
public const string CountByTenant = """
SELECT COUNT(*) FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND NOT is_deleted
""";
}

View File

@@ -0,0 +1,38 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationOptions.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Options for legacy artifact migration
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Migration options.
/// </summary>
public sealed class ArtifactMigrationOptions
{
/// <summary>
/// Maximum number of parallel migrations.
/// </summary>
public int MaxParallelism { get; set; } = 4;
/// <summary>
/// Batch size for processing.
/// </summary>
public int BatchSize { get; set; } = 100;
/// <summary>
/// Whether to copy (preserve original) or move.
/// </summary>
public bool CopyMode { get; set; } = true;
/// <summary>
/// Skip artifacts that already exist in the unified store.
/// </summary>
public bool SkipExisting { get; set; } = true;
/// <summary>
/// Whether to write a migration log.
/// </summary>
public bool EnableLogging { get; set; } = true;
}

View File

@@ -0,0 +1,21 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationResult.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Result model for a single migration
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Result of migrating a single artifact.
/// </summary>
public sealed record ArtifactMigrationResult
{
public required string OriginalPath { get; init; }
public required string? NewPath { get; init; }
public required bool Success { get; init; }
public required bool Skipped { get; init; }
public string? BomRef { get; init; }
public string? SerialNumber { get; init; }
public string? ErrorMessage { get; init; }
}

View File

@@ -0,0 +1,33 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Batch.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Batch execution helpers for artifact migration
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private async Task<ArtifactMigrationResult> QueueMigrationAsync(
LegacyArtifact legacy,
SemaphoreSlim semaphore,
CancellationToken ct)
{
await semaphore.WaitAsync(ct).ConfigureAwait(false);
try
{
return await MigrateOneAsync(legacy, ct).ConfigureAwait(false);
}
finally
{
semaphore.Release();
}
}
private static async Task<IReadOnlyList<ArtifactMigrationResult>> ProcessBatchAsync(
List<Task<ArtifactMigrationResult>> batch)
{
var results = await Task.WhenAll(batch).ConfigureAwait(false);
return results;
}
}

View File

@@ -0,0 +1,47 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Content.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Legacy content access and identifier extraction
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private async Task<MemoryStream?> ReadLegacyContentAsync(LegacyArtifact legacy, CancellationToken ct)
{
await using var sourceStream = await _source.ReadAsync(legacy.LegacyPath, ct).ConfigureAwait(false);
if (sourceStream == null)
{
return null;
}
var buffer = new MemoryStream();
await sourceStream.CopyToAsync(buffer, ct).ConfigureAwait(false);
buffer.Position = 0;
return buffer;
}
private async Task<MigrationIdentifiers> ResolveIdentifiersAsync(
LegacyArtifact legacy,
MemoryStream content,
CancellationToken ct)
{
if (!IsSbomContent(legacy.ContentType))
{
return new MigrationIdentifiers(GenerateFallbackBomRef(legacy), GenerateFallbackSerial(legacy));
}
var metadata = await _extractor.ExtractAsync(content, ct).ConfigureAwait(false);
content.Position = 0;
if (!metadata.Success)
{
return new MigrationIdentifiers(GenerateFallbackBomRef(legacy), GenerateFallbackSerial(legacy));
}
var bomRef = metadata.PrimaryPurl ?? metadata.PrimaryBomRef ?? GenerateFallbackBomRef(legacy);
var serialNumber = metadata.SerialNumber ?? GenerateFallbackSerial(legacy);
return new MigrationIdentifiers(bomRef, serialNumber);
}
}

View File

@@ -0,0 +1,81 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Fallbacks.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Fallback identifier helpers for migration
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
using System.Security.Cryptography;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private readonly record struct MigrationIdentifiers(string BomRef, string SerialNumber);
private static bool IsSbomContent(string contentType)
{
return contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)
|| contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase)
|| contentType == "application/json";
}
private static string GenerateFallbackBomRef(LegacyArtifact legacy)
{
var sanitized = legacy.LegacyPath
.Replace("\\", "/")
.Replace("tenants/", "")
.Replace("bundles/", "");
return $"pkg:stella/legacy/{Uri.EscapeDataString(sanitized)}";
}
private static string GenerateFallbackSerial(LegacyArtifact legacy)
{
using var sha = SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(legacy.LegacyPath));
var guid = new Guid(hash.Take(16).ToArray());
return $"urn:uuid:{guid}";
}
private string GenerateArtifactId(LegacyArtifact legacy)
{
var fileName = Path.GetFileNameWithoutExtension(legacy.LegacyPath);
return !string.IsNullOrWhiteSpace(fileName)
? fileName
: _guidProvider.NewGuid().ToString("D");
}
private static ArtifactType InferArtifactType(string contentType, string path)
{
if (contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase) ||
contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase))
{
return ArtifactType.Sbom;
}
if (contentType.Contains("vex", StringComparison.OrdinalIgnoreCase) ||
contentType.Contains("openvex", StringComparison.OrdinalIgnoreCase))
{
return ArtifactType.Vex;
}
if (contentType.Contains("dsse", StringComparison.OrdinalIgnoreCase) ||
path.Contains("dsse", StringComparison.OrdinalIgnoreCase))
{
return ArtifactType.DsseEnvelope;
}
if (path.Contains("rekor", StringComparison.OrdinalIgnoreCase))
{
return ArtifactType.RekorProof;
}
if (path.Contains("verdict", StringComparison.OrdinalIgnoreCase))
{
return ArtifactType.Verdict;
}
return ArtifactType.Unknown;
}
}

View File

@@ -0,0 +1,36 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Logging.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Logging helpers for migration lifecycle
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private void LogStart(int totalCount)
{
if (!_options.EnableLogging)
{
return;
}
_logger.LogInformation("Starting migration of {Count} artifacts", totalCount);
}
private void LogCompletion(ArtifactMigrationState state)
{
if (!_options.EnableLogging)
{
return;
}
_logger.LogInformation(
"Migration completed: {Succeeded} succeeded, {Failed} failed, {Skipped} skipped out of {Total}",
state.SuccessCount,
state.FailureCount,
state.SkippedCount,
state.TotalItems);
}
}

View File

@@ -0,0 +1,55 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Migrate.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Batch orchestration for artifact migration
// -----------------------------------------------------------------------------
using System.Runtime.CompilerServices;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
/// <summary>
/// Runs the migration asynchronously, reporting progress.
/// </summary>
public async IAsyncEnumerable<ArtifactMigrationResult> MigrateAsync(
IProgress<MigrationProgress>? progress = null,
[EnumeratorCancellation] CancellationToken ct = default)
{
var totalCount = await _source.CountAsync(ct).ConfigureAwait(false);
var state = new ArtifactMigrationState(totalCount, _timeProvider);
LogStart(totalCount);
using var semaphore = new SemaphoreSlim(_options.MaxParallelism);
var batch = new List<Task<ArtifactMigrationResult>>(_options.BatchSize);
await foreach (var legacy in _source.EnumerateAsync(ct).ConfigureAwait(false))
{
batch.Add(QueueMigrationAsync(legacy, semaphore, ct));
if (batch.Count < _options.BatchSize)
{
continue;
}
foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
{
progress?.Report(state.Apply(result));
yield return result;
}
batch.Clear();
}
if (batch.Count > 0)
{
foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
{
progress?.Report(state.Apply(result));
yield return result;
}
}
LogCompletion(state);
}
}

View File

@@ -0,0 +1,41 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.MigrateOne.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Single-artifact migration logic
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private async Task<ArtifactMigrationResult> MigrateOneAsync(LegacyArtifact legacy, CancellationToken ct)
{
try
{
using var content = await ReadLegacyContentAsync(legacy, ct).ConfigureAwait(false);
if (content == null)
{
return CreateFailureResult(legacy, "Content not found");
}
var identifiers = await ResolveIdentifiersAsync(legacy, content, ct).ConfigureAwait(false);
var artifactId = GenerateArtifactId(legacy);
if (await ShouldSkipAsync(identifiers, artifactId, ct).ConfigureAwait(false))
{
return CreateSkippedResult(legacy, identifiers);
}
var request = BuildStoreRequest(legacy, identifiers, artifactId, content);
var result = await _targetStore.StoreAsync(request, ct).ConfigureAwait(false);
return CreateStoreResult(legacy, identifiers, result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to migrate {Path}", legacy.LegacyPath);
return CreateFailureResult(legacy, ex.Message);
}
}
}

View File

@@ -0,0 +1,54 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Requests.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Store request construction and skip checks
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private ArtifactStoreRequest BuildStoreRequest(
LegacyArtifact legacy,
MigrationIdentifiers identifiers,
string artifactId,
MemoryStream content)
{
content.Position = 0;
return new ArtifactStoreRequest
{
BomRef = identifiers.BomRef,
SerialNumber = identifiers.SerialNumber,
ArtifactId = artifactId,
Content = content,
ContentType = legacy.ContentType,
Type = InferArtifactType(legacy.ContentType, legacy.LegacyPath),
TenantId = legacy.TenantId,
Overwrite = false,
Metadata = new Dictionary<string, string>
{
["legacy_path"] = legacy.LegacyPath,
["migrated_at"] = _timeProvider.GetUtcNow().ToString("O")
}
};
}
private async Task<bool> ShouldSkipAsync(
MigrationIdentifiers identifiers,
string artifactId,
CancellationToken ct)
{
if (!_options.SkipExisting)
{
return false;
}
return await _targetStore.ExistsAsync(
identifiers.BomRef,
identifiers.SerialNumber,
artifactId,
ct).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,56 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.Results.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Result composition helpers for migration
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class ArtifactMigrationService
{
private static ArtifactMigrationResult CreateFailureResult(LegacyArtifact legacy, string message)
{
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = null,
Success = false,
Skipped = false,
ErrorMessage = message
};
}
private static ArtifactMigrationResult CreateSkippedResult(
LegacyArtifact legacy,
MigrationIdentifiers identifiers)
{
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = null,
Success = true,
Skipped = true,
BomRef = identifiers.BomRef,
SerialNumber = identifiers.SerialNumber
};
}
private static ArtifactMigrationResult CreateStoreResult(
LegacyArtifact legacy,
MigrationIdentifiers identifiers,
ArtifactStoreResult storeResult)
{
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = storeResult.StorageKey,
Success = storeResult.Success,
Skipped = false,
BomRef = identifiers.BomRef,
SerialNumber = identifiers.SerialNumber,
ErrorMessage = storeResult.ErrorMessage
};
}
}

View File

@@ -4,120 +4,23 @@
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Migrates existing evidence from legacy paths to unified store
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
using System.Runtime.CompilerServices;
using StellaOps.Determinism;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Migration options.
/// </summary>
public sealed class ArtifactMigrationOptions
{
/// <summary>
/// Maximum number of parallel migrations.
/// </summary>
public int MaxParallelism { get; set; } = 4;
/// <summary>
/// Batch size for processing.
/// </summary>
public int BatchSize { get; set; } = 100;
/// <summary>
/// Whether to copy (preserve original) or move.
/// </summary>
public bool CopyMode { get; set; } = true;
/// <summary>
/// Skip artifacts that already exist in the unified store.
/// </summary>
public bool SkipExisting { get; set; } = true;
/// <summary>
/// Whether to write a migration log.
/// </summary>
public bool EnableLogging { get; set; } = true;
}
/// <summary>
/// Progress report for migration.
/// </summary>
public sealed record MigrationProgress
{
public int TotalItems { get; init; }
public int ProcessedItems { get; init; }
public int SuccessCount { get; init; }
public int FailureCount { get; init; }
public int SkippedCount { get; init; }
public DateTimeOffset StartedAt { get; init; }
public DateTimeOffset LastUpdateAt { get; init; }
public string CurrentItem { get; init; } = string.Empty;
public TimeSpan EstimatedRemaining => ProcessedItems > 0
? TimeSpan.FromSeconds((TotalItems - ProcessedItems) * (LastUpdateAt - StartedAt).TotalSeconds / ProcessedItems)
: TimeSpan.Zero;
}
/// <summary>
/// Result of migrating a single artifact.
/// </summary>
public sealed record ArtifactMigrationResult
{
public required string OriginalPath { get; init; }
public required string? NewPath { get; init; }
public required bool Success { get; init; }
public required bool Skipped { get; init; }
public string? BomRef { get; init; }
public string? SerialNumber { get; init; }
public string? ErrorMessage { get; init; }
}
/// <summary>
/// Legacy artifact source for migration.
/// </summary>
public interface ILegacyArtifactSource
{
/// <summary>
/// Enumerates all artifacts in the legacy store.
/// </summary>
IAsyncEnumerable<LegacyArtifact> EnumerateAsync(CancellationToken ct = default);
/// <summary>
/// Gets the total count of artifacts.
/// </summary>
Task<int> CountAsync(CancellationToken ct = default);
/// <summary>
/// Reads content from a legacy path.
/// </summary>
Task<Stream?> ReadAsync(string legacyPath, CancellationToken ct = default);
}
/// <summary>
/// Legacy artifact descriptor.
/// </summary>
public sealed record LegacyArtifact
{
public required string LegacyPath { get; init; }
public required string ContentType { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public Guid TenantId { get; init; }
public string? BundleId { get; init; }
}
/// <summary>
/// Service for migrating legacy evidence to unified artifact store.
/// </summary>
public sealed class ArtifactMigrationService
public sealed partial class ArtifactMigrationService
{
private readonly IArtifactStore _targetStore;
private readonly ILegacyArtifactSource _source;
private readonly ICycloneDxExtractor _extractor;
private readonly ArtifactMigrationOptions _options;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ILogger<ArtifactMigrationService> _logger;
public ArtifactMigrationService(
@@ -125,284 +28,16 @@ public sealed class ArtifactMigrationService
ILegacyArtifactSource source,
ICycloneDxExtractor extractor,
ArtifactMigrationOptions options,
TimeProvider timeProvider,
IGuidProvider guidProvider,
ILogger<ArtifactMigrationService> logger)
{
_targetStore = targetStore ?? throw new ArgumentNullException(nameof(targetStore));
_source = source ?? throw new ArgumentNullException(nameof(source));
_extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Runs the migration asynchronously, reporting progress.
/// </summary>
public async IAsyncEnumerable<ArtifactMigrationResult> MigrateAsync(
IProgress<MigrationProgress>? progress = null,
[EnumeratorCancellation] CancellationToken ct = default)
{
var totalCount = await _source.CountAsync(ct).ConfigureAwait(false);
var startedAt = DateTimeOffset.UtcNow;
var processed = 0;
var succeeded = 0;
var failed = 0;
var skipped = 0;
_logger.LogInformation("Starting migration of {Count} artifacts", totalCount);
var semaphore = new SemaphoreSlim(_options.MaxParallelism);
var batch = new List<Task<ArtifactMigrationResult>>(_options.BatchSize);
await foreach (var legacy in _source.EnumerateAsync(ct).ConfigureAwait(false))
{
await semaphore.WaitAsync(ct).ConfigureAwait(false);
batch.Add(Task.Run(async () =>
{
try
{
return await MigrateOneAsync(legacy, ct).ConfigureAwait(false);
}
finally
{
semaphore.Release();
}
}, ct));
// Process batch when full
if (batch.Count >= _options.BatchSize)
{
foreach (var result in await ProcessBatchAsync(batch))
{
processed++;
if (result.Success && !result.Skipped) succeeded++;
else if (result.Skipped) skipped++;
else failed++;
progress?.Report(new MigrationProgress
{
TotalItems = totalCount,
ProcessedItems = processed,
SuccessCount = succeeded,
FailureCount = failed,
SkippedCount = skipped,
StartedAt = startedAt,
LastUpdateAt = DateTimeOffset.UtcNow,
CurrentItem = result.OriginalPath
});
yield return result;
}
batch.Clear();
}
}
// Process remaining
if (batch.Count > 0)
{
foreach (var result in await ProcessBatchAsync(batch))
{
processed++;
if (result.Success && !result.Skipped) succeeded++;
else if (result.Skipped) skipped++;
else failed++;
progress?.Report(new MigrationProgress
{
TotalItems = totalCount,
ProcessedItems = processed,
SuccessCount = succeeded,
FailureCount = failed,
SkippedCount = skipped,
StartedAt = startedAt,
LastUpdateAt = DateTimeOffset.UtcNow,
CurrentItem = result.OriginalPath
});
yield return result;
}
}
_logger.LogInformation(
"Migration completed: {Succeeded} succeeded, {Failed} failed, {Skipped} skipped out of {Total}",
succeeded, failed, skipped, totalCount);
}
private async Task<IReadOnlyList<ArtifactMigrationResult>> ProcessBatchAsync(
List<Task<ArtifactMigrationResult>> batch)
{
await Task.WhenAll(batch).ConfigureAwait(false);
return batch.Select(t => t.Result).ToList();
}
private async Task<ArtifactMigrationResult> MigrateOneAsync(LegacyArtifact legacy, CancellationToken ct)
{
try
{
// Read content from legacy store
var stream = await _source.ReadAsync(legacy.LegacyPath, ct).ConfigureAwait(false);
if (stream == null)
{
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = null,
Success = false,
Skipped = false,
ErrorMessage = "Content not found"
};
}
// Buffer the stream for multiple reads
using var memoryStream = new MemoryStream();
await stream.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
await stream.DisposeAsync().ConfigureAwait(false);
memoryStream.Position = 0;
// Try to extract bom-ref from content
string bomRef;
string serialNumber;
if (IsSbomContent(legacy.ContentType))
{
var metadata = await _extractor.ExtractAsync(memoryStream, ct).ConfigureAwait(false);
memoryStream.Position = 0;
if (metadata.Success)
{
// Prefer purl, fallback to bom-ref
bomRef = metadata.PrimaryPurl ?? metadata.PrimaryBomRef ?? GenerateFallbackBomRef(legacy);
serialNumber = metadata.SerialNumber ?? GenerateFallbackSerial(legacy);
}
else
{
// Fallback for malformed SBOMs
bomRef = GenerateFallbackBomRef(legacy);
serialNumber = GenerateFallbackSerial(legacy);
}
}
else
{
// Non-SBOM content: use legacy path to generate bom-ref
bomRef = GenerateFallbackBomRef(legacy);
serialNumber = GenerateFallbackSerial(legacy);
}
// Generate artifact ID from legacy path
var artifactId = GenerateArtifactId(legacy);
// Check if already exists
if (_options.SkipExisting)
{
var exists = await _targetStore.ExistsAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
if (exists)
{
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = null,
Success = true,
Skipped = true,
BomRef = bomRef,
SerialNumber = serialNumber
};
}
}
// Store in unified store
var storeRequest = new ArtifactStoreRequest
{
BomRef = bomRef,
SerialNumber = serialNumber,
ArtifactId = artifactId,
Content = memoryStream,
ContentType = legacy.ContentType,
Type = InferArtifactType(legacy.ContentType, legacy.LegacyPath),
TenantId = legacy.TenantId,
Overwrite = false,
Metadata = new Dictionary<string, string>
{
["legacy_path"] = legacy.LegacyPath,
["migrated_at"] = DateTimeOffset.UtcNow.ToString("O")
}
};
var result = await _targetStore.StoreAsync(storeRequest, ct).ConfigureAwait(false);
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = result.StorageKey,
Success = result.Success,
Skipped = false,
BomRef = bomRef,
SerialNumber = serialNumber,
ErrorMessage = result.ErrorMessage
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to migrate {Path}", legacy.LegacyPath);
return new ArtifactMigrationResult
{
OriginalPath = legacy.LegacyPath,
NewPath = null,
Success = false,
Skipped = false,
ErrorMessage = ex.Message
};
}
}
private static bool IsSbomContent(string contentType)
{
return contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)
|| contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase)
|| contentType == "application/json"; // Assume JSON might be SBOM
}
private static string GenerateFallbackBomRef(LegacyArtifact legacy)
{
// Generate a purl-like reference from the legacy path
var sanitized = legacy.LegacyPath
.Replace("\\", "/")
.Replace("tenants/", "")
.Replace("bundles/", "");
return $"pkg:stella/legacy/{Uri.EscapeDataString(sanitized)}";
}
private static string GenerateFallbackSerial(LegacyArtifact legacy)
{
// Generate deterministic serial from path
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(legacy.LegacyPath));
var guid = new Guid(hash.Take(16).ToArray());
return $"urn:uuid:{guid}";
}
private static string GenerateArtifactId(LegacyArtifact legacy)
{
// Extract filename from path or generate UUID
var fileName = Path.GetFileNameWithoutExtension(legacy.LegacyPath);
return !string.IsNullOrEmpty(fileName) ? fileName : Guid.NewGuid().ToString();
}
private static ArtifactType InferArtifactType(string contentType, string path)
{
if (contentType.Contains("cyclonedx") || contentType.Contains("spdx"))
return ArtifactType.Sbom;
if (contentType.Contains("vex") || contentType.Contains("openvex"))
return ArtifactType.Vex;
if (contentType.Contains("dsse") || path.Contains("dsse"))
return ArtifactType.DsseEnvelope;
if (path.Contains("rekor"))
return ArtifactType.RekorProof;
if (path.Contains("verdict"))
return ArtifactType.Verdict;
return ArtifactType.Unknown;
}
}

View File

@@ -0,0 +1,60 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationState.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Tracks migration progress state
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
internal sealed class ArtifactMigrationState
{
private readonly TimeProvider _timeProvider;
public ArtifactMigrationState(int totalItems, TimeProvider timeProvider)
{
ArgumentNullException.ThrowIfNull(timeProvider);
_timeProvider = timeProvider;
TotalItems = totalItems;
StartedAt = _timeProvider.GetUtcNow();
LastUpdateAt = StartedAt;
}
public int TotalItems { get; }
public int ProcessedItems { get; private set; }
public int SuccessCount { get; private set; }
public int FailureCount { get; private set; }
public int SkippedCount { get; private set; }
public DateTimeOffset StartedAt { get; }
public DateTimeOffset LastUpdateAt { get; private set; }
public MigrationProgress Apply(ArtifactMigrationResult result)
{
ProcessedItems++;
if (result.Success && !result.Skipped)
{
SuccessCount++;
}
else if (result.Skipped)
{
SkippedCount++;
}
else
{
FailureCount++;
}
LastUpdateAt = _timeProvider.GetUtcNow();
return new MigrationProgress
{
TotalItems = TotalItems,
ProcessedItems = ProcessedItems,
SuccessCount = SuccessCount,
FailureCount = FailureCount,
SkippedCount = SkippedCount,
StartedAt = StartedAt,
LastUpdateAt = LastUpdateAt,
CurrentItem = result.OriginalPath
};
}
}

View File

@@ -0,0 +1,38 @@
// -----------------------------------------------------------------------------
// ArtifactTenantContext.cs
// Sprint: SPRINT_20260130_002_Tools_csproj_remediation_solid_review
// Task: REMED-05 - Remove service locator usage
// Description: Tenant context abstraction for artifact infrastructure
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public interface IArtifactTenantContext
{
Guid TenantId { get; }
string TenantIdValue { get; }
}
public sealed class ArtifactTenantContext : IArtifactTenantContext
{
public const string DefaultTenantIdValue = "00000000-0000-0000-0000-000000000001";
public static readonly Guid DefaultTenantId = Guid.Parse(DefaultTenantIdValue);
public ArtifactTenantContext()
: this(DefaultTenantId)
{
}
public ArtifactTenantContext(Guid tenantId)
{
if (tenantId == Guid.Empty)
{
throw new ArgumentException("Tenant id must be non-empty.", nameof(tenantId));
}
TenantId = tenantId;
TenantIdValue = tenantId.ToString("D");
}
public Guid TenantId { get; }
public string TenantIdValue { get; }
}

View File

@@ -0,0 +1,66 @@
// -----------------------------------------------------------------------------
// IArtifactIndexRepository.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Artifact index repository abstraction
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL repository for artifact index.
/// Provides efficient bom-ref based querying.
/// </summary>
public interface IArtifactIndexRepository
{
/// <summary>
/// Indexes a stored artifact.
/// </summary>
Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by bom-ref.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by bom-ref and serial number.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default);
/// <summary>
/// Gets a specific artifact index entry.
/// </summary>
Task<ArtifactIndexEntry?> GetAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default);
/// <summary>
/// Removes an artifact from the index.
/// </summary>
Task<bool> RemoveAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default);
/// <summary>
/// Finds artifacts by SHA-256 hash.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default);
/// <summary>
/// Finds artifacts by type.
/// </summary>
Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default);
}

View File

@@ -0,0 +1,28 @@
// -----------------------------------------------------------------------------
// ILegacyArtifactSource.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Legacy artifact source abstraction
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Legacy artifact source for migration.
/// </summary>
public interface ILegacyArtifactSource
{
/// <summary>
/// Enumerates all artifacts in the legacy store.
/// </summary>
IAsyncEnumerable<LegacyArtifact> EnumerateAsync(CancellationToken ct = default);
/// <summary>
/// Gets the total count of artifacts.
/// </summary>
Task<int> CountAsync(CancellationToken ct = default);
/// <summary>
/// Reads content from a legacy path.
/// </summary>
Task<Stream?> ReadAsync(string legacyPath, CancellationToken ct = default);
}

View File

@@ -0,0 +1,26 @@
// -----------------------------------------------------------------------------
// IS3UnifiedClient.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: S3 client abstraction for unified artifact store
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// S3 client interface for dependency injection.
/// </summary>
public interface IS3UnifiedClient
{
Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken ct);
Task PutObjectAsync(
string bucketName,
string key,
Stream content,
string contentType,
IDictionary<string, string> metadata,
CancellationToken ct);
Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken ct);
Task<IDictionary<string, string>?> GetObjectMetadataAsync(string bucketName, string key, CancellationToken ct);
Task DeleteObjectAsync(string bucketName, string key, CancellationToken ct);
Task<IReadOnlyList<string>> ListObjectsAsync(string bucketName, string prefix, CancellationToken ct);
}

View File

@@ -0,0 +1,86 @@
// -----------------------------------------------------------------------------
// InMemoryArtifactIndexRepository.Read.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: In-memory artifact index read operations
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class InMemoryArtifactIndexRepository
{
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.BomRef == bomRef && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.BomRef == bomRef && e.SerialNumber == serialNumber && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<ArtifactIndexEntry?> GetAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
lock (_lock)
{
var entry = _entries.FirstOrDefault(e =>
e.BomRef == bomRef &&
e.SerialNumber == serialNumber &&
e.ArtifactId == artifactId &&
!e.IsDeleted);
return Task.FromResult(entry);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.Sha256 == sha256 && !e.IsDeleted)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
/// <inheritdoc />
public Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default)
{
lock (_lock)
{
var result = _entries
.Where(e => e.Type == type && e.TenantId == tenantId && !e.IsDeleted)
.Take(limit)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(result);
}
}
}

View File

@@ -0,0 +1,59 @@
// -----------------------------------------------------------------------------
// InMemoryArtifactIndexRepository.Write.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: In-memory artifact index write operations
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class InMemoryArtifactIndexRepository
{
/// <inheritdoc />
public Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(entry);
lock (_lock)
{
_entries.RemoveAll(e =>
e.BomRef == entry.BomRef &&
e.SerialNumber == entry.SerialNumber &&
e.ArtifactId == entry.ArtifactId);
_entries.Add(entry);
}
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<bool> RemoveAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
lock (_lock)
{
var entry = _entries.FirstOrDefault(e =>
e.BomRef == bomRef &&
e.SerialNumber == serialNumber &&
e.ArtifactId == artifactId &&
!e.IsDeleted);
if (entry != null)
{
var now = _timeProvider.GetUtcNow();
var index = _entries.IndexOf(entry);
_entries[index] = entry with
{
IsDeleted = true,
DeletedAt = now,
UpdatedAt = now
};
return Task.FromResult(true);
}
return Task.FromResult(false);
}
}
}

View File

@@ -0,0 +1,24 @@
// -----------------------------------------------------------------------------
// InMemoryArtifactIndexRepository.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: In-memory artifact index repository for testing
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// In-memory implementation for testing.
/// </summary>
public sealed partial class InMemoryArtifactIndexRepository : IArtifactIndexRepository
{
private readonly List<ArtifactIndexEntry> _entries = new();
private readonly object _lock = new();
private readonly TimeProvider _timeProvider;
public InMemoryArtifactIndexRepository(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
}

View File

@@ -0,0 +1,98 @@
using System.Security.Cryptography;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed class InMemoryArtifactStore : IArtifactStore
{
private readonly Dictionary<string, (byte[] Content, ArtifactMetadata Metadata)> _artifacts = new();
private readonly object _lock = new();
private readonly TimeProvider _timeProvider;
public InMemoryArtifactStore(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(request);
var key = $"{request.BomRef}/{request.SerialNumber}/{request.ArtifactId}";
using var ms = new MemoryStream();
request.Content.CopyTo(ms);
var content = ms.ToArray();
var sha256 = Convert.ToHexStringLower(SHA256.HashData(content));
var metadata = new ArtifactMetadata
{
StorageKey = key,
BomRef = request.BomRef,
SerialNumber = request.SerialNumber,
ArtifactId = request.ArtifactId,
ContentType = request.ContentType,
SizeBytes = content.Length,
Sha256 = sha256,
CreatedAt = _timeProvider.GetUtcNow(),
Type = request.Type,
TenantId = request.TenantId
};
lock (_lock)
{
var wasCreated = !_artifacts.ContainsKey(key);
_artifacts[key] = (content, metadata);
return Task.FromResult(ArtifactStoreResult.Succeeded(key, sha256, content.Length, wasCreated));
}
}
public Task<ArtifactReadResult> ReadAsync(string bomRef, string? serialNumber, string? artifactId, CancellationToken ct = default)
{
lock (_lock)
{
var matching = _artifacts
.Where(kvp => kvp.Value.Metadata.BomRef == bomRef)
.Where(kvp => serialNumber == null || kvp.Value.Metadata.SerialNumber == serialNumber)
.Where(kvp => artifactId == null || kvp.Value.Metadata.ArtifactId == artifactId)
.FirstOrDefault();
if (matching.Value.Content == null)
{
return Task.FromResult(ArtifactReadResult.NotFound());
}
return Task.FromResult(ArtifactReadResult.Succeeded(
new MemoryStream(matching.Value.Content),
matching.Value.Metadata));
}
}
public Task<IReadOnlyList<ArtifactMetadata>> ListAsync(string bomRef, string? serialNumber = null, CancellationToken ct = default)
{
lock (_lock)
{
var result = _artifacts.Values
.Where(x => x.Metadata.BomRef == bomRef)
.Where(x => serialNumber == null || x.Metadata.SerialNumber == serialNumber)
.Select(x => x.Metadata)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactMetadata>>(result);
}
}
public Task<bool> ExistsAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.ContainsKey(key));
}
}
public Task<ArtifactMetadata?> GetMetadataAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.TryGetValue(key, out var entry) ? entry.Metadata : null);
}
}
public Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.Remove(key));
}
}
}

View File

@@ -0,0 +1,20 @@
// -----------------------------------------------------------------------------
// LegacyArtifact.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Legacy artifact descriptor for migration
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Legacy artifact descriptor.
/// </summary>
public sealed record LegacyArtifact
{
public required string LegacyPath { get; init; }
public required string ContentType { get; init; }
public required long SizeBytes { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public Guid TenantId { get; init; }
public string? BundleId { get; init; }
}

View File

@@ -0,0 +1,25 @@
// -----------------------------------------------------------------------------
// MigrationProgress.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Migration progress reporting model
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Progress report for migration.
/// </summary>
public sealed record MigrationProgress
{
public int TotalItems { get; init; }
public int ProcessedItems { get; init; }
public int SuccessCount { get; init; }
public int FailureCount { get; init; }
public int SkippedCount { get; init; }
public DateTimeOffset StartedAt { get; init; }
public DateTimeOffset LastUpdateAt { get; init; }
public string CurrentItem { get; init; } = string.Empty;
public TimeSpan EstimatedRemaining => ProcessedItems > 0
? TimeSpan.FromSeconds((TotalItems - ProcessedItems) * (LastUpdateAt - StartedAt).TotalSeconds / ProcessedItems)
: TimeSpan.Zero;
}

View File

@@ -0,0 +1,81 @@
// -----------------------------------------------------------------------------
// PostgresArtifactIndexRepository.Find.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Query operations for the artifact repository
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class PostgresArtifactIndexRepository
{
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
{
return await QueryAsync(_tenantKey, ArtifactIndexSql.SelectByBomRef, cmd =>
{
AddParameter(cmd, "tenant_id", _tenantId);
AddParameter(cmd, "bom_ref", bomRef);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default)
{
return await QueryAsync(_tenantKey, ArtifactIndexSql.SelectByBomRefAndSerial, cmd =>
{
AddParameter(cmd, "tenant_id", _tenantId);
AddParameter(cmd, "bom_ref", bomRef);
AddParameter(cmd, "serial_number", serialNumber);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
{
return await QueryAsync(_tenantKey, ArtifactIndexSql.SelectBySha256, cmd =>
{
AddParameter(cmd, "sha256", sha256);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default)
{
var tenantKey = tenantId.ToString("D");
return await QueryAsync(tenantKey, ArtifactIndexSql.SelectByType, cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "artifact_type", type.ToString());
AddParameter(cmd, "limit", limit);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <summary>
/// Finds artifacts within a time range.
/// </summary>
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTimeRangeAsync(
Guid tenantId,
DateTimeOffset from,
DateTimeOffset to,
int limit = 1000,
CancellationToken ct = default)
{
var tenantKey = tenantId.ToString("D");
return await QueryAsync(tenantKey, ArtifactIndexSql.SelectByTimeRange, cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "from", from);
AddParameter(cmd, "to", to);
AddParameter(cmd, "limit", limit);
}, MapEntry, ct).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,33 @@
// -----------------------------------------------------------------------------
// PostgresArtifactIndexRepository.Index.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Index write operations for the artifact repository
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class PostgresArtifactIndexRepository
{
/// <inheritdoc />
public async Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(entry);
await using var connection = await DataSource.OpenConnectionAsync(_tenantKey, "writer", ct)
.ConfigureAwait(false);
await using var command = CreateCommand(ArtifactIndexSql.Insert, connection);
AddParameter(command, "id", entry.Id);
AddParameter(command, "tenant_id", entry.TenantId);
AddParameter(command, "bom_ref", entry.BomRef);
AddParameter(command, "serial_number", entry.SerialNumber);
AddParameter(command, "artifact_id", entry.ArtifactId);
AddParameter(command, "storage_key", entry.StorageKey);
AddParameter(command, "artifact_type", entry.Type.ToString());
AddParameter(command, "content_type", entry.ContentType);
AddParameter(command, "sha256", entry.Sha256);
AddParameter(command, "size_bytes", entry.SizeBytes);
AddParameter(command, "created_at", entry.CreatedAt);
await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,39 @@
// -----------------------------------------------------------------------------
// PostgresArtifactIndexRepository.Mapping.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Row mapping helpers for artifact index repository
// -----------------------------------------------------------------------------
using Npgsql;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class PostgresArtifactIndexRepository
{
private static ArtifactIndexEntry MapEntry(NpgsqlDataReader reader)
{
var artifactTypeString = reader.GetString(6);
var artifactType = Enum.TryParse<ArtifactType>(artifactTypeString, out var parsedType)
? parsedType
: ArtifactType.Unknown;
return new ArtifactIndexEntry
{
Id = reader.GetGuid(0),
TenantId = reader.GetGuid(1),
BomRef = reader.GetString(2),
SerialNumber = reader.GetString(3),
ArtifactId = reader.GetString(4),
StorageKey = reader.GetString(5),
Type = artifactType,
ContentType = reader.GetString(7),
Sha256 = reader.GetString(8),
SizeBytes = reader.GetInt64(9),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
UpdatedAt = reader.IsDBNull(11) ? null : reader.GetFieldValue<DateTimeOffset>(11),
IsDeleted = reader.GetBoolean(12),
DeletedAt = reader.IsDBNull(13) ? null : reader.GetFieldValue<DateTimeOffset>(13)
};
}
}

View File

@@ -0,0 +1,60 @@
// -----------------------------------------------------------------------------
// PostgresArtifactIndexRepository.Mutate.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: Mutation operations for the artifact repository
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class PostgresArtifactIndexRepository
{
/// <inheritdoc />
public async Task<ArtifactIndexEntry?> GetAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var results = await QueryAsync(_tenantKey, ArtifactIndexSql.SelectByKey, cmd =>
{
AddParameter(cmd, "tenant_id", _tenantId);
AddParameter(cmd, "bom_ref", bomRef);
AddParameter(cmd, "serial_number", serialNumber);
AddParameter(cmd, "artifact_id", artifactId);
}, MapEntry, ct).ConfigureAwait(false);
return results.Count > 0 ? results[0] : null;
}
/// <inheritdoc />
public async Task<bool> RemoveAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var rowsAffected = await ExecuteAsync(_tenantKey, ArtifactIndexSql.UpdateSoftDelete, cmd =>
{
AddParameter(cmd, "tenant_id", _tenantId);
AddParameter(cmd, "bom_ref", bomRef);
AddParameter(cmd, "serial_number", serialNumber);
AddParameter(cmd, "artifact_id", artifactId);
}, ct).ConfigureAwait(false);
return rowsAffected > 0;
}
/// <summary>
/// Counts artifacts for a tenant.
/// </summary>
public async Task<int> CountAsync(Guid tenantId, CancellationToken ct = default)
{
var tenantKey = tenantId.ToString("D");
var result = await ExecuteScalarAsync<long>(tenantKey, ArtifactIndexSql.CountByTenant, cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
}, ct).ConfigureAwait(false);
return (int)result;
}
}

View File

@@ -4,307 +4,27 @@
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: PostgreSQL implementation of artifact index repository
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Artifact.Core;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL data source for the Artifact module.
/// </summary>
public sealed class ArtifactDataSource : DataSourceBase
{
public const string DefaultSchemaName = "evidence";
public ArtifactDataSource(
Microsoft.Extensions.Options.IOptions<StellaOps.Infrastructure.Postgres.Options.PostgresOptions> options,
ILogger<ArtifactDataSource> logger)
: base(CreateOptions(options.Value), logger)
{
}
protected override string ModuleName => "Artifact";
private static StellaOps.Infrastructure.Postgres.Options.PostgresOptions CreateOptions(
StellaOps.Infrastructure.Postgres.Options.PostgresOptions baseOptions)
{
if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
{
baseOptions.SchemaName = DefaultSchemaName;
}
return baseOptions;
}
}
/// <summary>
/// PostgreSQL implementation of <see cref="IArtifactIndexRepository"/>.
/// </summary>
public sealed class PostgresArtifactIndexRepository : RepositoryBase<ArtifactDataSource>, IArtifactIndexRepository
public sealed partial class PostgresArtifactIndexRepository : RepositoryBase<ArtifactDataSource>, IArtifactIndexRepository
{
private readonly string _tenantId;
private readonly Guid _tenantId;
private readonly string _tenantKey;
public PostgresArtifactIndexRepository(
ArtifactDataSource dataSource,
ILogger<PostgresArtifactIndexRepository> logger,
string tenantId = "default")
IArtifactTenantContext tenantContext)
: base(dataSource, logger)
{
_tenantId = tenantId;
}
/// <inheritdoc />
public async Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
{
const string sql = """
INSERT INTO evidence.artifact_index (
id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at
) VALUES (
@id, @tenant_id, @bom_ref, @serial_number, @artifact_id, @storage_key,
@artifact_type, @content_type, @sha256, @size_bytes, @created_at
)
ON CONFLICT (tenant_id, bom_ref, serial_number, artifact_id)
DO UPDATE SET
storage_key = EXCLUDED.storage_key,
artifact_type = EXCLUDED.artifact_type,
content_type = EXCLUDED.content_type,
sha256 = EXCLUDED.sha256,
size_bytes = EXCLUDED.size_bytes,
updated_at = NOW(),
is_deleted = FALSE,
deleted_at = NULL
""";
await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "id", entry.Id);
AddParameter(command, "tenant_id", entry.TenantId);
AddParameter(command, "bom_ref", entry.BomRef);
AddParameter(command, "serial_number", entry.SerialNumber);
AddParameter(command, "artifact_id", entry.ArtifactId);
AddParameter(command, "storage_key", entry.StorageKey);
AddParameter(command, "artifact_type", entry.Type.ToString());
AddParameter(command, "content_type", entry.ContentType);
AddParameter(command, "sha256", entry.Sha256);
AddParameter(command, "size_bytes", entry.SizeBytes);
AddParameter(command, "created_at", entry.CreatedAt);
await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND NOT is_deleted
ORDER BY created_at DESC
""";
return await QueryAsync(_tenantId, sql, cmd =>
{
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
AddParameter(cmd, "bom_ref", bomRef);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
string bomRef,
string serialNumber,
CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number AND NOT is_deleted
ORDER BY created_at DESC
""";
return await QueryAsync(_tenantId, sql, cmd =>
{
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
AddParameter(cmd, "bom_ref", bomRef);
AddParameter(cmd, "serial_number", serialNumber);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<ArtifactIndexEntry?> GetAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
AND artifact_id = @artifact_id AND NOT is_deleted
""";
var results = await QueryAsync(_tenantId, sql, cmd =>
{
AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
AddParameter(cmd, "bom_ref", bomRef);
AddParameter(cmd, "serial_number", serialNumber);
AddParameter(cmd, "artifact_id", artifactId);
}, MapEntry, ct).ConfigureAwait(false);
return results.Count > 0 ? results[0] : null;
}
/// <inheritdoc />
public async Task<bool> RemoveAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
const string sql = """
UPDATE evidence.artifact_index
SET is_deleted = TRUE, deleted_at = NOW(), updated_at = NOW()
WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
AND artifact_id = @artifact_id AND NOT is_deleted
""";
await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", Guid.Parse(_tenantId));
AddParameter(command, "bom_ref", bomRef);
AddParameter(command, "serial_number", serialNumber);
AddParameter(command, "artifact_id", artifactId);
var rowsAffected = await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
return rowsAffected > 0;
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE sha256 = @sha256 AND NOT is_deleted
ORDER BY created_at DESC
LIMIT 100
""";
return await QueryAsync(_tenantId, sql, cmd =>
{
AddParameter(cmd, "sha256", sha256);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
ArtifactType type,
Guid tenantId,
int limit = 100,
CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND artifact_type = @artifact_type AND NOT is_deleted
ORDER BY created_at DESC
LIMIT @limit
""";
return await QueryAsync(tenantId.ToString(), sql, cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "artifact_type", type.ToString());
AddParameter(cmd, "limit", limit);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <summary>
/// Finds artifacts within a time range.
/// </summary>
public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTimeRangeAsync(
Guid tenantId,
DateTimeOffset from,
DateTimeOffset to,
int limit = 1000,
CancellationToken ct = default)
{
const string sql = """
SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
is_deleted, deleted_at
FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND created_at >= @from AND created_at < @to AND NOT is_deleted
ORDER BY created_at DESC
LIMIT @limit
""";
return await QueryAsync(tenantId.ToString(), sql, cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
AddParameter(cmd, "from", from);
AddParameter(cmd, "to", to);
AddParameter(cmd, "limit", limit);
}, MapEntry, ct).ConfigureAwait(false);
}
/// <summary>
/// Counts artifacts for a tenant.
/// </summary>
public async Task<int> CountAsync(Guid tenantId, CancellationToken ct = default)
{
const string sql = """
SELECT COUNT(*) FROM evidence.artifact_index
WHERE tenant_id = @tenant_id AND NOT is_deleted
""";
await using var connection = await DataSource.OpenConnectionAsync(tenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
AddParameter(command, "tenant_id", tenantId);
var result = await command.ExecuteScalarAsync(ct).ConfigureAwait(false);
return Convert.ToInt32(result);
}
private static ArtifactIndexEntry MapEntry(NpgsqlDataReader reader)
{
var artifactTypeString = reader.GetString(6);
var artifactType = Enum.TryParse<ArtifactType>(artifactTypeString, out var at) ? at : ArtifactType.Unknown;
return new ArtifactIndexEntry
{
Id = reader.GetGuid(0),
TenantId = reader.GetGuid(1),
BomRef = reader.GetString(2),
SerialNumber = reader.GetString(3),
ArtifactId = reader.GetString(4),
StorageKey = reader.GetString(5),
Type = artifactType,
ContentType = reader.GetString(7),
Sha256 = reader.GetString(8),
SizeBytes = reader.GetInt64(9),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
UpdatedAt = reader.IsDBNull(11) ? null : reader.GetFieldValue<DateTimeOffset>(11),
IsDeleted = reader.GetBoolean(12),
DeletedAt = reader.IsDBNull(13) ? null : reader.GetFieldValue<DateTimeOffset>(13)
};
ArgumentNullException.ThrowIfNull(tenantContext);
_tenantId = tenantContext.TenantId;
_tenantKey = tenantContext.TenantIdValue;
}
}

View File

@@ -0,0 +1,34 @@
// -----------------------------------------------------------------------------
// RetentionPolicy.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Retention policy settings for artifacts
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Retention policy for artifact types.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
public sealed class RetentionPolicy
{
/// <summary>
/// Number of days to retain artifacts.
/// </summary>
public int RetentionDays { get; set; } = 365 * 5;
/// <summary>
/// Whether to delete artifacts after expiry (true) or just mark expired (false).
/// </summary>
public bool DeleteAfterExpiry { get; set; } = false;
/// <summary>
/// Optional S3 storage class to transition to after specified days.
/// </summary>
public string? TransitionStorageClass { get; set; }
/// <summary>
/// Days after creation to transition to TransitionStorageClass.
/// </summary>
public int? TransitionAfterDays { get; set; }
}

View File

@@ -0,0 +1,41 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.Exists.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Existence and delete operations for the artifact store
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
/// <inheritdoc />
public async Task<bool> ExistsAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct)
.ConfigureAwait(false);
return entry != null;
}
/// <inheritdoc />
public async Task<bool> DeleteAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var removed = await _indexRepository.RemoveAsync(bomRef, serialNumber, artifactId, ct)
.ConfigureAwait(false);
if (removed)
{
_logger.LogInformation("Soft-deleted artifact {ArtifactId} for bom-ref {BomRef}", artifactId, bomRef);
}
return removed;
}
}

View File

@@ -0,0 +1,48 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.Helpers.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Helper methods for S3-backed artifact store
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
private string BuildFullKey(string relativePath)
{
var prefix = string.IsNullOrWhiteSpace(_options.Prefix)
? ""
: _options.Prefix.TrimEnd('/') + "/";
return $"{prefix}{relativePath}";
}
private static string ComputeSha256(byte[] content)
{
var hashBytes = SHA256.HashData(content);
return Convert.ToHexStringLower(hashBytes);
}
private static Dictionary<string, string> BuildS3Metadata(ArtifactStoreRequest request)
{
var metadata = new Dictionary<string, string>
{
["x-amz-meta-bomref"] = request.BomRef,
["x-amz-meta-serialnumber"] = request.SerialNumber,
["x-amz-meta-artifactid"] = request.ArtifactId,
["x-amz-meta-artifacttype"] = request.Type.ToString()
};
if (request.Metadata != null)
{
foreach (var kvp in request.Metadata)
{
metadata[$"x-amz-meta-{kvp.Key.ToLowerInvariant()}"] = kvp.Value;
}
}
return metadata;
}
}

View File

@@ -0,0 +1,33 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.List.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: List operations for the unified artifact store
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactMetadata>> ListAsync(
string bomRef,
string? serialNumber = null,
CancellationToken ct = default)
{
IReadOnlyList<ArtifactIndexEntry> entries;
if (serialNumber != null)
{
entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct)
.ConfigureAwait(false);
}
else
{
entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
}
return entries.Select(CreateMetadata).ToList();
}
}

View File

@@ -0,0 +1,41 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.Metadata.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Metadata projection helpers for artifact store
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
/// <inheritdoc />
public async Task<ArtifactMetadata?> GetMetadataAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct)
.ConfigureAwait(false);
return entry == null ? null : CreateMetadata(entry);
}
private static ArtifactMetadata CreateMetadata(ArtifactIndexEntry entry)
{
return new ArtifactMetadata
{
StorageKey = entry.StorageKey,
BomRef = entry.BomRef,
SerialNumber = entry.SerialNumber,
ArtifactId = entry.ArtifactId,
ContentType = entry.ContentType,
SizeBytes = entry.SizeBytes,
Sha256 = entry.Sha256,
CreatedAt = entry.CreatedAt,
Type = entry.Type,
TenantId = entry.TenantId
};
}
}

View File

@@ -0,0 +1,66 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.Read.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Read operations for the unified artifact store
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
/// <inheritdoc />
public async Task<ArtifactReadResult> ReadAsync(
string bomRef,
string? serialNumber,
string? artifactId,
CancellationToken ct = default)
{
try
{
var entry = await ResolveEntryAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
if (entry == null)
{
return ArtifactReadResult.NotFound($"No artifact found for bom-ref: {bomRef}");
}
var stream = await _client.GetObjectAsync(_options.BucketName, entry.StorageKey, ct)
.ConfigureAwait(false);
if (stream == null)
{
return ArtifactReadResult.NotFound($"Object not found in S3: {entry.StorageKey}");
}
return ArtifactReadResult.Succeeded(stream, CreateMetadata(entry));
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to read artifact for bom-ref {BomRef}", bomRef);
return ArtifactReadResult.NotFound(ex.Message);
}
}
private async Task<ArtifactIndexEntry?> ResolveEntryAsync(
string bomRef,
string? serialNumber,
string? artifactId,
CancellationToken ct)
{
if (serialNumber != null && artifactId != null)
{
return await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
}
if (serialNumber != null)
{
var entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct)
.ConfigureAwait(false);
return entries.FirstOrDefault();
}
var candidates = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
return candidates.FirstOrDefault();
}
}

View File

@@ -0,0 +1,56 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStore.Store.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Store operations for the unified artifact store
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
/// <inheritdoc />
public async Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
var storagePath = BomRefEncoder.BuildPath(request.BomRef, request.SerialNumber, request.ArtifactId);
var fullKey = BuildFullKey(storagePath);
var existing = await TryGetExistingAsync(request, fullKey, ct).ConfigureAwait(false);
if (existing != null)
{
return existing;
}
var (contentBytes, sha256) = await ReadContentAsync(request, ct).ConfigureAwait(false);
if (contentBytes.Length > _options.MaxArtifactSizeBytes)
{
return ArtifactStoreResult.Failed($"Artifact exceeds maximum size of {_options.MaxArtifactSizeBytes} bytes");
}
var actualStorageKey = await WriteContentAsync(request, fullKey, contentBytes, sha256, ct)
.ConfigureAwait(false);
var indexEntry = BuildIndexEntry(request, actualStorageKey, sha256, contentBytes.Length);
await _indexRepository.IndexAsync(indexEntry, ct).ConfigureAwait(false);
_logger.LogInformation(
"Stored artifact {ArtifactId} for bom-ref {BomRef} at {Key} ({Size} bytes)",
request.ArtifactId,
request.BomRef,
actualStorageKey,
contentBytes.Length);
return ArtifactStoreResult.Succeeded(actualStorageKey, sha256, contentBytes.Length, wasCreated: true);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store artifact {ArtifactId}", request.ArtifactId);
return ArtifactStoreResult.Failed(ex.Message);
}
}
}

View File

@@ -0,0 +1,100 @@
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
public sealed partial class S3UnifiedArtifactStore
{
private async Task<ArtifactStoreResult?> TryGetExistingAsync(
ArtifactStoreRequest request,
string fullKey,
CancellationToken ct)
{
if (request.Overwrite || _options.AllowOverwrite)
{
return null;
}
var exists = await _client.ObjectExistsAsync(_options.BucketName, fullKey, ct).ConfigureAwait(false);
if (!exists)
{
return null;
}
_logger.LogInformation("Artifact already exists at {Key}, skipping", fullKey);
var entry = await _indexRepository.GetAsync(
request.BomRef,
request.SerialNumber,
request.ArtifactId,
ct).ConfigureAwait(false);
return entry == null
? null
: ArtifactStoreResult.Succeeded(fullKey, entry.Sha256, entry.SizeBytes, wasCreated: false);
}
private async Task<(byte[] ContentBytes, string Sha256)> ReadContentAsync(
ArtifactStoreRequest request,
CancellationToken ct)
{
using var memoryStream = new MemoryStream();
await request.Content.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
var contentBytes = memoryStream.ToArray();
var sha256 = ComputeSha256(contentBytes);
return (contentBytes, sha256);
}
private async Task<string> WriteContentAsync(
ArtifactStoreRequest request,
string fullKey,
byte[] contentBytes,
string sha256,
CancellationToken ct)
{
if (_options.EnableDeduplication)
{
var existingBySha = await _indexRepository.FindBySha256Async(sha256, ct).ConfigureAwait(false);
if (existingBySha.Count > 0)
{
var existingKey = existingBySha[0].StorageKey;
_logger.LogInformation(
"Deduplicating artifact {ArtifactId} - content matches {ExistingKey}",
request.ArtifactId,
existingKey);
return existingKey;
}
}
await UploadAsync(fullKey, request, contentBytes, ct).ConfigureAwait(false);
return fullKey;
}
private async Task UploadAsync(string key, ArtifactStoreRequest request, byte[] contentBytes, CancellationToken ct)
{
using var uploadStream = new MemoryStream(contentBytes);
var metadata = BuildS3Metadata(request);
await _client.PutObjectAsync(
_options.BucketName,
key,
uploadStream,
request.ContentType,
metadata,
ct).ConfigureAwait(false);
}
private ArtifactIndexEntry BuildIndexEntry(
ArtifactStoreRequest request,
string storageKey,
string sha256,
long sizeBytes)
{
var now = _timeProvider.GetUtcNow();
return new ArtifactIndexEntry
{
Id = _guidProvider.NewGuid(),
TenantId = request.TenantId,
BomRef = request.BomRef,
SerialNumber = request.SerialNumber,
ArtifactId = request.ArtifactId,
StorageKey = storageKey,
Type = request.Type,
ContentType = request.ContentType,
Sha256 = sha256,
SizeBytes = sizeBytes,
CreatedAt = now
};
}
}

View File

@@ -1,139 +1,42 @@
// -----------------------------------------------------------------------------
// S3ArtifactStore.cs
// S3UnifiedArtifactStore.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: S3-backed implementation of unified artifact store
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Artifact.Core;
using System.Security.Cryptography;
using StellaOps.Determinism;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Configuration options for S3-backed artifact store.
/// </summary>
public sealed class S3UnifiedArtifactStoreOptions
{
/// <summary>
/// S3 bucket name.
/// </summary>
public string BucketName { get; set; } = string.Empty;
/// <summary>
/// Path prefix within the bucket.
/// </summary>
public string Prefix { get; set; } = "artifacts";
/// <summary>
/// Whether to use content-addressable storage for deduplication.
/// </summary>
public bool EnableDeduplication { get; set; } = true;
/// <summary>
/// Whether to store metadata as sidecar JSON files.
/// </summary>
public bool UseSidecarMetadata { get; set; } = false;
/// <summary>
/// Whether to overwrite existing artifacts.
/// </summary>
public bool AllowOverwrite { get; set; } = false;
/// <summary>
/// Maximum artifact size in bytes.
/// </summary>
public long MaxArtifactSizeBytes { get; set; } = 100 * 1024 * 1024; // 100MB
/// <summary>
/// Retention policies per artifact type. Key is ArtifactType enum name.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
public Dictionary<string, RetentionPolicy> RetentionPolicies { get; set; } = new()
{
["Sbom"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false }, // 7 years
["Vex"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["Dsse"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["RekorProof"] = new RetentionPolicy { RetentionDays = 365 * 10, DeleteAfterExpiry = false }, // 10 years
["Attestation"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["BuildLog"] = new RetentionPolicy { RetentionDays = 365, DeleteAfterExpiry = true }, // 1 year
["ScanResult"] = new RetentionPolicy { RetentionDays = 365 * 2, DeleteAfterExpiry = true }, // 2 years
["Temporary"] = new RetentionPolicy { RetentionDays = 30, DeleteAfterExpiry = true }
};
/// <summary>
/// Default retention policy for unspecified artifact types.
/// </summary>
public RetentionPolicy DefaultRetentionPolicy { get; set; } = new()
{
RetentionDays = 365 * 5, // 5 years default
DeleteAfterExpiry = false
};
}
/// <summary>
/// Retention policy for artifact types.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
public sealed class RetentionPolicy
{
/// <summary>
/// Number of days to retain artifacts.
/// </summary>
public int RetentionDays { get; set; } = 365 * 5;
/// <summary>
/// Whether to delete artifacts after expiry (true) or just mark expired (false).
/// </summary>
public bool DeleteAfterExpiry { get; set; } = false;
/// <summary>
/// Optional S3 storage class to transition to after specified days.
/// </summary>
public string? TransitionStorageClass { get; set; }
/// <summary>
/// Days after creation to transition to TransitionStorageClass.
/// </summary>
public int? TransitionAfterDays { get; set; }
}
/// <summary>
/// S3 client interface for dependency injection.
/// </summary>
public interface IS3UnifiedClient
{
Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken ct);
Task PutObjectAsync(string bucketName, string key, Stream content, string contentType, IDictionary<string, string> metadata, CancellationToken ct);
Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken ct);
Task<IDictionary<string, string>?> GetObjectMetadataAsync(string bucketName, string key, CancellationToken ct);
Task DeleteObjectAsync(string bucketName, string key, CancellationToken ct);
Task<IReadOnlyList<string>> ListObjectsAsync(string bucketName, string prefix, CancellationToken ct);
}
/// <summary>
/// S3-backed implementation of <see cref="IArtifactStore"/>.
/// Supports content deduplication via SHA-256 and the unified path convention.
/// </summary>
public sealed class S3UnifiedArtifactStore : IArtifactStore
public sealed partial class S3UnifiedArtifactStore : IArtifactStore
{
private readonly IS3UnifiedClient _client;
private readonly IArtifactIndexRepository _indexRepository;
private readonly S3UnifiedArtifactStoreOptions _options;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ILogger<S3UnifiedArtifactStore> _logger;
public S3UnifiedArtifactStore(
IS3UnifiedClient client,
IArtifactIndexRepository indexRepository,
IOptions<S3UnifiedArtifactStoreOptions> options,
TimeProvider timeProvider,
IGuidProvider guidProvider,
ILogger<S3UnifiedArtifactStore> logger)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
_indexRepository = indexRepository ?? throw new ArgumentNullException(nameof(indexRepository));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
if (string.IsNullOrWhiteSpace(_options.BucketName))
@@ -141,290 +44,4 @@ public sealed class S3UnifiedArtifactStore : IArtifactStore
throw new ArgumentException("BucketName must be configured", nameof(options));
}
}
/// <inheritdoc />
public async Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
// Build the storage path using bom-ref convention
var storagePath = BomRefEncoder.BuildPath(request.BomRef, request.SerialNumber, request.ArtifactId);
var fullKey = BuildFullKey(storagePath);
// Check if artifact already exists
if (!request.Overwrite && !_options.AllowOverwrite)
{
var exists = await _client.ObjectExistsAsync(_options.BucketName, fullKey, ct).ConfigureAwait(false);
if (exists)
{
_logger.LogInformation("Artifact already exists at {Key}, skipping", fullKey);
// Return existing metadata
var existingEntry = await _indexRepository.GetAsync(
request.BomRef, request.SerialNumber, request.ArtifactId, ct).ConfigureAwait(false);
if (existingEntry != null)
{
return ArtifactStoreResult.Succeeded(fullKey, existingEntry.Sha256, existingEntry.SizeBytes, wasCreated: false);
}
}
}
// Read content and compute hash
using var memoryStream = new MemoryStream();
await request.Content.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
var contentBytes = memoryStream.ToArray();
if (contentBytes.Length > _options.MaxArtifactSizeBytes)
{
return ArtifactStoreResult.Failed($"Artifact exceeds maximum size of {_options.MaxArtifactSizeBytes} bytes");
}
var sha256 = ComputeSha256(contentBytes);
var sizeBytes = contentBytes.Length;
// Check for content deduplication
string actualStorageKey = fullKey;
if (_options.EnableDeduplication)
{
var existingBySha = await _indexRepository.FindBySha256Async(sha256, ct).ConfigureAwait(false);
if (existingBySha.Count > 0)
{
// Content already exists, just create a new index entry pointing to same content
actualStorageKey = existingBySha[0].StorageKey;
_logger.LogInformation("Deduplicating artifact {ArtifactId} - content matches {ExistingKey}",
request.ArtifactId, actualStorageKey);
}
else
{
// Store new content
using var uploadStream = new MemoryStream(contentBytes);
var metadata = BuildS3Metadata(request);
await _client.PutObjectAsync(
_options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
}
}
else
{
// Store without deduplication
using var uploadStream = new MemoryStream(contentBytes);
var metadata = BuildS3Metadata(request);
await _client.PutObjectAsync(
_options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
}
// Index the artifact
var indexEntry = new ArtifactIndexEntry
{
Id = Guid.NewGuid(),
TenantId = request.TenantId,
BomRef = request.BomRef,
SerialNumber = request.SerialNumber,
ArtifactId = request.ArtifactId,
StorageKey = actualStorageKey,
Type = request.Type,
ContentType = request.ContentType,
Sha256 = sha256,
SizeBytes = sizeBytes,
CreatedAt = DateTimeOffset.UtcNow
};
await _indexRepository.IndexAsync(indexEntry, ct).ConfigureAwait(false);
_logger.LogInformation(
"Stored artifact {ArtifactId} for bom-ref {BomRef} at {Key} ({Size} bytes)",
request.ArtifactId, request.BomRef, actualStorageKey, sizeBytes);
return ArtifactStoreResult.Succeeded(actualStorageKey, sha256, sizeBytes, wasCreated: true);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store artifact {ArtifactId}", request.ArtifactId);
return ArtifactStoreResult.Failed(ex.Message);
}
}
/// <inheritdoc />
public async Task<ArtifactReadResult> ReadAsync(
string bomRef,
string? serialNumber,
string? artifactId,
CancellationToken ct = default)
{
try
{
ArtifactIndexEntry? entry;
if (serialNumber != null && artifactId != null)
{
entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
}
else if (serialNumber != null)
{
var entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
entry = entries.FirstOrDefault();
}
else
{
var entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
entry = entries.FirstOrDefault();
}
if (entry == null)
{
return ArtifactReadResult.NotFound($"No artifact found for bom-ref: {bomRef}");
}
var stream = await _client.GetObjectAsync(_options.BucketName, entry.StorageKey, ct).ConfigureAwait(false);
if (stream == null)
{
return ArtifactReadResult.NotFound($"Object not found in S3: {entry.StorageKey}");
}
var metadata = new ArtifactMetadata
{
StorageKey = entry.StorageKey,
BomRef = entry.BomRef,
SerialNumber = entry.SerialNumber,
ArtifactId = entry.ArtifactId,
ContentType = entry.ContentType,
SizeBytes = entry.SizeBytes,
Sha256 = entry.Sha256,
CreatedAt = entry.CreatedAt,
Type = entry.Type,
TenantId = entry.TenantId
};
return ArtifactReadResult.Succeeded(stream, metadata);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to read artifact for bom-ref {BomRef}", bomRef);
return ArtifactReadResult.NotFound(ex.Message);
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<ArtifactMetadata>> ListAsync(
string bomRef,
string? serialNumber = null,
CancellationToken ct = default)
{
IReadOnlyList<ArtifactIndexEntry> entries;
if (serialNumber != null)
{
entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
}
else
{
entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
}
return entries.Select(e => new ArtifactMetadata
{
StorageKey = e.StorageKey,
BomRef = e.BomRef,
SerialNumber = e.SerialNumber,
ArtifactId = e.ArtifactId,
ContentType = e.ContentType,
SizeBytes = e.SizeBytes,
Sha256 = e.Sha256,
CreatedAt = e.CreatedAt,
Type = e.Type,
TenantId = e.TenantId
}).ToList();
}
/// <inheritdoc />
public async Task<bool> ExistsAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
return entry != null;
}
/// <inheritdoc />
public async Task<ArtifactMetadata?> GetMetadataAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
if (entry == null)
{
return null;
}
return new ArtifactMetadata
{
StorageKey = entry.StorageKey,
BomRef = entry.BomRef,
SerialNumber = entry.SerialNumber,
ArtifactId = entry.ArtifactId,
ContentType = entry.ContentType,
SizeBytes = entry.SizeBytes,
Sha256 = entry.Sha256,
CreatedAt = entry.CreatedAt,
Type = entry.Type,
TenantId = entry.TenantId
};
}
/// <inheritdoc />
public async Task<bool> DeleteAsync(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct = default)
{
// Soft delete in index (don't delete from S3 for audit trail)
var removed = await _indexRepository.RemoveAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
if (removed)
{
_logger.LogInformation("Soft-deleted artifact {ArtifactId} for bom-ref {BomRef}", artifactId, bomRef);
}
return removed;
}
private string BuildFullKey(string relativePath)
{
var prefix = string.IsNullOrWhiteSpace(_options.Prefix) ? "" : _options.Prefix.TrimEnd('/') + "/";
return $"{prefix}{relativePath}";
}
private static string ComputeSha256(byte[] content)
{
var hashBytes = SHA256.HashData(content);
return Convert.ToHexStringLower(hashBytes);
}
private static Dictionary<string, string> BuildS3Metadata(ArtifactStoreRequest request)
{
var metadata = new Dictionary<string, string>
{
["x-amz-meta-bomref"] = request.BomRef,
["x-amz-meta-serialnumber"] = request.SerialNumber,
["x-amz-meta-artifactid"] = request.ArtifactId,
["x-amz-meta-artifacttype"] = request.Type.ToString()
};
if (request.Metadata != null)
{
foreach (var kvp in request.Metadata)
{
metadata[$"x-amz-meta-{kvp.Key.ToLowerInvariant()}"] = kvp.Value;
}
}
return metadata;
}
}

View File

@@ -0,0 +1,68 @@
// -----------------------------------------------------------------------------
// S3UnifiedArtifactStoreOptions.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: Configuration options for S3-backed artifact store
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Configuration options for S3-backed artifact store.
/// </summary>
public sealed class S3UnifiedArtifactStoreOptions
{
/// <summary>
/// S3 bucket name.
/// </summary>
public string BucketName { get; set; } = string.Empty;
/// <summary>
/// Path prefix within the bucket.
/// </summary>
public string Prefix { get; set; } = "artifacts";
/// <summary>
/// Whether to use content-addressable storage for deduplication.
/// </summary>
public bool EnableDeduplication { get; set; } = true;
/// <summary>
/// Whether to store metadata as sidecar JSON files.
/// </summary>
public bool UseSidecarMetadata { get; set; } = false;
/// <summary>
/// Whether to overwrite existing artifacts.
/// </summary>
public bool AllowOverwrite { get; set; } = false;
/// <summary>
/// Maximum artifact size in bytes.
/// </summary>
public long MaxArtifactSizeBytes { get; set; } = 100 * 1024 * 1024; // 100MB
/// <summary>
/// Retention policies per artifact type. Key is ArtifactType enum name.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
public Dictionary<string, RetentionPolicy> RetentionPolicies { get; set; } = new()
{
["Sbom"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["Vex"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["Dsse"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["RekorProof"] = new RetentionPolicy { RetentionDays = 365 * 10, DeleteAfterExpiry = false },
["Attestation"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
["BuildLog"] = new RetentionPolicy { RetentionDays = 365, DeleteAfterExpiry = true },
["ScanResult"] = new RetentionPolicy { RetentionDays = 365 * 2, DeleteAfterExpiry = true },
["Temporary"] = new RetentionPolicy { RetentionDays = 30, DeleteAfterExpiry = true }
};
/// <summary>
/// Default retention policy for unspecified artifact types.
/// </summary>
public RetentionPolicy DefaultRetentionPolicy { get; set; } = new()
{
RetentionDays = 365 * 5,
DeleteAfterExpiry = false
};
}

View File

@@ -8,8 +8,10 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Artifact.Core;
using StellaOps.Concelier.SbomIntegration.Parsing;
using StellaOps.Determinism;
using StellaOps.Infrastructure.Postgres.Options;
namespace StellaOps.Artifact.Infrastructure;
@@ -31,32 +33,23 @@ public static class ServiceCollectionExtensions
IConfiguration configuration,
string sectionName = "ArtifactStore")
{
ArgumentNullException.ThrowIfNull(configuration);
// Configure S3 store options
services.Configure<S3UnifiedArtifactStoreOptions>(configuration.GetSection($"{sectionName}:S3"));
// Configure PostgreSQL options for index
services.Configure<PostgresOptions>("Artifact", configuration.GetSection($"{sectionName}:Postgres"));
services.Configure<PostgresOptions>(ArtifactDataSource.OptionsName, configuration.GetSection($"{sectionName}:Postgres"));
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
services.TryAddScoped<IArtifactTenantContext, ArtifactTenantContext>();
// Register data source
services.AddSingleton<ArtifactDataSource>(sp =>
{
var options = sp.GetRequiredService<IOptionsSnapshot<PostgresOptions>>().Get("Artifact");
var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<ArtifactDataSource>>();
return new ArtifactDataSource(Options.Create(options), logger);
});
services.AddSingleton<ArtifactDataSource>();
// Register core services
services.AddSingleton<IParsedSbomParser, ParsedSbomParser>();
services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();
// Register index repository
services.AddScoped<IArtifactIndexRepository>(sp =>
{
var dataSource = sp.GetRequiredService<ArtifactDataSource>();
var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<PostgresArtifactIndexRepository>>();
// TODO: Get tenant ID from context
return new PostgresArtifactIndexRepository(dataSource, logger, "default");
});
services.AddScoped<IArtifactIndexRepository, PostgresArtifactIndexRepository>();
// Register S3 artifact store
services.AddScoped<IArtifactStore, S3UnifiedArtifactStore>();
@@ -70,6 +63,10 @@ public static class ServiceCollectionExtensions
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddInMemoryArtifactStore(this IServiceCollection services)
{
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
services.TryAddScoped<IArtifactTenantContext, ArtifactTenantContext>();
services.AddSingleton<IParsedSbomParser, ParsedSbomParser>();
services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();
services.AddSingleton<IArtifactIndexRepository, InMemoryArtifactIndexRepository>();
services.AddSingleton<IArtifactStore, InMemoryArtifactStore>();
@@ -90,113 +87,11 @@ public static class ServiceCollectionExtensions
var options = new ArtifactMigrationOptions();
configure?.Invoke(options);
services.AddSingleton(options);
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
services.AddScoped<ArtifactMigrationService>();
return services;
}
}
/// <summary>
/// In-memory artifact store for testing.
/// </summary>
public sealed class InMemoryArtifactStore : IArtifactStore
{
private readonly Dictionary<string, (byte[] Content, ArtifactMetadata Metadata)> _artifacts = new();
private readonly object _lock = new();
public Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
{
var key = $"{request.BomRef}/{request.SerialNumber}/{request.ArtifactId}";
using var ms = new MemoryStream();
request.Content.CopyTo(ms);
var content = ms.ToArray();
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(content);
var sha256 = Convert.ToHexStringLower(hash);
var metadata = new ArtifactMetadata
{
StorageKey = key,
BomRef = request.BomRef,
SerialNumber = request.SerialNumber,
ArtifactId = request.ArtifactId,
ContentType = request.ContentType,
SizeBytes = content.Length,
Sha256 = sha256,
CreatedAt = DateTimeOffset.UtcNow,
Type = request.Type,
TenantId = request.TenantId
};
lock (_lock)
{
var wasCreated = !_artifacts.ContainsKey(key);
_artifacts[key] = (content, metadata);
return Task.FromResult(ArtifactStoreResult.Succeeded(key, sha256, content.Length, wasCreated));
}
}
public Task<ArtifactReadResult> ReadAsync(string bomRef, string? serialNumber, string? artifactId, CancellationToken ct = default)
{
lock (_lock)
{
var matching = _artifacts
.Where(kvp => kvp.Value.Metadata.BomRef == bomRef)
.Where(kvp => serialNumber == null || kvp.Value.Metadata.SerialNumber == serialNumber)
.Where(kvp => artifactId == null || kvp.Value.Metadata.ArtifactId == artifactId)
.FirstOrDefault();
if (matching.Value.Content == null)
{
return Task.FromResult(ArtifactReadResult.NotFound());
}
return Task.FromResult(ArtifactReadResult.Succeeded(
new MemoryStream(matching.Value.Content),
matching.Value.Metadata));
}
}
public Task<IReadOnlyList<ArtifactMetadata>> ListAsync(string bomRef, string? serialNumber = null, CancellationToken ct = default)
{
lock (_lock)
{
var result = _artifacts.Values
.Where(x => x.Metadata.BomRef == bomRef)
.Where(x => serialNumber == null || x.Metadata.SerialNumber == serialNumber)
.Select(x => x.Metadata)
.ToList();
return Task.FromResult<IReadOnlyList<ArtifactMetadata>>(result);
}
}
public Task<bool> ExistsAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.ContainsKey(key));
}
}
public Task<ArtifactMetadata?> GetMetadataAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.TryGetValue(key, out var entry) ? entry.Metadata : null);
}
}
public Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
{
var key = $"{bomRef}/{serialNumber}/{artifactId}";
lock (_lock)
{
return Task.FromResult(_artifacts.Remove(key));
}
}
}

View File

@@ -20,6 +20,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Artifact.Core\StellaOps.Artifact.Core.csproj" />
<ProjectReference Include="..\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
</ItemGroup>

View File

@@ -4,5 +4,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| Task ID | Status | Notes |
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/__Libraries/StellaOps.Artifact.Infrastructure/StellaOps.Artifact.Infrastructure.md. |
| REMED-05 | DONE | Remediation complete; split store/migration/index, tenant context + deterministic time/ID, S3 integration tests added; dotnet test src/__Libraries/StellaOps.Artifact.Core.Tests/StellaOps.Artifact.Core.Tests.csproj passed 2026-02-03 (25 tests, MTP0001 warning). |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |

Some files were not shown because too many files have changed in this diff Show More