Doctor enhancements, setup enhancements, UI functionality and design consolidation, test project fixes, product advisory attestation/Rekor and delta verification enhancements

This commit is contained in:
master
2026-01-19 09:02:59 +02:00
parent 8c4bf54aed
commit 17419ba7c4
809 changed files with 170738 additions and 12244 deletions

View File

@@ -0,0 +1,243 @@
// -----------------------------------------------------------------------------
// ArtifactStorePerformanceTests.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Performance test: 1000 artifacts store/retrieve
// Description: Performance benchmarks for artifact store operations
// -----------------------------------------------------------------------------
using System.Diagnostics;
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using Xunit;
using Xunit.Abstractions;
namespace StellaOps.Artifact.Tests;
/// <summary>
/// Performance benchmarks for artifact store operations at 1000-artifact scale:
/// sequential store, sequential read-back, list-by-bom-ref, parallel store under
/// contention, and a mixed read/write workload. Wall-clock thresholds are
/// intentionally generous; detailed timings are written to the xUnit output.
/// </summary>
[Trait("Category", "Performance")]
public sealed class ArtifactStorePerformanceTests
{
    // Sink for timing/throughput diagnostics, injected by xUnit.
    private readonly ITestOutputHelper _output;

    public ArtifactStorePerformanceTests(ITestOutputHelper output)
    {
        _output = output;
    }

    /// <summary>Sequentially stores 1000 artifacts and asserts total time stays under 30s.</summary>
    [Fact]
    public async Task Store1000Artifacts_CompletesUnderThreshold()
    {
        // Arrange
        const int artifactCount = 1000;
        const int maxDurationMs = 30000; // 30 seconds for 1000 artifacts (30ms each avg)
        var store = new InMemoryArtifactStore();
        var tenantId = Guid.NewGuid();
        var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
        var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);

        // Act
        var sw = Stopwatch.StartNew();
        foreach (var artifact in artifacts)
        {
            await store.StoreAsync(artifact);
        }
        sw.Stop();

        // Assert
        _output.WriteLine($"Stored {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Average: {sw.ElapsedMilliseconds / (double)artifactCount:F2}ms per artifact");
        _output.WriteLine($"Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
        Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
            $"Store operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
    }

    /// <summary>
    /// Stores 1000 artifacts, then reads each one back by its composite key
    /// (bom-ref, serial, artifact id); only the read phase is timed.
    /// </summary>
    [Fact]
    public async Task Retrieve1000Artifacts_CompletesUnderThreshold()
    {
        // Arrange
        const int artifactCount = 1000;
        const int maxDurationMs = 10000; // 10 seconds for 1000 reads (10ms each avg)
        var store = new InMemoryArtifactStore();
        var tenantId = Guid.NewGuid();
        var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
        var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);

        // Store all artifacts first (not timed); capture each composite key.
        var storedArtifacts = new List<(string bomRef, string serial, string id)>();
        foreach (var artifact in artifacts)
        {
            await store.StoreAsync(artifact);
            storedArtifacts.Add((artifact.BomRef, artifact.SerialNumber!, artifact.ArtifactId));
        }

        // Act - Read them all back
        var sw = Stopwatch.StartNew();
        foreach (var (bRef, serial, id) in storedArtifacts)
        {
            var result = await store.ReadAsync(bRef, serial, id);
            Assert.True(result.Found);
        }
        sw.Stop();

        // Assert
        _output.WriteLine($"Retrieved {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Average: {sw.ElapsedMilliseconds / (double)artifactCount:F2}ms per artifact");
        _output.WriteLine($"Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
        Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
            $"Retrieve operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
    }

    /// <summary>Listing 1000 artifacts under one bom-ref must finish in under 100ms (completion criteria).</summary>
    [Fact]
    public async Task ListByBomRef_1000Artifacts_Under100ms()
    {
        // Arrange
        const int artifactCount = 1000;
        const int maxDurationMs = 100; // 100ms as per completion criteria
        var store = new InMemoryArtifactStore();
        var tenantId = Guid.NewGuid();
        var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
        var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);
        foreach (var artifact in artifacts)
        {
            await store.StoreAsync(artifact);
        }

        // Act - only the list call is timed.
        var sw = Stopwatch.StartNew();
        var results = await store.ListAsync(bomRef);
        sw.Stop();

        // Assert
        _output.WriteLine($"Listed {results.Count} artifacts in {sw.ElapsedMilliseconds}ms");
        Assert.Equal(artifactCount, results.Count);
        Assert.True(sw.ElapsedMilliseconds < maxDurationMs,
            $"List operation took {sw.ElapsedMilliseconds}ms, expected under {maxDurationMs}ms");
    }

    /// <summary>
    /// Stores 1000 artifacts with 10 concurrent writers and verifies no writes
    /// are lost under contention (final list count must equal the input count).
    /// </summary>
    [Fact]
    public async Task ParallelStore_1000Artifacts_HandlesContention()
    {
        // Arrange
        const int artifactCount = 1000;
        const int maxDurationMs = 60000; // 60 seconds with contention
        var store = new InMemoryArtifactStore();
        var tenantId = Guid.NewGuid();
        var bomRef = "pkg:docker/perf-test/app@sha256:abc123def456";
        var artifacts = GenerateTestArtifacts(artifactCount, tenantId, bomRef);

        // Act - Store in parallel
        var sw = Stopwatch.StartNew();
        await Parallel.ForEachAsync(
            artifacts,
            new ParallelOptions { MaxDegreeOfParallelism = 10 },
            async (artifact, ct) =>
            {
                await store.StoreAsync(artifact, ct);
            });
        sw.Stop();

        // Assert
        _output.WriteLine($"Parallel stored {artifactCount} artifacts in {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Parallelism: 10, Throughput: {artifactCount / sw.Elapsed.TotalSeconds:F2} artifacts/second");
        var stored = await store.ListAsync(bomRef);
        Assert.Equal(artifactCount, stored.Count);
        Assert.True(sw.ElapsedMilliseconds < maxDurationMs);
    }

    /// <summary>
    /// Runs 1000 randomly-chosen operations (store/read/list/exists) against a
    /// pre-populated store. No duration threshold is asserted — this test only
    /// verifies the mixed workload completes without faulting.
    /// </summary>
    [Fact]
    public async Task MixedOperations_CompletesSuccessfully()
    {
        // Arrange
        const int operationCount = 1000;
        var store = new InMemoryArtifactStore();
        var tenantId = Guid.NewGuid();
        var bomRef = "pkg:docker/mixed-test/app@sha256:abc123";
        // Pre-populate with 500 artifacts
        var preloadArtifacts = GenerateTestArtifacts(500, tenantId, bomRef);
        foreach (var artifact in preloadArtifacts)
        {
            await store.StoreAsync(artifact);
        }
        var random = new Random(42); // Deterministic seed for reproducibility
        var sw = Stopwatch.StartNew();

        // Act - Mix of operations, chosen uniformly at random per iteration.
        for (var i = 0; i < operationCount; i++)
        {
            var op = random.Next(4);
            switch (op)
            {
                case 0: // Store
                    using (var stream = new MemoryStream(new byte[] { (byte)(i % 256) }))
                    {
                        await store.StoreAsync(new ArtifactStoreRequest
                        {
                            BomRef = bomRef,
                            SerialNumber = $"urn:uuid:mixed-{i}",
                            ArtifactId = $"mixed-artifact-{i}",
                            Content = stream,
                            ContentType = "application/json",
                            Type = ArtifactType.Sbom,
                            TenantId = tenantId
                        });
                    }
                    break;
                case 1: // Read existing
                    // NOTE(review): SerialNumber is passed without the null-forgiving '!'
                    // used in case 3 below — presumably non-null here; confirm.
                    var idx = random.Next(preloadArtifacts.Count);
                    await store.ReadAsync(bomRef, preloadArtifacts[idx].SerialNumber, preloadArtifacts[idx].ArtifactId);
                    break;
                case 2: // List
                    await store.ListAsync(bomRef);
                    break;
                case 3: // Exists check
                    var checkIdx = random.Next(preloadArtifacts.Count);
                    await store.ExistsAsync(bomRef, preloadArtifacts[checkIdx].SerialNumber!, preloadArtifacts[checkIdx].ArtifactId);
                    break;
            }
        }
        sw.Stop();

        // Assert (informational only; no threshold)
        _output.WriteLine($"Completed {operationCount} mixed operations in {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Operations/second: {operationCount / sw.Elapsed.TotalSeconds:F2}");
    }

    /// <summary>
    /// Builds <paramref name="count"/> store requests sharing one tenant and bom-ref,
    /// each with unique serial/artifact-id, a small JSON payload, and a type rotated
    /// across the first five <see cref="ArtifactType"/> values.
    /// </summary>
    private static List<ArtifactStoreRequest> GenerateTestArtifacts(int count, Guid tenantId, string bomRef)
    {
        var artifacts = new List<ArtifactStoreRequest>();
        for (var i = 0; i < count; i++)
        {
            var content = System.Text.Encoding.UTF8.GetBytes($"{{\"index\": {i}, \"data\": \"test-{Guid.NewGuid()}\"}}");
            artifacts.Add(new ArtifactStoreRequest
            {
                BomRef = bomRef,
                SerialNumber = $"urn:uuid:{Guid.NewGuid()}",
                ArtifactId = $"artifact-{i:D5}",
                Content = new MemoryStream(content),
                ContentType = "application/json",
                Type = (ArtifactType)(i % 5), // Rotate through types
                TenantId = tenantId
            });
        }
        return artifacts;
    }
}

View File

@@ -0,0 +1,387 @@
// -----------------------------------------------------------------------------
// ArtifactStoreTests.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Tasks: AS-001, AS-002, AS-003 - Unit tests
// Description: Unit tests for unified artifact store
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;
using StellaOps.Artifact.Infrastructure;
using Xunit;
namespace StellaOps.Artifact.Tests;
/// <summary>
/// Unit tests for the unified artifact store: store/read round-trip, listing
/// by bom-ref, existence checks, delete, and idempotent re-store semantics.
/// </summary>
[Trait("Category", "Unit")]
public sealed class ArtifactStoreTests
{
    /// <summary>Stores a small JSON payload and reads it back, checking hash/size metadata.</summary>
    [Fact]
    public async Task InMemoryStore_StoreAndRead_Succeeds()
    {
        var store = new InMemoryArtifactStore();
        var content = System.Text.Encoding.UTF8.GetBytes("{\"test\": true}");
        using var contentStream = new MemoryStream(content);
        var request = new ArtifactStoreRequest
        {
            BomRef = "pkg:docker/test/app@sha256:abc123",
            SerialNumber = "urn:uuid:12345678-1234-1234-1234-123456789012",
            ArtifactId = "artifact-001",
            Content = contentStream,
            ContentType = "application/json",
            Type = ArtifactType.Sbom,
            TenantId = Guid.NewGuid()
        };

        var storeResult = await store.StoreAsync(request);
        // A first-time store must report creation plus computed digest and size.
        Assert.True(storeResult.Success);
        Assert.True(storeResult.WasCreated);
        Assert.NotNull(storeResult.Sha256);
        Assert.Equal(content.Length, storeResult.SizeBytes);

        // Read it back
        var readResult = await store.ReadAsync(
            request.BomRef,
            request.SerialNumber,
            request.ArtifactId);
        Assert.True(readResult.Found);
        Assert.NotNull(readResult.Content);
        Assert.NotNull(readResult.Metadata);
        Assert.Equal(request.BomRef, readResult.Metadata.BomRef);
    }

    /// <summary>Listing by bom-ref returns only artifacts under that bom-ref, not others.</summary>
    [Fact]
    public async Task InMemoryStore_List_ReturnsMatchingArtifacts()
    {
        var store = new InMemoryArtifactStore();
        var bomRef = "pkg:docker/test/app@sha256:abc123";
        var tenantId = Guid.NewGuid();

        // Store two artifacts with same bom-ref
        for (var i = 0; i < 2; i++)
        {
            using var contentStream = new MemoryStream(new byte[] { (byte)i });
            await store.StoreAsync(new ArtifactStoreRequest
            {
                BomRef = bomRef,
                SerialNumber = $"urn:uuid:serial-{i}",
                ArtifactId = $"artifact-{i}",
                Content = contentStream,
                ContentType = "application/json",
                Type = ArtifactType.Sbom,
                TenantId = tenantId
            });
        }

        // Store one with different bom-ref (must be excluded from the list below)
        using var otherStream = new MemoryStream(new byte[] { 99 });
        await store.StoreAsync(new ArtifactStoreRequest
        {
            BomRef = "pkg:docker/other/app@sha256:xyz",
            SerialNumber = "urn:uuid:other",
            ArtifactId = "artifact-other",
            Content = otherStream,
            ContentType = "application/json",
            Type = ArtifactType.Sbom,
            TenantId = tenantId
        });

        var list = await store.ListAsync(bomRef);
        Assert.Equal(2, list.Count);
        Assert.All(list, a => Assert.Equal(bomRef, a.BomRef));
    }

    /// <summary>Existence check is keyed by the full composite key, including artifact id.</summary>
    [Fact]
    public async Task InMemoryStore_Exists_ReturnsTrueForExisting()
    {
        var store = new InMemoryArtifactStore();
        var bomRef = "pkg:docker/test/app@sha256:abc123";
        var serial = "urn:uuid:12345678-1234-1234-1234-123456789012";
        var artifactId = "artifact-001";
        using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
        await store.StoreAsync(new ArtifactStoreRequest
        {
            BomRef = bomRef,
            SerialNumber = serial,
            ArtifactId = artifactId,
            Content = contentStream,
            ContentType = "application/json",
            Type = ArtifactType.Sbom,
            TenantId = Guid.NewGuid()
        });
        Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
        Assert.False(await store.ExistsAsync(bomRef, serial, "nonexistent"));
    }

    /// <summary>After delete, the artifact is no longer reported as existing.</summary>
    [Fact]
    public async Task InMemoryStore_Delete_RemovesArtifact()
    {
        var store = new InMemoryArtifactStore();
        var bomRef = "pkg:docker/test/app@sha256:abc123";
        var serial = "urn:uuid:12345678-1234-1234-1234-123456789012";
        var artifactId = "artifact-001";
        using var contentStream = new MemoryStream(new byte[] { 1, 2, 3 });
        await store.StoreAsync(new ArtifactStoreRequest
        {
            BomRef = bomRef,
            SerialNumber = serial,
            ArtifactId = artifactId,
            Content = contentStream,
            ContentType = "application/json",
            Type = ArtifactType.Sbom,
            TenantId = Guid.NewGuid()
        });
        Assert.True(await store.ExistsAsync(bomRef, serial, artifactId));
        var deleted = await store.DeleteAsync(bomRef, serial, artifactId);
        Assert.True(deleted);
        Assert.False(await store.ExistsAsync(bomRef, serial, artifactId));
    }

    /// <summary>
    /// Re-storing the same composite key reports WasCreated=false (idempotent store).
    /// Uses `with` cloning, so ArtifactStoreRequest is expected to be a record type.
    /// </summary>
    [Fact]
    public async Task InMemoryStore_StoreExisting_ReturnsWasCreatedFalse()
    {
        var store = new InMemoryArtifactStore();
        var request = new ArtifactStoreRequest
        {
            BomRef = "pkg:docker/test/app@sha256:abc123",
            SerialNumber = "urn:uuid:12345678-1234-1234-1234-123456789012",
            ArtifactId = "artifact-001",
            Content = new MemoryStream(new byte[] { 1 }),
            ContentType = "application/json",
            Type = ArtifactType.Sbom,
            TenantId = Guid.NewGuid()
        };
        var first = await store.StoreAsync(request);
        Assert.True(first.WasCreated);
        // Store again (with new stream, since the first one was consumed)
        request = request with { Content = new MemoryStream(new byte[] { 2 }) };
        var second = await store.StoreAsync(request);
        Assert.False(second.WasCreated);
    }
}
/// <summary>
/// Unit tests for <see cref="BomRefEncoder"/>: path-safe encoding of bom-refs,
/// storage-path construction, and the fallback for blank input.
/// </summary>
[Trait("Category", "Unit")]
public sealed class BomRefEncoderTests
{
    [Theory]
    [InlineData("pkg:docker/acme/api@sha256:abc", "pkg_docker_acme_api_at_sha256_abc")]
    [InlineData("simple-ref", "simple-ref")]
    [InlineData("ref/with/slashes", "ref_with_slashes")]
    [InlineData("pkg:npm/@scope/pkg", "pkg_npm__at_scope_pkg")]
    public void Encode_HandlesSpecialCharacters(string input, string expected)
        => Assert.Equal(expected, BomRefEncoder.Encode(input));

    [Fact]
    public void BuildPath_CreatesCorrectStructure()
    {
        // Arrange
        const string bomRef = "pkg:docker/acme/api@sha256:abc";
        const string serial = "urn:uuid:12345";
        const string artifactId = "envelope-001";

        // Act
        var storagePath = BomRefEncoder.BuildPath(bomRef, serial, artifactId);

        // Assert: rooted under artifacts/, JSON extension, artifact id embedded.
        Assert.StartsWith("artifacts/", storagePath);
        Assert.EndsWith(".json", storagePath);
        Assert.Contains("envelope-001", storagePath);
    }

    [Fact]
    public void Encode_EmptyInput_ReturnsUnknown()
    {
        // Empty and whitespace-only input both collapse to the sentinel "unknown".
        foreach (var blank in new[] { "", " " })
        {
            Assert.Equal("unknown", BomRefEncoder.Encode(blank));
        }
    }
}
/// <summary>
/// Unit tests for <see cref="CycloneDxExtractor"/>: metadata extraction from a
/// full CycloneDX document, tolerance for missing optional fields, and error
/// reporting on malformed JSON.
/// </summary>
[Trait("Category", "Unit")]
public sealed class CycloneDxExtractorTests
{
    private readonly CycloneDxExtractor _extractor = new();

    // Wraps a JSON document in a UTF-8 stream for the extractor.
    private static MemoryStream AsStream(string json)
        => new(System.Text.Encoding.UTF8.GetBytes(json));

    [Fact]
    public async Task ExtractAsync_ValidCycloneDx_ExtractsMetadata()
    {
        var sbom = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.5",
          "serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79",
          "version": 1,
          "metadata": {
            "timestamp": "2026-01-18T12:00:00Z",
            "component": {
              "type": "application",
              "bom-ref": "acme-app",
              "name": "ACME Application",
              "version": "1.0.0",
              "purl": "pkg:docker/acme/app@1.0.0"
            }
          },
          "components": [
            {
              "type": "library",
              "bom-ref": "component-1",
              "name": "some-lib",
              "purl": "pkg:npm/some-lib@1.0.0"
            }
          ]
        }
        """;
        using var stream = AsStream(sbom);

        var result = await _extractor.ExtractAsync(stream);

        // Every field of the primary component and document header must round-trip.
        Assert.True(result.Success);
        Assert.Equal("urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", result.SerialNumber);
        Assert.Equal("1.5", result.SpecVersion);
        Assert.Equal(1, result.Version);
        Assert.Equal("acme-app", result.PrimaryBomRef);
        Assert.Equal("ACME Application", result.PrimaryName);
        Assert.Equal("1.0.0", result.PrimaryVersion);
        Assert.Equal("pkg:docker/acme/app@1.0.0", result.PrimaryPurl);
        Assert.Single(result.ComponentBomRefs);
        Assert.Single(result.ComponentPurls);
    }

    [Fact]
    public async Task ExtractAsync_MissingOptionalFields_Succeeds()
    {
        var sbom = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.4",
          "version": 1,
          "components": []
        }
        """;
        using var stream = AsStream(sbom);

        var result = await _extractor.ExtractAsync(stream);

        // Optional fields absent from the document come back as null, not errors.
        Assert.True(result.Success);
        Assert.Null(result.SerialNumber);
        Assert.Equal("1.4", result.SpecVersion);
        Assert.Null(result.PrimaryBomRef);
    }

    [Fact]
    public async Task ExtractAsync_InvalidJson_ReturnsError()
    {
        using var stream = AsStream("not valid json");

        var result = await _extractor.ExtractAsync(stream);

        // Malformed input is reported via the result, not thrown.
        Assert.False(result.Success);
        Assert.NotNull(result.Error);
    }
}
/// <summary>
/// Unit tests for the in-memory artifact index repository: index-and-find,
/// soft delete via Remove, and lookup by content digest.
/// </summary>
[Trait("Category", "Unit")]
public sealed class ArtifactIndexRepositoryTests
{
    // Builds an index entry with fresh ids and the fields common to all tests.
    private static ArtifactIndexEntry NewEntry(
        string bomRef,
        string serial,
        string artifactId,
        string storageKey,
        string sha256) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = Guid.NewGuid(),
        BomRef = bomRef,
        SerialNumber = serial,
        ArtifactId = artifactId,
        StorageKey = storageKey,
        Type = ArtifactType.Sbom,
        ContentType = "application/json",
        Sha256 = sha256,
        SizeBytes = 1024,
        CreatedAt = DateTimeOffset.UtcNow
    };

    [Fact]
    public async Task InMemoryIndex_IndexAndFind_Succeeds()
    {
        var repo = new InMemoryArtifactIndexRepository();
        var entry = NewEntry(
            "pkg:docker/test/app@sha256:abc",
            "urn:uuid:12345",
            "artifact-001",
            "artifacts/test/artifact-001.json",
            "abc123");

        await repo.IndexAsync(entry);
        var found = await repo.FindByBomRefAsync(entry.BomRef);

        Assert.Single(found);
        Assert.Equal(entry.ArtifactId, found[0].ArtifactId);
    }

    [Fact]
    public async Task InMemoryIndex_Remove_SoftDeletes()
    {
        var repo = new InMemoryArtifactIndexRepository();
        var entry = NewEntry(
            "pkg:docker/test/app@sha256:abc",
            "urn:uuid:12345",
            "artifact-001",
            "artifacts/test/artifact-001.json",
            "abc123");
        await repo.IndexAsync(entry);

        await repo.RemoveAsync(entry.BomRef, entry.SerialNumber, entry.ArtifactId);

        // A removed entry must no longer be returned by bom-ref lookups.
        var found = await repo.FindByBomRefAsync(entry.BomRef);
        Assert.Empty(found);
    }

    [Fact]
    public async Task InMemoryIndex_FindBySha256_ReturnsMatches()
    {
        var repo = new InMemoryArtifactIndexRepository();
        var sha256 = "abc123def456";

        // Two distinct artifacts sharing one content digest.
        await repo.IndexAsync(NewEntry(
            "pkg:docker/test/app1", "urn:uuid:1", "artifact-1", "artifacts/1.json", sha256));
        await repo.IndexAsync(NewEntry(
            "pkg:docker/test/app2", "urn:uuid:2", "artifact-2", "artifacts/2.json", sha256));

        var found = await repo.FindBySha256Async(sha256);

        Assert.Equal(2, found.Count);
    }
}

View File

@@ -0,0 +1,31 @@
<!-- Test project for the unified artifact store (StellaOps.Artifact).
     PackageReference items carry no Version attribute, so versions are expected
     to come from central package management (Directory.Packages.props). -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <!-- Test projects are never packed or published as NuGet packages. -->
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <RootNamespace>StellaOps.Artifact.Tests</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <!-- Runner and coverage collector are build-time only (PrivateAssets=all). -->
    <PackageReference Include="xunit.runner.visualstudio">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <!-- System under test: core contracts plus the in-memory infrastructure. -->
    <ProjectReference Include="..\StellaOps.Artifact.Core\StellaOps.Artifact.Core.csproj" />
    <ProjectReference Include="..\StellaOps.Artifact.Infrastructure\StellaOps.Artifact.Infrastructure.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,609 @@
// -----------------------------------------------------------------------------
// ArtifactController.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Tasks: AS-005 - Create artifact submission endpoint
// AS-007 - Query endpoint for artifacts by bom-ref
// Description: API controller for unified artifact storage
// -----------------------------------------------------------------------------
using System.ComponentModel.DataAnnotations;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Api;
/// <summary>
/// API controller for unified artifact storage operations.
/// </summary>
[ApiController]
[Route("api/v1/artifacts")]
[Produces("application/json")]
[Authorize]
public sealed class ArtifactController : ControllerBase
{
private readonly IArtifactStore _artifactStore;
private readonly ICycloneDxExtractor _cycloneDxExtractor;
private readonly ILogger<ArtifactController> _logger;
public ArtifactController(
IArtifactStore artifactStore,
ICycloneDxExtractor cycloneDxExtractor,
ILogger<ArtifactController> logger)
{
_artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore));
_cycloneDxExtractor = cycloneDxExtractor ?? throw new ArgumentNullException(nameof(cycloneDxExtractor));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Submits an artifact to the unified store.
/// </summary>
/// <param name="request">Artifact submission request.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Created artifact metadata.</returns>
[HttpPost]
[ProducesResponseType(typeof(ArtifactSubmissionResponse), StatusCodes.Status201Created)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status500InternalServerError)]
public async Task<IActionResult> SubmitArtifact(
[FromBody] ArtifactSubmissionRequest request,
CancellationToken ct)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
try
{
// Validate bom-ref format (should be a valid purl or bom-ref)
if (string.IsNullOrWhiteSpace(request.BomRef))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid bom_ref",
Detail = "bom_ref is required and must be a valid Package URL or CycloneDX bom-ref"
});
}
// Get or generate serial number
var serialNumber = request.CyclonedxSerial ?? GenerateSyntheticSerial(request.BomRef);
// Decode base64 content if provided
byte[] content;
if (!string.IsNullOrEmpty(request.ContentBase64))
{
try
{
content = Convert.FromBase64String(request.ContentBase64);
}
catch (FormatException)
{
return BadRequest(new ProblemDetails
{
Title = "Invalid content",
Detail = "content_base64 must be valid Base64-encoded data"
});
}
}
else if (request.DsseUri != null)
{
// Fetch content from URI (S3, HTTP, etc.)
content = await FetchContentFromUri(request.DsseUri, ct);
}
else
{
return BadRequest(new ProblemDetails
{
Title = "Missing content",
Detail = "Either content_base64 or dsse_uri must be provided"
});
}
// Generate artifact ID if not provided
var artifactId = request.ArtifactId ?? Guid.NewGuid().ToString();
// Determine content type
var contentType = request.ContentType ?? DetermineContentType(request.ArtifactType);
// Get tenant from context
var tenantId = GetTenantId();
// Store the artifact
using var contentStream = new MemoryStream(content);
var storeRequest = new ArtifactStoreRequest
{
BomRef = request.BomRef,
SerialNumber = serialNumber,
ArtifactId = artifactId,
Content = contentStream,
ContentType = contentType,
Type = ParseArtifactType(request.ArtifactType),
Metadata = request.Metadata,
TenantId = tenantId,
Overwrite = request.Overwrite ?? false
};
var result = await _artifactStore.StoreAsync(storeRequest, ct);
if (!result.Success)
{
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Storage failed",
Detail = result.ErrorMessage
});
}
var response = new ArtifactSubmissionResponse
{
ArtifactId = artifactId,
BomRef = request.BomRef,
SerialNumber = serialNumber,
StorageKey = result.StorageKey!,
Sha256 = result.Sha256!,
SizeBytes = result.SizeBytes!.Value,
WasCreated = result.WasCreated,
CreatedAt = DateTimeOffset.UtcNow
};
_logger.LogInformation(
"Artifact submitted: {ArtifactId} for bom-ref {BomRef}",
artifactId, request.BomRef);
return CreatedAtAction(
nameof(GetArtifact),
new { bomRef = request.BomRef, serialNumber, artifactId },
response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to submit artifact");
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Internal error",
Detail = "An unexpected error occurred while storing the artifact"
});
}
}
/// <summary>
/// Lists artifacts by bom-ref with optional filters.
/// </summary>
/// <param name="bomRef">Required bom-ref filter.</param>
/// <param name="serialNumber">Optional serial number filter.</param>
/// <param name="from">Optional start date filter.</param>
/// <param name="to">Optional end date filter.</param>
/// <param name="limit">Maximum results (default 100).</param>
/// <param name="continuationToken">Pagination token.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of artifact metadata.</returns>
[HttpGet]
[ProducesResponseType(typeof(ArtifactListResponse), StatusCodes.Status200OK)]
[ProducesResponseType(typeof(ProblemDetails), StatusCodes.Status400BadRequest)]
public async Task<IActionResult> ListArtifacts(
[FromQuery(Name = "bom_ref"), Required] string bomRef,
[FromQuery(Name = "serial_number")] string? serialNumber,
[FromQuery] DateTimeOffset? from,
[FromQuery] DateTimeOffset? to,
[FromQuery] int limit = 100,
[FromQuery(Name = "continuation_token")] string? continuationToken,
CancellationToken ct)
{
if (string.IsNullOrWhiteSpace(bomRef))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "bom_ref query parameter is required"
});
}
if (limit < 1 || limit > 1000)
{
limit = 100;
}
try
{
var artifacts = await _artifactStore.ListAsync(bomRef, serialNumber, ct);
// Apply time filters if provided
if (from.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt >= from.Value).ToList();
}
if (to.HasValue)
{
artifacts = artifacts.Where(a => a.CreatedAt < to.Value).ToList();
}
// Apply pagination
var offset = ParseContinuationToken(continuationToken);
var totalCount = artifacts.Count;
var pagedArtifacts = artifacts.Skip(offset).Take(limit).ToList();
// Generate next continuation token if there are more results
string? nextToken = null;
if (offset + limit < totalCount)
{
nextToken = GenerateContinuationToken(offset + limit);
}
var response = new ArtifactListResponse
{
Artifacts = pagedArtifacts.Select(a => new ArtifactListItem
{
ArtifactId = a.ArtifactId,
BomRef = a.BomRef,
SerialNumber = a.SerialNumber,
StorageKey = a.StorageKey,
ContentType = a.ContentType,
Sha256 = a.Sha256,
SizeBytes = a.SizeBytes,
CreatedAt = a.CreatedAt,
ArtifactType = a.Type.ToString()
}).ToList(),
Total = totalCount,
ContinuationToken = nextToken
};
return Ok(response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to list artifacts for bom-ref {BomRef}", bomRef);
return StatusCode(StatusCodes.Status500InternalServerError, new ProblemDetails
{
Title = "Internal error",
Detail = "An unexpected error occurred while listing artifacts"
});
}
}
/// <summary>
/// Gets a specific artifact by its composite key.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}")]
[ProducesResponseType(typeof(ArtifactMetadataResponse), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> GetArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var metadata = await _artifactStore.GetMetadataAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (metadata == null)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = $"Artifact not found: {artifactId}"
});
}
return Ok(new ArtifactMetadataResponse
{
ArtifactId = metadata.ArtifactId,
BomRef = metadata.BomRef,
SerialNumber = metadata.SerialNumber,
StorageKey = metadata.StorageKey,
ContentType = metadata.ContentType,
Sha256 = metadata.Sha256,
SizeBytes = metadata.SizeBytes,
CreatedAt = metadata.CreatedAt,
ArtifactType = metadata.Type.ToString()
});
}
/// <summary>
/// Downloads artifact content.
/// </summary>
[HttpGet("{bomRef}/{serialNumber}/{artifactId}/content")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DownloadArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var result = await _artifactStore.ReadAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (!result.Found || result.Content == null)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = result.ErrorMessage ?? $"Artifact not found: {artifactId}"
});
}
return File(result.Content, result.Metadata!.ContentType, $"{artifactId}.json");
}
/// <summary>
/// Deletes an artifact (soft delete).
/// </summary>
[HttpDelete("{bomRef}/{serialNumber}/{artifactId}")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<IActionResult> DeleteArtifact(
string bomRef,
string serialNumber,
string artifactId,
CancellationToken ct)
{
var decodedBomRef = Uri.UnescapeDataString(bomRef);
var decodedSerial = Uri.UnescapeDataString(serialNumber);
var deleted = await _artifactStore.DeleteAsync(decodedBomRef, decodedSerial, artifactId, ct);
if (!deleted)
{
return NotFound(new ProblemDetails
{
Title = "Not found",
Detail = $"Artifact not found: {artifactId}"
});
}
return NoContent();
}
private Guid GetTenantId()
{
// TODO: Extract tenant ID from authenticated user context
var tenantClaim = User.FindFirst("tenant_id")?.Value;
return Guid.TryParse(tenantClaim, out var id) ? id : Guid.Empty;
}
private static string GenerateSyntheticSerial(string bomRef)
{
// Generate a deterministic serial based on bom-ref SHA-256
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(bomRef));
var guid = new Guid(hash.Take(16).ToArray());
return $"urn:uuid:{guid}";
}
private static ArtifactType ParseArtifactType(string? type)
{
if (string.IsNullOrEmpty(type))
return ArtifactType.Unknown;
return Enum.TryParse<ArtifactType>(type, ignoreCase: true, out var result)
? result
: ArtifactType.Unknown;
}
private static string DetermineContentType(string? artifactType)
{
return artifactType?.ToLowerInvariant() switch
{
"sbom" => "application/vnd.cyclonedx+json",
"vex" => "application/vnd.openvex+json",
"dsseenvelope" => "application/vnd.dsse+json",
"rekorproof" => "application/json",
_ => "application/json"
};
}
/// <summary>
/// Fetches content from a URI (S3, HTTP, file).
/// Sprint: SPRINT_20260118_017 (AS-005) - Validates dsse_uri accessibility
/// </summary>
private async Task<byte[]> FetchContentFromUri(string uri, CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(uri);
// Validate URI format
if (!Uri.TryCreate(uri, UriKind.Absolute, out var parsedUri))
{
throw new ArgumentException($"Invalid URI format: {uri}");
}
return parsedUri.Scheme.ToLowerInvariant() switch
{
"s3" => await FetchFromS3Async(parsedUri, ct),
"http" or "https" => await FetchFromHttpAsync(parsedUri, ct),
"file" => await FetchFromFileAsync(parsedUri, ct),
_ => throw new NotSupportedException($"URI scheme not supported: {parsedUri.Scheme}")
};
}
private async Task<byte[]> FetchFromS3Async(Uri uri, CancellationToken ct)
{
// Parse S3 URI: s3://bucket/key
var bucket = uri.Host;
var key = uri.AbsolutePath.TrimStart('/');
_logger.LogDebug("Fetching from S3: bucket={Bucket}, key={Key}", bucket, key);
// Validate accessibility by checking existence first
// This would use the S3 client from DI in a real implementation
// For now, we document the expected behavior and throw
throw new NotImplementedException(
$"S3 fetch not fully implemented. Configure S3 client. URI: s3://{bucket}/{key}");
}
/// <summary>
/// Fetches artifact content over HTTP(S) with a 30-second timeout and a hard
/// 100MB size cap enforced while streaming (not just from the header).
/// </summary>
/// <exception cref="InvalidOperationException">
/// Non-success status code, content exceeding the cap, or a transport failure.
/// </exception>
private async Task<byte[]> FetchFromHttpAsync(Uri uri, CancellationToken ct)
{
    const long MaxContentBytes = 100 * 1024 * 1024; // 100MB download cap
    _logger.LogDebug("Fetching from HTTP: {Uri}", uri);
    // NOTE(review): a new HttpClient per call risks socket exhaustion under load;
    // prefer a shared instance or IHttpClientFactory from DI.
    using var httpClient = new HttpClient();
    httpClient.Timeout = TimeSpan.FromSeconds(30);
    try
    {
        // Single GET reading headers first. The previous HEAD-then-GET pair
        // raced against the server (content could change between requests),
        // failed on servers that reject HEAD, and enforced no limit at all
        // when the HEAD response carried no Content-Length.
        using var response = await httpClient.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, ct);
        if (!response.IsSuccessStatusCode)
        {
            throw new InvalidOperationException(
                $"URI not accessible: {uri} returned {response.StatusCode}");
        }
        // Fast reject when the server declares an oversized payload.
        var contentLength = response.Content.Headers.ContentLength;
        if (contentLength > MaxContentBytes)
        {
            throw new InvalidOperationException(
                $"Content too large: {contentLength} bytes exceeds 100MB limit");
        }
        // Copy with a hard cap so a missing (or lying) Content-Length header
        // cannot make us buffer an unbounded response in memory.
        await using var source = await response.Content.ReadAsStreamAsync(ct);
        using var buffer = new MemoryStream();
        var chunk = new byte[81920];
        int read;
        while ((read = await source.ReadAsync(chunk.AsMemory(), ct)) > 0)
        {
            if (buffer.Length + read > MaxContentBytes)
            {
                throw new InvalidOperationException(
                    $"Content too large: {buffer.Length + read} bytes exceeds 100MB limit");
            }
            buffer.Write(chunk, 0, read);
        }
        return buffer.ToArray();
    }
    catch (HttpRequestException ex)
    {
        throw new InvalidOperationException($"Failed to fetch from {uri}: {ex.Message}", ex);
    }
}
/// <summary>
/// Fetches artifact content from a file:// URI, rejecting missing files and
/// files larger than 100MB.
/// </summary>
private async Task<byte[]> FetchFromFileAsync(Uri uri, CancellationToken ct)
{
    var filePath = uri.LocalPath;
    _logger.LogDebug("Fetching from file: {Path}", filePath);

    if (!System.IO.File.Exists(filePath))
    {
        throw new FileNotFoundException($"File not accessible: {filePath}");
    }

    // Size gate before reading the whole file into memory.
    var size = new FileInfo(filePath).Length;
    if (size > 100 * 1024 * 1024) // 100MB max
    {
        throw new InvalidOperationException(
            $"File too large: {size} bytes exceeds 100MB limit");
    }

    return await System.IO.File.ReadAllBytesAsync(filePath, ct);
}
/// <summary>
/// Parses a base64-encoded offset continuation token.
/// Any missing or malformed token maps to offset 0 (start of the list).
/// </summary>
private static int ParseContinuationToken(string? token)
{
    // No token means "start from the beginning".
    if (string.IsNullOrEmpty(token))
    {
        return 0;
    }
    try
    {
        var bytes = Convert.FromBase64String(token);
        var text = System.Text.Encoding.UTF8.GetString(bytes);
        return int.TryParse(text, out var offset) ? offset : 0;
    }
    catch
    {
        // Invalid base64 — treat as no token rather than failing the request.
        return 0;
    }
}
/// <summary>
/// Encodes a list offset as an opaque base64 continuation token
/// (inverse of <c>ParseContinuationToken</c>).
/// </summary>
private static string GenerateContinuationToken(int offset)
{
    var payload = System.Text.Encoding.UTF8.GetBytes(offset.ToString());
    return Convert.ToBase64String(payload);
}
}
/// <summary>
/// Request to submit an artifact.
/// Content can arrive inline (<see cref="ContentBase64"/>) or by reference
/// (<see cref="DsseUri"/>) — presumably the endpoint enforces that one of the
/// two is present; TODO confirm against the handler.
/// </summary>
public sealed record ArtifactSubmissionRequest
{
    /// <summary>Package URL or CycloneDX bom-ref. The only mandatory field.</summary>
    [Required]
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber (optional, generated if missing).</summary>
    public string? CyclonedxSerial { get; init; }
    /// <summary>Artifact ID (optional, generated if missing).</summary>
    public string? ArtifactId { get; init; }
    /// <summary>Base64-encoded content (inline submission).</summary>
    public string? ContentBase64 { get; init; }
    /// <summary>URI to fetch content from (S3, HTTP, file).</summary>
    public string? DsseUri { get; init; }
    /// <summary>Content type (optional, inferred from artifact_type).</summary>
    public string? ContentType { get; init; }
    /// <summary>Artifact type name: Sbom, Vex, DsseEnvelope, etc.</summary>
    public string? ArtifactType { get; init; }
    /// <summary>Rekor transparency log UUID (optional).</summary>
    public string? RekorUuid { get; init; }
    /// <summary>Additional free-form metadata.</summary>
    public Dictionary<string, string>? Metadata { get; init; }
    /// <summary>Whether to overwrite an existing artifact with the same key.</summary>
    public bool? Overwrite { get; init; }
}
/// <summary>
/// Response from artifact submission.
/// </summary>
public sealed record ArtifactSubmissionResponse
{
    /// <summary>Identifier of the stored artifact.</summary>
    public required string ArtifactId { get; init; }
    /// <summary>Package URL or bom-ref the artifact is attached to.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber associated with the artifact.</summary>
    public required string SerialNumber { get; init; }
    /// <summary>Full storage key (path) of the stored content.</summary>
    public required string StorageKey { get; init; }
    /// <summary>SHA-256 digest of the stored content.</summary>
    public required string Sha256 { get; init; }
    /// <summary>Stored content size in bytes.</summary>
    public required long SizeBytes { get; init; }
    /// <summary>True when a new artifact was created (as opposed to an overwrite/no-op).</summary>
    public required bool WasCreated { get; init; }
    /// <summary>Creation timestamp.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Response for listing artifacts (one page of results).
/// </summary>
public sealed record ArtifactListResponse
{
    /// <summary>Artifacts in this page.</summary>
    public required IReadOnlyList<ArtifactListItem> Artifacts { get; init; }
    /// <summary>Total number of matching artifacts across all pages.</summary>
    public required int Total { get; init; }
    /// <summary>Opaque token for fetching the next page; null when no more pages.</summary>
    public string? ContinuationToken { get; init; }
}
/// <summary>
/// Item in artifact list response.
/// </summary>
public sealed record ArtifactListItem
{
    /// <summary>Identifier of the artifact.</summary>
    public required string ArtifactId { get; init; }
    /// <summary>Package URL or bom-ref.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber.</summary>
    public required string SerialNumber { get; init; }
    /// <summary>Full storage key (path).</summary>
    public required string StorageKey { get; init; }
    /// <summary>Content MIME type.</summary>
    public required string ContentType { get; init; }
    /// <summary>SHA-256 digest of the content.</summary>
    public required string Sha256 { get; init; }
    /// <summary>Content size in bytes.</summary>
    public required long SizeBytes { get; init; }
    /// <summary>Creation timestamp.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>Artifact type name (e.g. Sbom, Vex, DsseEnvelope).</summary>
    public required string ArtifactType { get; init; }
}
/// <summary>
/// Response for artifact metadata (metadata-only lookup, no content).
/// </summary>
public sealed record ArtifactMetadataResponse
{
    /// <summary>Identifier of the artifact.</summary>
    public required string ArtifactId { get; init; }
    /// <summary>Package URL or bom-ref.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber.</summary>
    public required string SerialNumber { get; init; }
    /// <summary>Full storage key (path).</summary>
    public required string StorageKey { get; init; }
    /// <summary>Content MIME type.</summary>
    public required string ContentType { get; init; }
    /// <summary>SHA-256 digest of the content.</summary>
    public required string Sha256 { get; init; }
    /// <summary>Content size in bytes.</summary>
    public required long SizeBytes { get; init; }
    /// <summary>Creation timestamp.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>Artifact type name (e.g. Sbom, Vex, DsseEnvelope).</summary>
    public required string ArtifactType { get; init; }
}

View File

@@ -0,0 +1,517 @@
// -----------------------------------------------------------------------------
// CycloneDxExtractor.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-004 - Implement bom-ref extraction from CycloneDX
// Description: Standalone service for extracting metadata from CycloneDX SBOMs
// -----------------------------------------------------------------------------
using System.Text.Json;
using System.Xml;
using System.Xml.Linq;
namespace StellaOps.Artifact.Core;
/// <summary>
/// Extracts metadata from CycloneDX SBOM documents.
/// The provided implementation reports parse failures via
/// <see cref="CycloneDxMetadata.Success"/>/<see cref="CycloneDxMetadata.Error"/>
/// rather than throwing.
/// </summary>
public interface ICycloneDxExtractor
{
    /// <summary>
    /// Extracts metadata from a CycloneDX JSON document.
    /// </summary>
    CycloneDxMetadata Extract(JsonDocument document);
    /// <summary>
    /// Extracts metadata from a CycloneDX JSON stream.
    /// </summary>
    Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default);
    /// <summary>
    /// Extracts metadata from a CycloneDX XML document.
    /// Sprint: SPRINT_20260118_017 (AS-004)
    /// </summary>
    CycloneDxMetadata ExtractFromXml(XDocument document);
    /// <summary>
    /// Extracts metadata from a CycloneDX XML stream.
    /// Sprint: SPRINT_20260118_017 (AS-004)
    /// </summary>
    Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default);
    /// <summary>
    /// Auto-detects format (JSON or XML) and extracts metadata.
    /// The implementation rewinds the stream between detection and parsing,
    /// so a seekable stream is required.
    /// Sprint: SPRINT_20260118_017 (AS-004)
    /// </summary>
    Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default);
}
/// <summary>
/// Extracted metadata from a CycloneDX document.
/// Check <see cref="Success"/> before consuming the other fields.
/// </summary>
public sealed record CycloneDxMetadata
{
    /// <summary>SBOM serial number (URN, e.g. urn:uuid:...).</summary>
    public string? SerialNumber { get; init; }
    /// <summary>SBOM version (defaults to 1 when absent).</summary>
    public int Version { get; init; }
    /// <summary>CycloneDX spec version (e.g. "1.5").</summary>
    public string? SpecVersion { get; init; }
    /// <summary>Primary component bom-ref (from metadata/component).</summary>
    public string? PrimaryBomRef { get; init; }
    /// <summary>Primary component name.</summary>
    public string? PrimaryName { get; init; }
    /// <summary>Primary component version.</summary>
    public string? PrimaryVersion { get; init; }
    /// <summary>Primary component purl.</summary>
    public string? PrimaryPurl { get; init; }
    /// <summary>All component bom-refs, including nested components.</summary>
    public IReadOnlyList<string> ComponentBomRefs { get; init; } = [];
    /// <summary>All component purls, including nested components.</summary>
    public IReadOnlyList<string> ComponentPurls { get; init; } = [];
    /// <summary>Total component count (nested components included).</summary>
    public int ComponentCount { get; init; }
    /// <summary>Timestamp from metadata, when present and parseable.</summary>
    public DateTimeOffset? Timestamp { get; init; }
    /// <summary>True when extraction succeeded; other fields are meaningful only then.</summary>
    public bool Success { get; init; }
    /// <summary>Extraction error message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Default implementation of CycloneDX extractor.
/// All entry points are non-throwing: parse/extraction failures are reported
/// via <see cref="CycloneDxMetadata.Success"/> and <see cref="CycloneDxMetadata.Error"/>.
/// </summary>
public sealed class CycloneDxExtractor : ICycloneDxExtractor
{
    /// <inheritdoc />
    public CycloneDxMetadata Extract(JsonDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);
        try
        {
            var root = document.RootElement;
            // Extract serial number
            string? serialNumber = null;
            if (root.TryGetProperty("serialNumber", out var serialProp))
            {
                serialNumber = serialProp.GetString();
            }
            // Extract version (defaults to 1 when the property is absent)
            int version = 1;
            if (root.TryGetProperty("version", out var versionProp))
            {
                version = versionProp.GetInt32();
            }
            // Extract spec version
            string? specVersion = null;
            if (root.TryGetProperty("specVersion", out var specProp))
            {
                specVersion = specProp.GetString();
            }
            // Extract primary component from metadata
            string? primaryBomRef = null;
            string? primaryName = null;
            string? primaryVersion = null;
            string? primaryPurl = null;
            if (root.TryGetProperty("metadata", out var metadata))
            {
                if (metadata.TryGetProperty("component", out var primaryComponent))
                {
                    primaryBomRef = GetStringProperty(primaryComponent, "bom-ref");
                    primaryName = GetStringProperty(primaryComponent, "name");
                    primaryVersion = GetStringProperty(primaryComponent, "version");
                    primaryPurl = GetStringProperty(primaryComponent, "purl");
                }
            }
            // Extract timestamp
            DateTimeOffset? timestamp = null;
            if (root.TryGetProperty("metadata", out var meta2) &&
                meta2.TryGetProperty("timestamp", out var tsProp))
            {
                if (DateTimeOffset.TryParse(tsProp.GetString(), out var ts))
                {
                    timestamp = ts;
                }
            }
            // Extract all component bom-refs and purls (nested components included)
            var bomRefs = new List<string>();
            var purls = new List<string>();
            int componentCount = 0;
            if (root.TryGetProperty("components", out var components))
            {
                foreach (var component in components.EnumerateArray())
                {
                    componentCount++;
                    var bomRef = GetStringProperty(component, "bom-ref");
                    if (bomRef != null)
                    {
                        bomRefs.Add(bomRef);
                    }
                    var purl = GetStringProperty(component, "purl");
                    if (purl != null)
                    {
                        purls.Add(purl);
                    }
                    // Recursively extract from nested components
                    ExtractNestedComponents(component, bomRefs, purls, ref componentCount);
                }
            }
            return new CycloneDxMetadata
            {
                SerialNumber = serialNumber,
                Version = version,
                SpecVersion = specVersion,
                PrimaryBomRef = primaryBomRef,
                PrimaryName = primaryName,
                PrimaryVersion = primaryVersion,
                PrimaryPurl = primaryPurl,
                ComponentBomRefs = bomRefs,
                ComponentPurls = purls,
                ComponentCount = componentCount,
                Timestamp = timestamp,
                Success = true
            };
        }
        catch (Exception ex)
        {
            return new CycloneDxMetadata
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <inheritdoc />
    public async Task<CycloneDxMetadata> ExtractAsync(Stream stream, CancellationToken ct = default)
    {
        try
        {
            using var document = await JsonDocument.ParseAsync(stream, cancellationToken: ct);
            return Extract(document);
        }
        catch (Exception ex)
        {
            // Parse failures surface as Success=false, never as exceptions.
            return new CycloneDxMetadata
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>Returns a string property value, or null when absent.</summary>
    private static string? GetStringProperty(JsonElement element, string propertyName)
    {
        return element.TryGetProperty(propertyName, out var prop) ? prop.GetString() : null;
    }

    /// <summary>Recursively collects bom-refs/purls from nested "components" arrays.</summary>
    private static void ExtractNestedComponents(
        JsonElement component,
        List<string> bomRefs,
        List<string> purls,
        ref int count)
    {
        if (!component.TryGetProperty("components", out var nested))
            return;
        foreach (var child in nested.EnumerateArray())
        {
            count++;
            var bomRef = GetStringProperty(child, "bom-ref");
            if (bomRef != null)
            {
                bomRefs.Add(bomRef);
            }
            var purl = GetStringProperty(child, "purl");
            if (purl != null)
            {
                purls.Add(purl);
            }
            // Recurse
            ExtractNestedComponents(child, bomRefs, purls, ref count);
        }
    }

    // -------------------------------------------------------------------------
    // XML Parsing - Sprint: SPRINT_20260118_017 (AS-004)
    // -------------------------------------------------------------------------
    private static readonly XNamespace Cdx14 = "http://cyclonedx.org/schema/bom/1.4";
    private static readonly XNamespace Cdx15 = "http://cyclonedx.org/schema/bom/1.5";
    private static readonly XNamespace Cdx16 = "http://cyclonedx.org/schema/bom/1.6";

    /// <inheritdoc />
    public CycloneDxMetadata ExtractFromXml(XDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);
        try
        {
            var root = document.Root;
            if (root == null)
            {
                return new CycloneDxMetadata { Success = false, Error = "Empty XML document" };
            }
            // Detect namespace (drives element lookups and spec-version inference)
            var ns = DetectNamespace(root);
            // Extract serial number (attribute on root)
            string? serialNumber = root.Attribute("serialNumber")?.Value;
            // Extract version
            int version = 1;
            var versionAttr = root.Attribute("version")?.Value;
            if (int.TryParse(versionAttr, out var v))
            {
                version = v;
            }
            // Extract spec version from namespace
            string? specVersion = ExtractSpecVersion(ns);
            // Extract primary component from metadata
            string? primaryBomRef = null;
            string? primaryName = null;
            string? primaryVersion = null;
            string? primaryPurl = null;
            var metadata = root.Element(ns + "metadata");
            if (metadata != null)
            {
                var primaryComponent = metadata.Element(ns + "component");
                if (primaryComponent != null)
                {
                    primaryBomRef = primaryComponent.Attribute("bom-ref")?.Value;
                    primaryName = primaryComponent.Element(ns + "name")?.Value;
                    primaryVersion = primaryComponent.Element(ns + "version")?.Value;
                    primaryPurl = primaryComponent.Element(ns + "purl")?.Value;
                }
            }
            // Extract timestamp
            DateTimeOffset? timestamp = null;
            var tsElement = metadata?.Element(ns + "timestamp");
            if (tsElement != null && DateTimeOffset.TryParse(tsElement.Value, out var ts))
            {
                timestamp = ts;
            }
            // Extract all components
            var bomRefs = new List<string>();
            var purls = new List<string>();
            int componentCount = 0;
            var componentsElement = root.Element(ns + "components");
            if (componentsElement != null)
            {
                ExtractXmlComponents(componentsElement, ns, bomRefs, purls, ref componentCount);
            }
            return new CycloneDxMetadata
            {
                SerialNumber = serialNumber,
                Version = version,
                SpecVersion = specVersion,
                PrimaryBomRef = primaryBomRef,
                PrimaryName = primaryName,
                PrimaryVersion = primaryVersion,
                PrimaryPurl = primaryPurl,
                ComponentBomRefs = bomRefs,
                ComponentPurls = purls,
                ComponentCount = componentCount,
                Timestamp = timestamp,
                Success = true
            };
        }
        catch (Exception ex)
        {
            return new CycloneDxMetadata
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <inheritdoc />
    public async Task<CycloneDxMetadata> ExtractFromXmlAsync(Stream stream, CancellationToken ct = default)
    {
        try
        {
            // DTDs are prohibited and the resolver nulled to block XXE attacks.
            var settings = new XmlReaderSettings
            {
                Async = true,
                DtdProcessing = DtdProcessing.Prohibit,
                XmlResolver = null
            };
            using var reader = XmlReader.Create(stream, settings);
            var document = await XDocument.LoadAsync(reader, LoadOptions.None, ct);
            return ExtractFromXml(document);
        }
        catch (Exception ex)
        {
            return new CycloneDxMetadata
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <inheritdoc />
    /// <remarks>Requires a seekable stream: it is rewound between probing and parsing.</remarks>
    public async Task<CycloneDxMetadata> ExtractAutoAsync(Stream stream, CancellationToken ct = default)
    {
        // Probe the first few bytes to detect the format. A UTF-8 BOM is three
        // bytes (EF BB BF); the previous implementation compared a single raw
        // byte against '\uFEFF', which can never be true for a byte value, so
        // BOM-prefixed input was misrouted.
        var header = new byte[4];
        var headerLength = await stream.ReadAsync(header.AsMemory(), ct);
        stream.Position = 0;
        if (headerLength == 0)
        {
            return new CycloneDxMetadata { Success = false, Error = "Empty stream" };
        }
        // Skip a UTF-8 BOM when probing; the JSON/XML parsers handle it themselves.
        var probeIndex = 0;
        if (headerLength >= 3 && header[0] == 0xEF && header[1] == 0xBB && header[2] == 0xBF)
        {
            probeIndex = 3;
        }
        if (probeIndex >= headerLength)
        {
            // Stream contained only a BOM.
            return new CycloneDxMetadata { Success = false, Error = "Empty stream" };
        }
        var firstChar = (char)header[probeIndex];
        if (firstChar == '{' || firstChar == '[')
        {
            // JSON format
            return await ExtractAsync(stream, ct);
        }
        if (firstChar == '<')
        {
            // XML format
            return await ExtractFromXmlAsync(stream, ct);
        }
        // Ambiguous leading byte (e.g. whitespace): try JSON first, then XML.
        // ExtractAsync reports failures via Success=false rather than throwing,
        // so the fallback must inspect the result — the previous try/catch
        // fallback could never fire.
        var jsonResult = await ExtractAsync(stream, ct);
        if (jsonResult.Success)
        {
            return jsonResult;
        }
        stream.Position = 0;
        return await ExtractFromXmlAsync(stream, ct);
    }

    /// <summary>Maps the root element's namespace onto a known CycloneDX namespace.</summary>
    private static XNamespace DetectNamespace(XElement root)
    {
        var ns = root.Name.Namespace;
        if (ns == Cdx16 || ns.NamespaceName.Contains("1.6"))
            return Cdx16;
        if (ns == Cdx15 || ns.NamespaceName.Contains("1.5"))
            return Cdx15;
        if (ns == Cdx14 || ns.NamespaceName.Contains("1.4"))
            return Cdx14;
        // Default to detected namespace
        return ns;
    }

    /// <summary>Infers the spec version string ("1.4"/"1.5"/"1.6") from the namespace.</summary>
    private static string? ExtractSpecVersion(XNamespace ns)
    {
        if (ns == Cdx16 || ns.NamespaceName.Contains("1.6"))
            return "1.6";
        if (ns == Cdx15 || ns.NamespaceName.Contains("1.5"))
            return "1.5";
        if (ns == Cdx14 || ns.NamespaceName.Contains("1.4"))
            return "1.4";
        return null;
    }

    /// <summary>Recursively collects bom-refs/purls from XML component trees.</summary>
    private static void ExtractXmlComponents(
        XElement componentsElement,
        XNamespace ns,
        List<string> bomRefs,
        List<string> purls,
        ref int count)
    {
        foreach (var component in componentsElement.Elements(ns + "component"))
        {
            count++;
            var bomRef = component.Attribute("bom-ref")?.Value;
            if (bomRef != null)
            {
                bomRefs.Add(bomRef);
            }
            var purl = component.Element(ns + "purl")?.Value;
            if (purl != null)
            {
                purls.Add(purl);
            }
            // Recurse into nested components
            var nested = component.Element(ns + "components");
            if (nested != null)
            {
                ExtractXmlComponents(nested, ns, bomRefs, purls, ref count);
            }
        }
    }
}

View File

@@ -0,0 +1,379 @@
// -----------------------------------------------------------------------------
// IArtifactStore.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-001 - Design unified IArtifactStore interface
// Description: Unified artifact storage interface with bom-ref support
// -----------------------------------------------------------------------------
namespace StellaOps.Artifact.Core;
/// <summary>
/// Unified artifact store interface supporting bom-ref based storage and retrieval.
/// Path convention: /artifacts/{bom-ref-encoded}/{serialNumber}/{artifactId}.json
/// </summary>
public interface IArtifactStore
{
    /// <summary>
    /// Stores an artifact.
    /// </summary>
    /// <param name="request">Storage request.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Storage result (check <see cref="ArtifactStoreResult.Success"/>).</returns>
    Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default);
    /// <summary>
    /// Reads an artifact. The returned result owns the content stream —
    /// callers must dispose it.
    /// </summary>
    /// <param name="bomRef">Package URL or component reference.</param>
    /// <param name="serialNumber">CycloneDX serialNumber (optional).</param>
    /// <param name="artifactId">Artifact ID (optional, returns first match if null).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Read result with content stream.</returns>
    Task<ArtifactReadResult> ReadAsync(string bomRef, string? serialNumber, string? artifactId, CancellationToken ct = default);
    /// <summary>
    /// Lists artifacts for a bom-ref.
    /// </summary>
    /// <param name="bomRef">Package URL or component reference.</param>
    /// <param name="serialNumber">CycloneDX serialNumber (optional filter).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of artifact metadata.</returns>
    Task<IReadOnlyList<ArtifactMetadata>> ListAsync(string bomRef, string? serialNumber = null, CancellationToken ct = default);
    /// <summary>
    /// Checks if an artifact exists.
    /// </summary>
    Task<bool> ExistsAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
    /// <summary>
    /// Gets artifact metadata without reading content.
    /// </summary>
    Task<ArtifactMetadata?> GetMetadataAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
    /// <summary>
    /// Deletes an artifact (soft delete, preserves for audit).
    /// Returns false when the artifact does not exist — presumably; TODO confirm
    /// against implementations.
    /// </summary>
    Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
}
/// <summary>
/// Request to store an artifact.
/// </summary>
public sealed record ArtifactStoreRequest
{
    /// <summary>
    /// Package URL (purl) or CycloneDX bom-ref.
    /// </summary>
    public required string BomRef { get; init; }
    /// <summary>
    /// CycloneDX serialNumber URN (e.g. urn:uuid:...).
    /// </summary>
    public required string SerialNumber { get; init; }
    /// <summary>
    /// Unique artifact identifier (e.g. DSSE UUID, hash).
    /// </summary>
    public required string ArtifactId { get; init; }
    /// <summary>
    /// Artifact content stream. Lifetime/disposal ownership is not specified
    /// here — NOTE(review): confirm whether the store disposes it.
    /// </summary>
    public required Stream Content { get; init; }
    /// <summary>
    /// Content type (MIME type).
    /// </summary>
    public required string ContentType { get; init; }
    /// <summary>
    /// Artifact type classification (defaults to <see cref="ArtifactType.Unknown"/>).
    /// </summary>
    public ArtifactType Type { get; init; } = ArtifactType.Unknown;
    /// <summary>
    /// Additional metadata.
    /// </summary>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
    /// <summary>
    /// Tenant ID for multi-tenancy.
    /// </summary>
    public Guid TenantId { get; init; }
    /// <summary>
    /// Whether to overwrite an existing artifact with the same key.
    /// </summary>
    public bool Overwrite { get; init; } = false;
}
/// <summary>
/// Result of storing an artifact. Use the <see cref="Succeeded"/> /
/// <see cref="Failed"/> factories rather than object initializers.
/// </summary>
public sealed record ArtifactStoreResult
{
    /// <summary>
    /// Whether storage was successful.
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Storage key (full path).
    /// </summary>
    public string? StorageKey { get; init; }
    /// <summary>
    /// SHA-256 hash of stored content.
    /// </summary>
    public string? Sha256 { get; init; }
    /// <summary>
    /// Size in bytes.
    /// </summary>
    public long? SizeBytes { get; init; }
    /// <summary>
    /// Error message if failed.
    /// </summary>
    public string? ErrorMessage { get; init; }
    /// <summary>
    /// Whether this was a new artifact or an update.
    /// </summary>
    public bool WasCreated { get; init; }

    /// <summary>
    /// Builds a success result describing the stored object.
    /// </summary>
    public static ArtifactStoreResult Succeeded(string storageKey, string sha256, long sizeBytes, bool wasCreated = true) =>
        new()
        {
            Success = true,
            StorageKey = storageKey,
            Sha256 = sha256,
            SizeBytes = sizeBytes,
            WasCreated = wasCreated
        };

    /// <summary>
    /// Builds a failure result carrying the error message.
    /// </summary>
    public static ArtifactStoreResult Failed(string errorMessage) =>
        new()
        {
            Success = false,
            ErrorMessage = errorMessage
        };
}
/// <summary>
/// Result of reading an artifact. Owns the content stream: disposing the
/// result disposes <see cref="Content"/>.
/// </summary>
public sealed record ArtifactReadResult : IDisposable
{
    /// <summary>
    /// Whether the artifact was found.
    /// </summary>
    public required bool Found { get; init; }
    /// <summary>
    /// Content stream (caller must dispose).
    /// </summary>
    public Stream? Content { get; init; }
    /// <summary>
    /// Artifact metadata.
    /// </summary>
    public ArtifactMetadata? Metadata { get; init; }
    /// <summary>
    /// Error message if not found.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <inheritdoc />
    public void Dispose() => Content?.Dispose();

    /// <summary>
    /// Builds a found result wrapping the content stream and its metadata.
    /// </summary>
    public static ArtifactReadResult Succeeded(Stream content, ArtifactMetadata metadata) =>
        new()
        {
            Found = true,
            Content = content,
            Metadata = metadata
        };

    /// <summary>
    /// Builds a not-found result with an optional message.
    /// </summary>
    public static ArtifactReadResult NotFound(string? message = null) =>
        new()
        {
            Found = false,
            ErrorMessage = message ?? "Artifact not found"
        };
}
/// <summary>
/// Artifact metadata (describes a stored artifact without its content).
/// </summary>
public sealed record ArtifactMetadata
{
    /// <summary>
    /// Full storage key/path.
    /// </summary>
    public required string StorageKey { get; init; }
    /// <summary>
    /// Package URL or bom-ref.
    /// </summary>
    public required string BomRef { get; init; }
    /// <summary>
    /// CycloneDX serialNumber.
    /// </summary>
    public required string SerialNumber { get; init; }
    /// <summary>
    /// Artifact ID.
    /// </summary>
    public required string ArtifactId { get; init; }
    /// <summary>
    /// Content type (MIME).
    /// </summary>
    public required string ContentType { get; init; }
    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required long SizeBytes { get; init; }
    /// <summary>
    /// SHA-256 hash of the content.
    /// </summary>
    public required string Sha256 { get; init; }
    /// <summary>
    /// Creation timestamp.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// Artifact type classification.
    /// </summary>
    public ArtifactType Type { get; init; }
    /// <summary>
    /// Tenant ID for multi-tenancy.
    /// </summary>
    public Guid TenantId { get; init; }
    /// <summary>
    /// Additional metadata.
    /// </summary>
    public IReadOnlyDictionary<string, string>? ExtraMetadata { get; init; }
}
/// <summary>
/// Artifact type classification.
/// NOTE(review): persisted as text in the index (see ArtifactIndexSchema),
/// so member names should be treated as stable identifiers.
/// </summary>
public enum ArtifactType
{
    /// <summary>Unknown type.</summary>
    Unknown,
    /// <summary>SBOM (CycloneDX or SPDX).</summary>
    Sbom,
    /// <summary>VEX document.</summary>
    Vex,
    /// <summary>DSSE envelope/attestation.</summary>
    DsseEnvelope,
    /// <summary>Rekor transparency log proof.</summary>
    RekorProof,
    /// <summary>Verdict record.</summary>
    Verdict,
    /// <summary>Policy bundle.</summary>
    PolicyBundle,
    /// <summary>Provenance attestation.</summary>
    Provenance,
    /// <summary>Build log.</summary>
    BuildLog,
    /// <summary>Test results.</summary>
    TestResults,
    /// <summary>Scan results.</summary>
    ScanResults
}
/// <summary>
/// Utility for encoding bom-refs for use in storage paths.
/// Uses a reversible escape scheme (<c>_XX</c> two-digit hex, e.g. <c>@</c> → <c>_40</c>)
/// so distinct bom-refs always map to distinct paths and <see cref="Decode"/>
/// is an exact inverse of <see cref="Encode"/>. The previous substitution
/// scheme mapped both "a/b" and "a:b" to "a_b" (colliding storage paths) and
/// could not round-trip.
/// </summary>
public static class BomRefEncoder
{
    // Characters unsafe in storage paths. '_' itself must be escaped so the
    // scheme stays injective (no literal '_' survives in encoded output).
    private static readonly char[] UnsafeChars = { '/', ':', '@', '?', '#', '%', '_' };

    /// <summary>
    /// Encodes a bom-ref/purl for use in storage paths.
    /// Each unsafe character becomes '_' followed by its two-digit hex code.
    /// Null/blank input encodes to "unknown".
    /// </summary>
    public static string Encode(string bomRef)
    {
        if (string.IsNullOrWhiteSpace(bomRef))
            return "unknown";
        var sb = new System.Text.StringBuilder(bomRef.Length);
        foreach (var c in bomRef)
        {
            if (Array.IndexOf(UnsafeChars, c) >= 0)
            {
                sb.Append('_').Append(((int)c).ToString("X2", System.Globalization.CultureInfo.InvariantCulture));
            }
            else
            {
                sb.Append(c);
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Decodes an encoded bom-ref back to its original form.
    /// Exact inverse of <see cref="Encode"/> for Encode's output; '_' sequences
    /// that are not valid escapes pass through unchanged.
    /// </summary>
    public static string Decode(string encoded)
    {
        if (string.IsNullOrWhiteSpace(encoded))
            return string.Empty;
        var sb = new System.Text.StringBuilder(encoded.Length);
        for (var i = 0; i < encoded.Length; i++)
        {
            var c = encoded[i];
            // '_' introduces a two-hex-digit escape when both digits are present.
            if (c == '_' && i + 2 < encoded.Length &&
                int.TryParse(encoded.AsSpan(i + 1, 2),
                    System.Globalization.NumberStyles.HexNumber,
                    System.Globalization.CultureInfo.InvariantCulture,
                    out var code))
            {
                sb.Append((char)code);
                i += 2; // consume the two hex digits
            }
            else
            {
                sb.Append(c);
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Builds the storage path for an artifact:
    /// artifacts/{bom-ref-encoded}/{serial-encoded}/{artifactId}.json
    /// </summary>
    public static string BuildPath(string bomRef, string serialNumber, string artifactId)
    {
        var encodedBomRef = Encode(bomRef);
        var encodedSerial = Encode(serialNumber);
        return $"artifacts/{encodedBomRef}/{encodedSerial}/{artifactId}.json";
    }
}

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Artifact.Core</RootNamespace>
<AssemblyName>StellaOps.Artifact.Core</AssemblyName>
<Description>Unified artifact storage interfaces and models for StellaOps evidence management</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Text.Json" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,277 @@
// -----------------------------------------------------------------------------
// ArtifactIndexRepository.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: PostgreSQL-backed artifact index for efficient querying
// -----------------------------------------------------------------------------
using StellaOps.Artifact.Core;

namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL repository for artifact index.
/// Provides efficient bom-ref based querying.
/// Query methods return only live entries (soft-deleted entries are excluded,
/// as implemented by the in-memory repository and the partial SQL indexes).
/// </summary>
public interface IArtifactIndexRepository
{
    /// <summary>
    /// Indexes a stored artifact (upsert on the tenant/bom-ref/serial/artifact key).
    /// </summary>
    Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default);
    /// <summary>
    /// Finds artifacts by bom-ref.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default);
    /// <summary>
    /// Finds artifacts by bom-ref and serial number.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
        string bomRef,
        string serialNumber,
        CancellationToken ct = default);
    /// <summary>
    /// Gets a specific artifact index entry, or null when absent/deleted.
    /// </summary>
    Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
    /// <summary>
    /// Removes an artifact from the index (soft delete); returns false when
    /// no live entry matched.
    /// </summary>
    Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default);
    /// <summary>
    /// Finds artifacts by SHA-256 hash (content deduplication).
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default);
    /// <summary>
    /// Finds up to <paramref name="limit"/> artifacts of a given type for a tenant.
    /// </summary>
    Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
        ArtifactType type,
        Guid tenantId,
        int limit = 100,
        CancellationToken ct = default);
}
/// <summary>
/// Artifact index entry for PostgreSQL storage
/// (maps to the evidence.artifact_index table).
/// </summary>
public sealed record ArtifactIndexEntry
{
    /// <summary>Primary key (generated when not supplied).</summary>
    public Guid Id { get; init; } = Guid.NewGuid();
    /// <summary>Tenant ID.</summary>
    public required Guid TenantId { get; init; }
    /// <summary>Package URL or bom-ref.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber.</summary>
    public required string SerialNumber { get; init; }
    /// <summary>Artifact ID.</summary>
    public required string ArtifactId { get; init; }
    /// <summary>Full storage key/path.</summary>
    public required string StorageKey { get; init; }
    /// <summary>Artifact type.</summary>
    public required ArtifactType Type { get; init; }
    /// <summary>Content type (MIME).</summary>
    public required string ContentType { get; init; }
    /// <summary>SHA-256 hash of the content.</summary>
    public required string Sha256 { get; init; }
    /// <summary>Size in bytes.</summary>
    public required long SizeBytes { get; init; }
    /// <summary>When the artifact was stored.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>When the index entry was last updated.</summary>
    public DateTimeOffset? UpdatedAt { get; init; }
    /// <summary>Soft-delete tombstone flag; deleted entries are kept for audit.</summary>
    public bool IsDeleted { get; init; }
    /// <summary>Deletion timestamp (set when <see cref="IsDeleted"/> becomes true).</summary>
    public DateTimeOffset? DeletedAt { get; init; }
}
/// <summary>
/// In-memory implementation for testing.
/// All operations are serialized on a single gate object; query methods return
/// snapshot lists so callers never observe the live backing collection.
/// </summary>
public sealed class InMemoryArtifactIndexRepository : IArtifactIndexRepository
{
    private readonly List<ArtifactIndexEntry> _store = new();
    private readonly object _gate = new();

    /// <inheritdoc />
    public Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
    {
        lock (_gate)
        {
            // Upsert semantics: drop any prior entry (deleted or not) with the
            // same logical key before inserting the new one.
            _store.RemoveAll(e => HasKey(e, entry.BomRef, entry.SerialNumber, entry.ArtifactId));
            _store.Add(entry);
        }
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
    {
        lock (_gate)
        {
            return Snapshot(e => e.BomRef == bomRef);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
        string bomRef,
        string serialNumber,
        CancellationToken ct = default)
    {
        lock (_gate)
        {
            return Snapshot(e => e.BomRef == bomRef && e.SerialNumber == serialNumber);
        }
    }

    /// <inheritdoc />
    public Task<ArtifactIndexEntry?> GetAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        lock (_gate)
        {
            return Task.FromResult(FindLive(bomRef, serialNumber, artifactId));
        }
    }

    /// <inheritdoc />
    public Task<bool> RemoveAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        lock (_gate)
        {
            var victim = FindLive(bomRef, serialNumber, artifactId);
            if (victim is null)
            {
                return Task.FromResult(false);
            }
            // Soft delete: replace the entry with a tombstoned copy in place.
            var position = _store.IndexOf(victim);
            _store[position] = victim with
            {
                IsDeleted = true,
                DeletedAt = DateTimeOffset.UtcNow
            };
            return Task.FromResult(true);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
    {
        lock (_gate)
        {
            return Snapshot(e => e.Sha256 == sha256);
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
        ArtifactType type,
        Guid tenantId,
        int limit = 100,
        CancellationToken ct = default)
    {
        lock (_gate)
        {
            return Snapshot(e => e.Type == type && e.TenantId == tenantId, limit);
        }
    }

    /// <summary>True when the entry carries the given logical key (ignores tombstone state).</summary>
    private static bool HasKey(ArtifactIndexEntry e, string bomRef, string serialNumber, string artifactId) =>
        e.BomRef == bomRef && e.SerialNumber == serialNumber && e.ArtifactId == artifactId;

    /// <summary>Finds the live (non-deleted) entry for a key; caller must hold the gate.</summary>
    private ArtifactIndexEntry? FindLive(string bomRef, string serialNumber, string artifactId) =>
        _store.FirstOrDefault(e => !e.IsDeleted && HasKey(e, bomRef, serialNumber, artifactId));

    /// <summary>Copies the live entries matching the predicate; caller must hold the gate.</summary>
    private Task<IReadOnlyList<ArtifactIndexEntry>> Snapshot(Func<ArtifactIndexEntry, bool> predicate, int? limit = null)
    {
        IEnumerable<ArtifactIndexEntry> query = _store.Where(e => !e.IsDeleted && predicate(e));
        if (limit is int max)
        {
            query = query.Take(max);
        }
        return Task.FromResult<IReadOnlyList<ArtifactIndexEntry>>(query.ToList());
    }
}
/// <summary>
/// PostgreSQL artifact index table schema.
/// </summary>
/// <remarks>
/// Mirrors migration 001 (task AS-003). Keep this constant and the standalone
/// migration script in sync when the schema evolves.
/// </remarks>
public static class ArtifactIndexSchema
{
    /// <summary>
    /// SQL migration to create the artifact index table and its query indexes.
    /// All statements are idempotent (IF NOT EXISTS).
    /// </summary>
    public const string CreateTableSql = """
        CREATE TABLE IF NOT EXISTS evidence.artifact_index (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            tenant_id UUID NOT NULL,
            bom_ref TEXT NOT NULL,
            serial_number TEXT NOT NULL,
            artifact_id TEXT NOT NULL,
            storage_key TEXT NOT NULL,
            artifact_type TEXT NOT NULL,
            content_type TEXT NOT NULL,
            sha256 TEXT NOT NULL,
            size_bytes BIGINT NOT NULL,
            created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
            updated_at TIMESTAMPTZ,
            is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
            deleted_at TIMESTAMPTZ,
            CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
        );
        -- Index for bom-ref queries (most common)
        CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
            ON evidence.artifact_index (tenant_id, bom_ref)
            WHERE NOT is_deleted;
        -- Index for SHA-256 lookups (deduplication)
        CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
            ON evidence.artifact_index (sha256)
            WHERE NOT is_deleted;
        -- Index for type-based queries
        CREATE INDEX IF NOT EXISTS idx_artifact_index_type
            ON evidence.artifact_index (tenant_id, artifact_type)
            WHERE NOT is_deleted;
        -- Index for serial number + bom-ref compound queries
        CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
            ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
            WHERE NOT is_deleted;
        -- Index for time-based queries (added for parity with migration 001;
        -- backs PostgresArtifactIndexRepository.FindByTimeRangeAsync)
        CREATE INDEX IF NOT EXISTS idx_artifact_index_created
            ON evidence.artifact_index (tenant_id, created_at DESC)
            WHERE NOT is_deleted;
        """;
}

View File

@@ -0,0 +1,407 @@
// -----------------------------------------------------------------------------
// ArtifactMigrationService.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-006 - Migrate existing evidence to unified store
// Description: Migrates existing evidence from legacy paths to unified store
// -----------------------------------------------------------------------------
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Migration options.
/// </summary>
/// <remarks>
/// Tunables for <see cref="ArtifactMigrationService"/>. Defaults favor a safe,
/// idempotent run (copy mode, skip-existing).
/// </remarks>
public sealed class ArtifactMigrationOptions
{
    /// <summary>
    /// Maximum number of parallel migrations. Default: 4.
    /// </summary>
    public int MaxParallelism { get; set; } = 4;
    /// <summary>
    /// Batch size for processing; results are awaited and reported one full
    /// batch at a time. Default: 100.
    /// </summary>
    public int BatchSize { get; set; } = 100;
    /// <summary>
    /// Whether to copy (preserve original) or move.
    /// NOTE(review): not consulted by ArtifactMigrationService in this file —
    /// originals are never removed either way; confirm intended behavior.
    /// </summary>
    public bool CopyMode { get; set; } = true;
    /// <summary>
    /// Skip artifacts that already exist in the unified store. Default: true.
    /// </summary>
    public bool SkipExisting { get; set; } = true;
    /// <summary>
    /// Whether to write a migration log.
    /// NOTE(review): also not consulted by ArtifactMigrationService in this file.
    /// </summary>
    public bool EnableLogging { get; set; } = true;
}
/// <summary>
/// Point-in-time snapshot of a running migration.
/// </summary>
public sealed record MigrationProgress
{
    /// <summary>Total number of artifacts discovered in the legacy source.</summary>
    public int TotalItems { get; init; }
    /// <summary>Artifacts handled so far (success + failure + skipped).</summary>
    public int ProcessedItems { get; init; }
    /// <summary>Artifacts migrated successfully.</summary>
    public int SuccessCount { get; init; }
    /// <summary>Artifacts that failed to migrate.</summary>
    public int FailureCount { get; init; }
    /// <summary>Artifacts skipped (e.g. already present in the target store).</summary>
    public int SkippedCount { get; init; }
    /// <summary>When the migration run began.</summary>
    public DateTimeOffset StartedAt { get; init; }
    /// <summary>When this snapshot was produced.</summary>
    public DateTimeOffset LastUpdateAt { get; init; }
    /// <summary>Path of the most recently processed artifact.</summary>
    public string CurrentItem { get; init; } = string.Empty;

    /// <summary>
    /// Naive linear ETA: average elapsed seconds per processed item, multiplied
    /// by the remaining item count. Zero until at least one item has been processed.
    /// </summary>
    public TimeSpan EstimatedRemaining
    {
        get
        {
            if (ProcessedItems <= 0)
            {
                return TimeSpan.Zero;
            }
            var elapsedSeconds = (LastUpdateAt - StartedAt).TotalSeconds;
            return TimeSpan.FromSeconds((TotalItems - ProcessedItems) * elapsedSeconds / ProcessedItems);
        }
    }
}
/// <summary>
/// Result of migrating a single artifact.
/// </summary>
public sealed record ArtifactMigrationResult
{
    /// <summary>Path of the artifact in the legacy store.</summary>
    public required string OriginalPath { get; init; }
    /// <summary>Storage key in the unified store; null on failure or when skipped.</summary>
    public required string? NewPath { get; init; }
    /// <summary>True when the artifact was stored, or was intentionally skipped.</summary>
    public required bool Success { get; init; }
    /// <summary>True when the artifact already existed and was not re-written.</summary>
    public required bool Skipped { get; init; }
    /// <summary>Resolved bom-ref (purl, CycloneDX bom-ref, or deterministic fallback).</summary>
    public string? BomRef { get; init; }
    /// <summary>Resolved CycloneDX serial number (or deterministic fallback).</summary>
    public string? SerialNumber { get; init; }
    /// <summary>Failure detail when <see cref="Success"/> is false.</summary>
    public string? ErrorMessage { get; init; }
}
/// <summary>
/// Legacy artifact source for migration.
/// </summary>
public interface ILegacyArtifactSource
{
    /// <summary>
    /// Enumerates all artifacts in the legacy store.
    /// </summary>
    IAsyncEnumerable<LegacyArtifact> EnumerateAsync(CancellationToken ct = default);
    /// <summary>
    /// Gets the total count of artifacts (used for progress/ETA reporting).
    /// </summary>
    Task<int> CountAsync(CancellationToken ct = default);
    /// <summary>
    /// Reads content from a legacy path. Returns null when the content is
    /// missing (callers treat null as "content not found").
    /// </summary>
    Task<Stream?> ReadAsync(string legacyPath, CancellationToken ct = default);
}
/// <summary>
/// Legacy artifact descriptor.
/// </summary>
public sealed record LegacyArtifact
{
    /// <summary>Path/key of the artifact within the legacy store.</summary>
    public required string LegacyPath { get; init; }
    /// <summary>MIME content type; drives SBOM detection and type inference.</summary>
    public required string ContentType { get; init; }
    /// <summary>Size of the artifact in bytes.</summary>
    public required long SizeBytes { get; init; }
    /// <summary>Original creation timestamp in the legacy store.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>Owning tenant; copied onto the unified store request.</summary>
    public Guid TenantId { get; init; }
    /// <summary>Optional legacy bundle identifier, when known.</summary>
    public string? BundleId { get; init; }
}
/// <summary>
/// Service for migrating legacy evidence to unified artifact store.
/// </summary>
/// <remarks>
/// Artifacts are read from an <see cref="ILegacyArtifactSource"/>, assigned a
/// (bomRef, serialNumber) identity — extracted from SBOM content when possible,
/// otherwise derived deterministically from the legacy path — and written to the
/// target <see cref="IArtifactStore"/> with bounded parallelism, one batch at a
/// time, reporting progress per completed item.
/// </remarks>
public sealed class ArtifactMigrationService
{
    private readonly IArtifactStore _targetStore;
    private readonly ILegacyArtifactSource _source;
    private readonly ICycloneDxExtractor _extractor;
    private readonly ArtifactMigrationOptions _options;
    private readonly ILogger<ArtifactMigrationService> _logger;

    public ArtifactMigrationService(
        IArtifactStore targetStore,
        ILegacyArtifactSource source,
        ICycloneDxExtractor extractor,
        ArtifactMigrationOptions options,
        ILogger<ArtifactMigrationService> logger)
    {
        _targetStore = targetStore ?? throw new ArgumentNullException(nameof(targetStore));
        _source = source ?? throw new ArgumentNullException(nameof(source));
        _extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs the migration asynchronously, yielding one result per legacy artifact
    /// and reporting aggregate progress after each completed item.
    /// </summary>
    /// <param name="progress">Optional receiver for per-item progress snapshots.</param>
    /// <param name="ct">Cancellation token observed between and within items.</param>
    public async IAsyncEnumerable<ArtifactMigrationResult> MigrateAsync(
        IProgress<MigrationProgress>? progress = null,
        [EnumeratorCancellation] CancellationToken ct = default)
    {
        var totalCount = await _source.CountAsync(ct).ConfigureAwait(false);
        var startedAt = DateTimeOffset.UtcNow;
        var processed = 0;
        var succeeded = 0;
        var failed = 0;
        var skipped = 0;
        _logger.LogInformation("Starting migration of {Count} artifacts", totalCount);

        // Bounds concurrent MigrateOneAsync calls. Deliberately not disposed:
        // worker tasks Release() in a finally block, and disposing early would
        // make those Release() calls throw if the enumeration is abandoned.
        var semaphore = new SemaphoreSlim(_options.MaxParallelism);
        var batch = new List<Task<ArtifactMigrationResult>>(_options.BatchSize);

        // Updates the running counters and builds the progress snapshot for one
        // completed result (previously duplicated in two places).
        MigrationProgress RecordResult(ArtifactMigrationResult result)
        {
            processed++;
            if (result.Skipped) skipped++;
            else if (result.Success) succeeded++;
            else failed++;
            return new MigrationProgress
            {
                TotalItems = totalCount,
                ProcessedItems = processed,
                SuccessCount = succeeded,
                FailureCount = failed,
                SkippedCount = skipped,
                StartedAt = startedAt,
                LastUpdateAt = DateTimeOffset.UtcNow,
                CurrentItem = result.OriginalPath
            };
        }

        await foreach (var legacy in _source.EnumerateAsync(ct).ConfigureAwait(false))
        {
            await semaphore.WaitAsync(ct).ConfigureAwait(false);
            batch.Add(Task.Run(async () =>
            {
                try
                {
                    return await MigrateOneAsync(legacy, ct).ConfigureAwait(false);
                }
                finally
                {
                    semaphore.Release();
                }
            }, ct));

            if (batch.Count < _options.BatchSize)
            {
                continue;
            }

            // Drain the full batch before enqueueing more work.
            foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
            {
                progress?.Report(RecordResult(result));
                yield return result;
            }
            batch.Clear();
        }

        // Flush the final partial batch.
        if (batch.Count > 0)
        {
            foreach (var result in await ProcessBatchAsync(batch).ConfigureAwait(false))
            {
                progress?.Report(RecordResult(result));
                yield return result;
            }
        }

        _logger.LogInformation(
            "Migration completed: {Succeeded} succeeded, {Failed} failed, {Skipped} skipped out of {Total}",
            succeeded, failed, skipped, totalCount);
    }

    /// <summary>
    /// Awaits every task in the batch and returns their results in batch order.
    /// MigrateOneAsync materializes all failures as results, so WhenAll does not fault.
    /// </summary>
    private static async Task<IReadOnlyList<ArtifactMigrationResult>> ProcessBatchAsync(
        List<Task<ArtifactMigrationResult>> batch)
    {
        return await Task.WhenAll(batch).ConfigureAwait(false);
    }

    /// <summary>
    /// Migrates a single legacy artifact; never throws — all failures (including
    /// cancellation mid-item) are returned as a failed <see cref="ArtifactMigrationResult"/>.
    /// </summary>
    private async Task<ArtifactMigrationResult> MigrateOneAsync(LegacyArtifact legacy, CancellationToken ct)
    {
        try
        {
            // await using guarantees the source stream is disposed even if
            // buffering throws (the original leaked it on a failed CopyToAsync).
            await using var stream = await _source.ReadAsync(legacy.LegacyPath, ct).ConfigureAwait(false);
            if (stream is null)
            {
                return new ArtifactMigrationResult
                {
                    OriginalPath = legacy.LegacyPath,
                    NewPath = null,
                    Success = false,
                    Skipped = false,
                    ErrorMessage = "Content not found"
                };
            }

            // Buffer so the content can be read twice (metadata extraction + upload).
            using var memoryStream = new MemoryStream();
            await stream.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
            memoryStream.Position = 0;

            var (bomRef, serialNumber) = await ResolveIdentityAsync(legacy, memoryStream, ct).ConfigureAwait(false);

            // Generate artifact ID from the legacy path.
            var artifactId = GenerateArtifactId(legacy);

            // Skip artifacts already present in the unified store (idempotent reruns).
            if (_options.SkipExisting)
            {
                var exists = await _targetStore.ExistsAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
                if (exists)
                {
                    return new ArtifactMigrationResult
                    {
                        OriginalPath = legacy.LegacyPath,
                        NewPath = null,
                        Success = true,
                        Skipped = true,
                        BomRef = bomRef,
                        SerialNumber = serialNumber
                    };
                }
            }

            // Store in the unified store, preserving provenance in metadata.
            var storeRequest = new ArtifactStoreRequest
            {
                BomRef = bomRef,
                SerialNumber = serialNumber,
                ArtifactId = artifactId,
                Content = memoryStream,
                ContentType = legacy.ContentType,
                Type = InferArtifactType(legacy.ContentType, legacy.LegacyPath),
                TenantId = legacy.TenantId,
                Overwrite = false,
                Metadata = new Dictionary<string, string>
                {
                    ["legacy_path"] = legacy.LegacyPath,
                    ["migrated_at"] = DateTimeOffset.UtcNow.ToString("O")
                }
            };
            var result = await _targetStore.StoreAsync(storeRequest, ct).ConfigureAwait(false);
            return new ArtifactMigrationResult
            {
                OriginalPath = legacy.LegacyPath,
                NewPath = result.StorageKey,
                Success = result.Success,
                Skipped = false,
                BomRef = bomRef,
                SerialNumber = serialNumber,
                ErrorMessage = result.ErrorMessage
            };
        }
        catch (Exception ex)
        {
            // Intentionally broad: one bad artifact must not abort the run.
            // NOTE(review): this also converts OperationCanceledException into a
            // failed result for the in-flight item — confirm that is desired.
            _logger.LogError(ex, "Failed to migrate {Path}", legacy.LegacyPath);
            return new ArtifactMigrationResult
            {
                OriginalPath = legacy.LegacyPath,
                NewPath = null,
                Success = false,
                Skipped = false,
                ErrorMessage = ex.Message
            };
        }
    }

    /// <summary>
    /// Resolves the (bomRef, serialNumber) identity for a legacy artifact:
    /// extracted from SBOM content when possible, deterministic path-derived
    /// fallbacks otherwise. Rewinds <paramref name="content"/> after extraction.
    /// </summary>
    private async Task<(string BomRef, string SerialNumber)> ResolveIdentityAsync(
        LegacyArtifact legacy,
        MemoryStream content,
        CancellationToken ct)
    {
        if (IsSbomContent(legacy.ContentType))
        {
            var metadata = await _extractor.ExtractAsync(content, ct).ConfigureAwait(false);
            content.Position = 0; // rewind for the subsequent upload
            if (metadata.Success)
            {
                // Prefer purl, then bom-ref, then the fallback.
                return (
                    metadata.PrimaryPurl ?? metadata.PrimaryBomRef ?? GenerateFallbackBomRef(legacy),
                    metadata.SerialNumber ?? GenerateFallbackSerial(legacy));
            }
        }
        // Non-SBOM content or malformed SBOM: derive identity from the legacy path.
        return (GenerateFallbackBomRef(legacy), GenerateFallbackSerial(legacy));
    }

    /// <summary>
    /// Heuristic for content types that may contain an SBOM worth parsing.
    /// </summary>
    private static bool IsSbomContent(string contentType)
    {
        return contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)
            || contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase)
            || contentType == "application/json"; // Assume JSON might be SBOM
    }

    /// <summary>
    /// Generates a purl-like reference from the legacy path so artifacts without
    /// SBOM metadata remain addressable in the unified store.
    /// </summary>
    private static string GenerateFallbackBomRef(LegacyArtifact legacy)
    {
        var sanitized = legacy.LegacyPath
            .Replace("\\", "/")
            .Replace("tenants/", "")
            .Replace("bundles/", "");
        return $"pkg:stella/legacy/{Uri.EscapeDataString(sanitized)}";
    }

    /// <summary>
    /// Generates a deterministic serial from the path: the same legacy path always
    /// yields the same URN, keeping the migration idempotent.
    /// </summary>
    private static string GenerateFallbackSerial(LegacyArtifact legacy)
    {
        // Static SHA256.HashData avoids allocating/disposing a hash instance.
        var hash = System.Security.Cryptography.SHA256.HashData(
            System.Text.Encoding.UTF8.GetBytes(legacy.LegacyPath));
        var guid = new Guid(hash.AsSpan(0, 16));
        return $"urn:uuid:{guid}";
    }

    /// <summary>
    /// Extracts the filename (without extension) from the path, or generates a UUID.
    /// </summary>
    private static string GenerateArtifactId(LegacyArtifact legacy)
    {
        var fileName = Path.GetFileNameWithoutExtension(legacy.LegacyPath);
        return !string.IsNullOrEmpty(fileName) ? fileName : Guid.NewGuid().ToString();
    }

    /// <summary>
    /// Classifies an artifact from its content type and path. Case-insensitive
    /// throughout, consistent with <see cref="IsSbomContent"/> — the original used
    /// case-sensitive Contains here, so e.g. a "CycloneDX" content type fell
    /// through to Unknown.
    /// </summary>
    private static ArtifactType InferArtifactType(string contentType, string path)
    {
        if (contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)
            || contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase))
            return ArtifactType.Sbom;
        if (contentType.Contains("vex", StringComparison.OrdinalIgnoreCase)
            || contentType.Contains("openvex", StringComparison.OrdinalIgnoreCase))
            return ArtifactType.Vex;
        if (contentType.Contains("dsse", StringComparison.OrdinalIgnoreCase)
            || path.Contains("dsse", StringComparison.OrdinalIgnoreCase))
            return ArtifactType.DsseEnvelope;
        if (path.Contains("rekor", StringComparison.OrdinalIgnoreCase))
            return ArtifactType.RekorProof;
        if (path.Contains("verdict", StringComparison.OrdinalIgnoreCase))
            return ArtifactType.Verdict;
        return ArtifactType.Unknown;
    }
}

View File

@@ -0,0 +1,123 @@
-- Artifact Index Schema Migration 001: Initial Schema
-- Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
-- Tasks: AS-003 - Create ArtifactStore PostgreSQL index
-- Description: Creates the artifact index table for unified artifact storage
-- ============================================================================
-- Schema Creation
-- ============================================================================
CREATE SCHEMA IF NOT EXISTS evidence;
-- ============================================================================
-- Artifact Index Table
-- ============================================================================
-- Indexes S3-stored artifacts for efficient bom-ref based querying.
-- Supports content deduplication via SHA-256 and soft-delete for retention.
-- Rows are never hard-deleted by the application: removal sets is_deleted/
-- deleted_at (see RLS section — no DELETE policy is defined).
CREATE TABLE IF NOT EXISTS evidence.artifact_index (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    bom_ref TEXT NOT NULL,
    serial_number TEXT NOT NULL,
    artifact_id TEXT NOT NULL,
    storage_key TEXT NOT NULL,
    artifact_type TEXT NOT NULL,
    content_type TEXT NOT NULL,
    sha256 TEXT NOT NULL,
    size_bytes BIGINT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ,
    is_deleted BOOLEAN NOT NULL DEFAULT FALSE,
    deleted_at TIMESTAMPTZ,
    -- Unique constraint per tenant for artifact key
    CONSTRAINT uq_artifact_index_key UNIQUE (tenant_id, bom_ref, serial_number, artifact_id)
);
-- ============================================================================
-- Indexes
-- ============================================================================
-- All indexes are partial (WHERE NOT is_deleted) so soft-deleted rows do not
-- bloat the hot query paths.
-- Index for bom-ref queries (most common query pattern)
CREATE INDEX IF NOT EXISTS idx_artifact_index_bom_ref
    ON evidence.artifact_index (tenant_id, bom_ref)
    WHERE NOT is_deleted;
-- Index for SHA-256 lookups (content deduplication)
CREATE INDEX IF NOT EXISTS idx_artifact_index_sha256
    ON evidence.artifact_index (sha256)
    WHERE NOT is_deleted;
-- Index for type-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_type
    ON evidence.artifact_index (tenant_id, artifact_type)
    WHERE NOT is_deleted;
-- Index for serial number + bom-ref compound queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_serial
    ON evidence.artifact_index (tenant_id, bom_ref, serial_number)
    WHERE NOT is_deleted;
-- Index for time-based queries
CREATE INDEX IF NOT EXISTS idx_artifact_index_created
    ON evidence.artifact_index (tenant_id, created_at DESC)
    WHERE NOT is_deleted;
-- ============================================================================
-- Row Level Security (RLS)
-- ============================================================================
ALTER TABLE evidence.artifact_index ENABLE ROW LEVEL SECURITY;
-- Tenant isolation helper: reads the tenant from the per-session variable
-- app.tenant_id and fails loudly when it is not set, so no query can run
-- unscoped. SECURITY DEFINER so current_setting is evaluated consistently
-- regardless of the calling role.
CREATE OR REPLACE FUNCTION evidence.require_current_tenant()
RETURNS UUID
LANGUAGE plpgsql STABLE SECURITY DEFINER
AS $$
DECLARE
v_tenant TEXT;
BEGIN
v_tenant := current_setting('app.tenant_id', true);
IF v_tenant IS NULL OR v_tenant = '' THEN
RAISE EXCEPTION 'app.tenant_id session variable not set'
USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
ERRCODE = 'P0001';
END IF;
RETURN v_tenant::UUID;
END;
$$;
-- RLS policies
-- NOTE: the UPDATE policy has no explicit WITH CHECK clause; PostgreSQL then
-- applies the USING expression to the updated row too, so a row cannot be
-- re-homed to another tenant. No DELETE policy is defined on purpose: with RLS
-- enabled, DELETE is denied by default, matching the soft-delete design above.
CREATE POLICY artifact_index_tenant_isolation ON evidence.artifact_index
USING (tenant_id = evidence.require_current_tenant());
CREATE POLICY artifact_index_insert_tenant ON evidence.artifact_index
FOR INSERT
WITH CHECK (tenant_id = evidence.require_current_tenant());
CREATE POLICY artifact_index_update_tenant ON evidence.artifact_index
FOR UPDATE
USING (tenant_id = evidence.require_current_tenant());
-- ============================================================================
-- Comments
-- ============================================================================
COMMENT ON TABLE evidence.artifact_index IS
'Index of artifacts stored in S3 for efficient bom-ref based querying';
COMMENT ON COLUMN evidence.artifact_index.bom_ref IS
'Package URL (purl) or CycloneDX bom-ref';
COMMENT ON COLUMN evidence.artifact_index.serial_number IS
'CycloneDX serialNumber URN (urn:uuid:...)';
COMMENT ON COLUMN evidence.artifact_index.storage_key IS
'Full S3 object key/path';
COMMENT ON COLUMN evidence.artifact_index.sha256 IS
'SHA-256 hash for content deduplication';
COMMENT ON COLUMN evidence.artifact_index.artifact_type IS
'Type classification: Sbom, Vex, DsseEnvelope, RekorProof, Verdict, etc.';

View File

@@ -0,0 +1,310 @@
// -----------------------------------------------------------------------------
// PostgresArtifactIndexRepository.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-003 - Create ArtifactStore PostgreSQL index
// Description: PostgreSQL implementation of artifact index repository
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Artifact.Core;
using StellaOps.Infrastructure.Postgres.Connections;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// PostgreSQL data source for the Artifact module.
/// </summary>
/// <remarks>
/// Defaults the schema to <see cref="DefaultSchemaName"/> ("evidence") when the
/// configured options do not specify one.
/// </remarks>
public sealed class ArtifactDataSource : DataSourceBase
{
    /// <summary>Schema used when none is configured.</summary>
    public const string DefaultSchemaName = "evidence";
    public ArtifactDataSource(
        Microsoft.Extensions.Options.IOptions<StellaOps.Infrastructure.Postgres.Options.PostgresOptions> options,
        ILogger<ArtifactDataSource> logger)
        : base(CreateOptions(options.Value), logger)
    {
    }
    /// <inheritdoc cref="DataSourceBase"/>
    protected override string ModuleName => "Artifact";
    // Ensures a schema name is always set before handing options to the base.
    // NOTE(review): this mutates the options instance obtained from
    // IOptions<T>.Value (typically a shared singleton) rather than a copy —
    // confirm no other module depends on SchemaName remaining unset.
    private static StellaOps.Infrastructure.Postgres.Options.PostgresOptions CreateOptions(
        StellaOps.Infrastructure.Postgres.Options.PostgresOptions baseOptions)
    {
        if (string.IsNullOrWhiteSpace(baseOptions.SchemaName))
        {
            baseOptions.SchemaName = DefaultSchemaName;
        }
        return baseOptions;
    }
}
/// <summary>
/// PostgreSQL implementation of <see cref="IArtifactIndexRepository"/>.
/// </summary>
/// <remarks>
/// Every query is tenant-scoped twice: the connection is opened for the tenant
/// (driving the app.tenant_id RLS session variable) and the SQL carries an
/// explicit tenant_id predicate, except <see cref="FindBySha256Async"/> which
/// relies on RLS alone. All reads filter out soft-deleted rows.
/// </remarks>
public sealed class PostgresArtifactIndexRepository : RepositoryBase<ArtifactDataSource>, IArtifactIndexRepository
{
    // Tenant identifier used both for connection scoping and as the tenant_id
    // query parameter.
    // NOTE(review): several methods call Guid.Parse(_tenantId); the default
    // value "default" is not a GUID and will throw FormatException on first
    // use — confirm all call sites supply a GUID string.
    private readonly string _tenantId;
    public PostgresArtifactIndexRepository(
        ArtifactDataSource dataSource,
        ILogger<PostgresArtifactIndexRepository> logger,
        string tenantId = "default")
        : base(dataSource, logger)
    {
        _tenantId = tenantId;
    }
    /// <inheritdoc />
    /// <remarks>
    /// Upsert: on key conflict the row is refreshed and any soft-delete flag is
    /// cleared (is_deleted = FALSE, deleted_at = NULL), so re-indexing revives a
    /// previously removed entry.
    /// NOTE(review): the row's tenant_id comes from <paramref name="entry"/> while
    /// the connection is scoped to _tenantId; RLS rejects a mismatch at runtime.
    /// </remarks>
    public async Task IndexAsync(ArtifactIndexEntry entry, CancellationToken ct = default)
    {
        const string sql = """
            INSERT INTO evidence.artifact_index (
                id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                artifact_type, content_type, sha256, size_bytes, created_at
            ) VALUES (
                @id, @tenant_id, @bom_ref, @serial_number, @artifact_id, @storage_key,
                @artifact_type, @content_type, @sha256, @size_bytes, @created_at
            )
            ON CONFLICT (tenant_id, bom_ref, serial_number, artifact_id)
            DO UPDATE SET
                storage_key = EXCLUDED.storage_key,
                artifact_type = EXCLUDED.artifact_type,
                content_type = EXCLUDED.content_type,
                sha256 = EXCLUDED.sha256,
                size_bytes = EXCLUDED.size_bytes,
                updated_at = NOW(),
                is_deleted = FALSE,
                deleted_at = NULL
            """;
        await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "id", entry.Id);
        AddParameter(command, "tenant_id", entry.TenantId);
        AddParameter(command, "bom_ref", entry.BomRef);
        AddParameter(command, "serial_number", entry.SerialNumber);
        AddParameter(command, "artifact_id", entry.ArtifactId);
        AddParameter(command, "storage_key", entry.StorageKey);
        // Enum persisted as its member name; MapEntry parses it back.
        AddParameter(command, "artifact_type", entry.Type.ToString());
        AddParameter(command, "content_type", entry.ContentType);
        AddParameter(command, "sha256", entry.Sha256);
        AddParameter(command, "size_bytes", entry.SizeBytes);
        AddParameter(command, "created_at", entry.CreatedAt);
        await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
    }
    /// <inheritdoc />
    /// <remarks>Live entries for a bom-ref, newest first.</remarks>
    public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAsync(string bomRef, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND NOT is_deleted
            ORDER BY created_at DESC
            """;
        return await QueryAsync(_tenantId, sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
            AddParameter(cmd, "bom_ref", bomRef);
        }, MapEntry, ct).ConfigureAwait(false);
    }
    /// <inheritdoc />
    /// <remarks>Live entries for a (bom-ref, serialNumber) pair, newest first.</remarks>
    public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByBomRefAndSerialAsync(
        string bomRef,
        string serialNumber,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number AND NOT is_deleted
            ORDER BY created_at DESC
            """;
        return await QueryAsync(_tenantId, sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
            AddParameter(cmd, "bom_ref", bomRef);
            AddParameter(cmd, "serial_number", serialNumber);
        }, MapEntry, ct).ConfigureAwait(false);
    }
    /// <inheritdoc />
    /// <remarks>
    /// Point lookup by the full unique key (bomRef, serialNumber, artifactId);
    /// null when no live entry matches.
    /// </remarks>
    public async Task<ArtifactIndexEntry?> GetAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
              AND artifact_id = @artifact_id AND NOT is_deleted
            """;
        var results = await QueryAsync(_tenantId, sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", Guid.Parse(_tenantId));
            AddParameter(cmd, "bom_ref", bomRef);
            AddParameter(cmd, "serial_number", serialNumber);
            AddParameter(cmd, "artifact_id", artifactId);
        }, MapEntry, ct).ConfigureAwait(false);
        return results.Count > 0 ? results[0] : null;
    }
    /// <inheritdoc />
    /// <remarks>
    /// Soft delete: sets is_deleted/deleted_at instead of removing the row.
    /// Returns false when no live entry matched.
    /// </remarks>
    public async Task<bool> RemoveAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE evidence.artifact_index
            SET is_deleted = TRUE, deleted_at = NOW(), updated_at = NOW()
            WHERE tenant_id = @tenant_id AND bom_ref = @bom_ref AND serial_number = @serial_number
              AND artifact_id = @artifact_id AND NOT is_deleted
            """;
        await using var connection = await DataSource.OpenConnectionAsync(_tenantId, "writer", ct).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "tenant_id", Guid.Parse(_tenantId));
        AddParameter(command, "bom_ref", bomRef);
        AddParameter(command, "serial_number", serialNumber);
        AddParameter(command, "artifact_id", artifactId);
        var rowsAffected = await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
        return rowsAffected > 0;
    }
    /// <inheritdoc />
    /// <remarks>
    /// Deduplication lookup by content hash, capped at 100 rows.
    /// NOTE(review): unlike the other queries, the SQL has no tenant_id
    /// predicate — tenant isolation rests entirely on the RLS policy attached
    /// to the _tenantId-scoped connection; confirm that is intended.
    /// </remarks>
    public async Task<IReadOnlyList<ArtifactIndexEntry>> FindBySha256Async(string sha256, CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE sha256 = @sha256 AND NOT is_deleted
            ORDER BY created_at DESC
            LIMIT 100
            """;
        return await QueryAsync(_tenantId, sql, cmd =>
        {
            AddParameter(cmd, "sha256", sha256);
        }, MapEntry, ct).ConfigureAwait(false);
    }
    /// <inheritdoc />
    /// <remarks>
    /// Live entries of a given type, newest first, capped at <paramref name="limit"/>.
    /// Note the connection tenant comes from <paramref name="tenantId"/> here,
    /// not from the constructor-supplied _tenantId.
    /// </remarks>
    public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTypeAsync(
        ArtifactType type,
        Guid tenantId,
        int limit = 100,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND artifact_type = @artifact_type AND NOT is_deleted
            ORDER BY created_at DESC
            LIMIT @limit
            """;
        return await QueryAsync(tenantId.ToString(), sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", tenantId);
            AddParameter(cmd, "artifact_type", type.ToString());
            AddParameter(cmd, "limit", limit);
        }, MapEntry, ct).ConfigureAwait(false);
    }
    /// <summary>
    /// Finds artifacts within a time range.
    /// </summary>
    /// <remarks>
    /// Half-open interval [<paramref name="from"/>, <paramref name="to"/>),
    /// newest first, capped at <paramref name="limit"/>. Backed by the
    /// idx_artifact_index_created index (migration 001).
    /// </remarks>
    public async Task<IReadOnlyList<ArtifactIndexEntry>> FindByTimeRangeAsync(
        Guid tenantId,
        DateTimeOffset from,
        DateTimeOffset to,
        int limit = 1000,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT id, tenant_id, bom_ref, serial_number, artifact_id, storage_key,
                   artifact_type, content_type, sha256, size_bytes, created_at, updated_at,
                   is_deleted, deleted_at
            FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND created_at >= @from AND created_at < @to AND NOT is_deleted
            ORDER BY created_at DESC
            LIMIT @limit
            """;
        return await QueryAsync(tenantId.ToString(), sql, cmd =>
        {
            AddParameter(cmd, "tenant_id", tenantId);
            AddParameter(cmd, "from", from);
            AddParameter(cmd, "to", to);
            AddParameter(cmd, "limit", limit);
        }, MapEntry, ct).ConfigureAwait(false);
    }
    /// <summary>
    /// Counts artifacts for a tenant.
    /// </summary>
    /// <remarks>Counts live (non-deleted) rows only.</remarks>
    public async Task<int> CountAsync(Guid tenantId, CancellationToken ct = default)
    {
        const string sql = """
            SELECT COUNT(*) FROM evidence.artifact_index
            WHERE tenant_id = @tenant_id AND NOT is_deleted
            """;
        await using var connection = await DataSource.OpenConnectionAsync(tenantId.ToString(), "reader", ct).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "tenant_id", tenantId);
        var result = await command.ExecuteScalarAsync(ct).ConfigureAwait(false);
        return Convert.ToInt32(result);
    }
    // Materializes one row into an ArtifactIndexEntry. The ordinal positions
    // (0..13) must match the SELECT column order used by every query in this
    // class — change them together. Unrecognized artifact_type strings map to
    // ArtifactType.Unknown rather than throwing.
    private static ArtifactIndexEntry MapEntry(NpgsqlDataReader reader)
    {
        var artifactTypeString = reader.GetString(6);
        var artifactType = Enum.TryParse<ArtifactType>(artifactTypeString, out var at) ? at : ArtifactType.Unknown;
        return new ArtifactIndexEntry
        {
            Id = reader.GetGuid(0),
            TenantId = reader.GetGuid(1),
            BomRef = reader.GetString(2),
            SerialNumber = reader.GetString(3),
            ArtifactId = reader.GetString(4),
            StorageKey = reader.GetString(5),
            Type = artifactType,
            ContentType = reader.GetString(7),
            Sha256 = reader.GetString(8),
            SizeBytes = reader.GetInt64(9),
            CreatedAt = reader.GetFieldValue<DateTimeOffset>(10),
            UpdatedAt = reader.IsDBNull(11) ? null : reader.GetFieldValue<DateTimeOffset>(11),
            IsDeleted = reader.GetBoolean(12),
            DeletedAt = reader.IsDBNull(13) ? null : reader.GetFieldValue<DateTimeOffset>(13)
        };
    }
}

View File

@@ -0,0 +1,429 @@
// -----------------------------------------------------------------------------
// S3ArtifactStore.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Task: AS-002 - Implement S3-backed ArtifactStore
// Description: S3-backed implementation of unified artifact store
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Artifact.Core;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Configuration options for S3-backed artifact store.
/// </summary>
public sealed class S3UnifiedArtifactStoreOptions
{
    /// <summary>
    /// S3 bucket name. Required — <see cref="S3UnifiedArtifactStore"/> throws at
    /// construction when empty.
    /// </summary>
    public string BucketName { get; set; } = string.Empty;
    /// <summary>
    /// Path prefix within the bucket.
    /// </summary>
    public string Prefix { get; set; } = "artifacts";
    /// <summary>
    /// Whether to use content-addressable storage for deduplication.
    /// </summary>
    public bool EnableDeduplication { get; set; } = true;
    /// <summary>
    /// Whether to store metadata as sidecar JSON files.
    /// </summary>
    public bool UseSidecarMetadata { get; set; } = false;
    /// <summary>
    /// Whether to overwrite existing artifacts.
    /// </summary>
    public bool AllowOverwrite { get; set; } = false;
    /// <summary>
    /// Maximum artifact size in bytes. Default: 100 MB.
    /// </summary>
    public long MaxArtifactSizeBytes { get; set; } = 100 * 1024 * 1024; // 100MB
    /// <summary>
    /// Retention policies per artifact type. Key is the ArtifactType enum member
    /// name, i.e. it must match <c>ArtifactType.ToString()</c> exactly.
    /// Sprint: SPRINT_20260118_017 (AS-002)
    /// </summary>
    public Dictionary<string, RetentionPolicy> RetentionPolicies { get; set; } = new()
    {
        ["Sbom"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false }, // 7 years
        ["Vex"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
        // Fixed: key was "Dsse", which does not match the ArtifactType.DsseEnvelope
        // member name used elsewhere in this codebase, so the DSSE policy could
        // never be matched by enum name and fell back to the default.
        ["DsseEnvelope"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
        ["RekorProof"] = new RetentionPolicy { RetentionDays = 365 * 10, DeleteAfterExpiry = false }, // 10 years
        // TODO(review): confirm the keys below match actual ArtifactType member
        // names; any mismatch silently falls back to DefaultRetentionPolicy.
        ["Attestation"] = new RetentionPolicy { RetentionDays = 365 * 7, DeleteAfterExpiry = false },
        ["BuildLog"] = new RetentionPolicy { RetentionDays = 365, DeleteAfterExpiry = true }, // 1 year
        ["ScanResult"] = new RetentionPolicy { RetentionDays = 365 * 2, DeleteAfterExpiry = true }, // 2 years
        ["Temporary"] = new RetentionPolicy { RetentionDays = 30, DeleteAfterExpiry = true }
    };
    /// <summary>
    /// Default retention policy for unspecified artifact types.
    /// </summary>
    public RetentionPolicy DefaultRetentionPolicy { get; set; } = new()
    {
        RetentionDays = 365 * 5, // 5 years default
        DeleteAfterExpiry = false
    };
}
/// <summary>
/// Retention policy for artifact types.
/// Sprint: SPRINT_20260118_017 (AS-002)
/// </summary>
/// <remarks>
/// Describes how long artifacts of a given type are kept, what happens when the
/// window elapses, and an optional S3 storage-class transition.
/// </remarks>
public sealed class RetentionPolicy
{
    // Five years expressed in days: the store-wide default retention window.
    private const int FiveYearsInDays = 365 * 5;

    /// <summary>
    /// Number of days to retain artifacts. Default: five years.
    /// </summary>
    public int RetentionDays { get; set; } = FiveYearsInDays;

    /// <summary>
    /// True to delete artifacts after expiry; false to only mark them expired.
    /// Default: false.
    /// </summary>
    public bool DeleteAfterExpiry { get; set; }

    /// <summary>
    /// Optional S3 storage class to transition to after <see cref="TransitionAfterDays"/>.
    /// </summary>
    public string? TransitionStorageClass { get; set; }

    /// <summary>
    /// Days after creation to transition to <see cref="TransitionStorageClass"/>.
    /// </summary>
    public int? TransitionAfterDays { get; set; }
}
/// <summary>
/// S3 client interface for dependency injection.
/// </summary>
/// <remarks>
/// Thin abstraction over the subset of S3 operations the unified artifact store
/// needs; keeps <see cref="S3UnifiedArtifactStore"/> testable without an SDK.
/// </remarks>
public interface IS3UnifiedClient
{
    /// <summary>Returns true when an object exists at <paramref name="key"/>.</summary>
    Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken ct);
    /// <summary>Uploads <paramref name="content"/> with the given content type and user metadata.</summary>
    Task PutObjectAsync(string bucketName, string key, Stream content, string contentType, IDictionary<string, string> metadata, CancellationToken ct);
    /// <summary>Downloads the object's content; null presumably signals a missing key — confirm implementations.</summary>
    Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken ct);
    /// <summary>Reads the object's user metadata; null presumably signals a missing key — confirm implementations.</summary>
    Task<IDictionary<string, string>?> GetObjectMetadataAsync(string bucketName, string key, CancellationToken ct);
    /// <summary>Deletes the object at <paramref name="key"/>.</summary>
    Task DeleteObjectAsync(string bucketName, string key, CancellationToken ct);
    /// <summary>Lists object keys under <paramref name="prefix"/>.</summary>
    Task<IReadOnlyList<string>> ListObjectsAsync(string bucketName, string prefix, CancellationToken ct);
}
/// <summary>
/// S3-backed implementation of <see cref="IArtifactStore"/>.
/// Supports content deduplication via SHA-256 and the unified path convention.
/// Artifacts are recorded in <see cref="IArtifactIndexRepository"/>; deletes are
/// soft (index-only) so the underlying S3 objects remain available for audit.
/// </summary>
public sealed class S3UnifiedArtifactStore : IArtifactStore
{
    private readonly IS3UnifiedClient _client;
    private readonly IArtifactIndexRepository _indexRepository;
    private readonly S3UnifiedArtifactStoreOptions _options;
    private readonly ILogger<S3UnifiedArtifactStore> _logger;

    /// <summary>
    /// Initializes the store and validates configuration.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
    /// <exception cref="ArgumentException">Thrown when <c>BucketName</c> is not configured.</exception>
    public S3UnifiedArtifactStore(
        IS3UnifiedClient client,
        IArtifactIndexRepository indexRepository,
        IOptions<S3UnifiedArtifactStoreOptions> options,
        ILogger<S3UnifiedArtifactStore> logger)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _indexRepository = indexRepository ?? throw new ArgumentNullException(nameof(indexRepository));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        if (string.IsNullOrWhiteSpace(_options.BucketName))
        {
            throw new ArgumentException("BucketName must be configured", nameof(options));
        }
    }

    /// <inheritdoc />
    public async Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        try
        {
            // Build the storage path using the bom-ref convention.
            var storagePath = BomRefEncoder.BuildPath(request.BomRef, request.SerialNumber, request.ArtifactId);
            var fullKey = BuildFullKey(storagePath);

            // Short-circuit when overwrites are disallowed and the object already exists.
            if (!request.Overwrite && !_options.AllowOverwrite)
            {
                var exists = await _client.ObjectExistsAsync(_options.BucketName, fullKey, ct).ConfigureAwait(false);
                if (exists)
                {
                    _logger.LogInformation("Artifact already exists at {Key}, skipping", fullKey);
                    var existingEntry = await _indexRepository.GetAsync(
                        request.BomRef, request.SerialNumber, request.ArtifactId, ct).ConfigureAwait(false);
                    if (existingEntry != null)
                    {
                        return ArtifactStoreResult.Succeeded(fullKey, existingEntry.Sha256, existingEntry.SizeBytes, wasCreated: false);
                    }
                    // Object exists but no index entry was found: fall through so the
                    // artifact is re-stored and the index repaired.
                }
            }

            // Buffer the content so hash and size can be computed before upload.
            using var memoryStream = new MemoryStream();
            await request.Content.CopyToAsync(memoryStream, ct).ConfigureAwait(false);
            var contentBytes = memoryStream.ToArray();
            if (contentBytes.Length > _options.MaxArtifactSizeBytes)
            {
                return ArtifactStoreResult.Failed($"Artifact exceeds maximum size of {_options.MaxArtifactSizeBytes} bytes");
            }
            var sha256 = ComputeSha256(contentBytes);
            var sizeBytes = contentBytes.Length;

            // Content deduplication: reuse an existing object with the same hash
            // and only add a new index entry pointing at it.
            string actualStorageKey = fullKey;
            if (_options.EnableDeduplication)
            {
                var existingBySha = await _indexRepository.FindBySha256Async(sha256, ct).ConfigureAwait(false);
                if (existingBySha.Count > 0)
                {
                    actualStorageKey = existingBySha[0].StorageKey;
                    _logger.LogInformation("Deduplicating artifact {ArtifactId} - content matches {ExistingKey}",
                        request.ArtifactId, actualStorageKey);
                }
                else
                {
                    // No matching content: store the new object.
                    using var uploadStream = new MemoryStream(contentBytes);
                    var metadata = BuildS3Metadata(request);
                    await _client.PutObjectAsync(
                        _options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
                }
            }
            else
            {
                // Deduplication disabled: always store the object.
                using var uploadStream = new MemoryStream(contentBytes);
                var metadata = BuildS3Metadata(request);
                await _client.PutObjectAsync(
                    _options.BucketName, fullKey, uploadStream, request.ContentType, metadata, ct).ConfigureAwait(false);
            }

            // Record the artifact in the index.
            var indexEntry = new ArtifactIndexEntry
            {
                Id = Guid.NewGuid(),
                TenantId = request.TenantId,
                BomRef = request.BomRef,
                SerialNumber = request.SerialNumber,
                ArtifactId = request.ArtifactId,
                StorageKey = actualStorageKey,
                Type = request.Type,
                ContentType = request.ContentType,
                Sha256 = sha256,
                SizeBytes = sizeBytes,
                CreatedAt = DateTimeOffset.UtcNow
            };
            await _indexRepository.IndexAsync(indexEntry, ct).ConfigureAwait(false);
            _logger.LogInformation(
                "Stored artifact {ArtifactId} for bom-ref {BomRef} at {Key} ({Size} bytes)",
                request.ArtifactId, request.BomRef, actualStorageKey, sizeBytes);
            return ArtifactStoreResult.Succeeded(actualStorageKey, sha256, sizeBytes, wasCreated: true);
        }
        catch (OperationCanceledException)
        {
            // Cancellation must propagate to the caller rather than being
            // reported as a storage failure.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to store artifact {ArtifactId}", request.ArtifactId);
            return ArtifactStoreResult.Failed(ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<ArtifactReadResult> ReadAsync(
        string bomRef,
        string? serialNumber,
        string? artifactId,
        CancellationToken ct = default)
    {
        try
        {
            // Resolve the most specific index entry available for the identifiers given.
            ArtifactIndexEntry? entry;
            if (serialNumber != null && artifactId != null)
            {
                entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
            }
            else if (serialNumber != null)
            {
                var entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
                entry = entries.FirstOrDefault();
            }
            else
            {
                var entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
                entry = entries.FirstOrDefault();
            }
            if (entry == null)
            {
                return ArtifactReadResult.NotFound($"No artifact found for bom-ref: {bomRef}");
            }
            var stream = await _client.GetObjectAsync(_options.BucketName, entry.StorageKey, ct).ConfigureAwait(false);
            if (stream == null)
            {
                // Index entry exists but the object is missing from S3.
                return ArtifactReadResult.NotFound($"Object not found in S3: {entry.StorageKey}");
            }
            return ArtifactReadResult.Succeeded(stream, ToMetadata(entry));
        }
        catch (OperationCanceledException)
        {
            // Cancellation must propagate rather than be reported as NotFound.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to read artifact for bom-ref {BomRef}", bomRef);
            return ArtifactReadResult.NotFound(ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<ArtifactMetadata>> ListAsync(
        string bomRef,
        string? serialNumber = null,
        CancellationToken ct = default)
    {
        IReadOnlyList<ArtifactIndexEntry> entries;
        if (serialNumber != null)
        {
            entries = await _indexRepository.FindByBomRefAndSerialAsync(bomRef, serialNumber, ct).ConfigureAwait(false);
        }
        else
        {
            entries = await _indexRepository.FindByBomRefAsync(bomRef, ct).ConfigureAwait(false);
        }
        return entries.Select(ToMetadata).ToList();
    }

    /// <inheritdoc />
    public async Task<bool> ExistsAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        // Existence is determined from the index, not from S3.
        var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
        return entry != null;
    }

    /// <inheritdoc />
    public async Task<ArtifactMetadata?> GetMetadataAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        var entry = await _indexRepository.GetAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
        return entry == null ? null : ToMetadata(entry);
    }

    /// <inheritdoc />
    public async Task<bool> DeleteAsync(
        string bomRef,
        string serialNumber,
        string artifactId,
        CancellationToken ct = default)
    {
        // Soft delete in index only (the S3 object is kept for the audit trail).
        var removed = await _indexRepository.RemoveAsync(bomRef, serialNumber, artifactId, ct).ConfigureAwait(false);
        if (removed)
        {
            _logger.LogInformation("Soft-deleted artifact {ArtifactId} for bom-ref {BomRef}", artifactId, bomRef);
        }
        return removed;
    }

    // Prepends the configured key prefix (if any) to the relative storage path.
    private string BuildFullKey(string relativePath)
    {
        var prefix = string.IsNullOrWhiteSpace(_options.Prefix) ? "" : _options.Prefix.TrimEnd('/') + "/";
        return $"{prefix}{relativePath}";
    }

    // Computes the lowercase-hex SHA-256 digest of the content.
    private static string ComputeSha256(byte[] content)
    {
        var hashBytes = SHA256.HashData(content);
        return Convert.ToHexStringLower(hashBytes);
    }

    // Projects an index entry to the public metadata shape (shared by
    // ReadAsync, ListAsync, and GetMetadataAsync).
    private static ArtifactMetadata ToMetadata(ArtifactIndexEntry entry) => new()
    {
        StorageKey = entry.StorageKey,
        BomRef = entry.BomRef,
        SerialNumber = entry.SerialNumber,
        ArtifactId = entry.ArtifactId,
        ContentType = entry.ContentType,
        SizeBytes = entry.SizeBytes,
        Sha256 = entry.Sha256,
        CreatedAt = entry.CreatedAt,
        Type = entry.Type,
        TenantId = entry.TenantId
    };

    // Builds S3 user metadata: well-known identity keys plus any
    // caller-supplied metadata, all lowercased under the x-amz-meta- prefix.
    private static Dictionary<string, string> BuildS3Metadata(ArtifactStoreRequest request)
    {
        var metadata = new Dictionary<string, string>
        {
            ["x-amz-meta-bomref"] = request.BomRef,
            ["x-amz-meta-serialnumber"] = request.SerialNumber,
            ["x-amz-meta-artifactid"] = request.ArtifactId,
            ["x-amz-meta-artifacttype"] = request.Type.ToString()
        };
        if (request.Metadata != null)
        {
            foreach (var kvp in request.Metadata)
            {
                metadata[$"x-amz-meta-{kvp.Key.ToLowerInvariant()}"] = kvp.Value;
            }
        }
        return metadata;
    }
}

View File

@@ -0,0 +1,201 @@
// -----------------------------------------------------------------------------
// ServiceCollectionExtensions.cs
// Sprint: SPRINT_20260118_017_Evidence_artifact_store_unification
// Tasks: AS-002, AS-003 - Service registration
// Description: DI registration for artifact store services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Artifact.Core;
using StellaOps.Infrastructure.Postgres.Options;
namespace StellaOps.Artifact.Infrastructure;
/// <summary>
/// Extension methods for registering artifact store services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds unified artifact store services with S3 backend.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configuration">Configuration root.</param>
    /// <param name="sectionName">Configuration section for options.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddUnifiedArtifactStore(
        this IServiceCollection services,
        IConfiguration configuration,
        string sectionName = "ArtifactStore")
    {
        // Configure S3 store options
        services.Configure<S3UnifiedArtifactStoreOptions>(configuration.GetSection($"{sectionName}:S3"));
        // Configure PostgreSQL options for index (named options: "Artifact")
        services.Configure<PostgresOptions>("Artifact", configuration.GetSection($"{sectionName}:Postgres"));
        // Register data source
        services.AddSingleton<ArtifactDataSource>(sp =>
        {
            // IOptionsSnapshot<T> is a scoped service and cannot be resolved from
            // a singleton factory (throws when scope validation is enabled).
            // IOptionsMonitor<T> is singleton-safe and supports named options.
            var options = sp.GetRequiredService<IOptionsMonitor<PostgresOptions>>().Get("Artifact");
            var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<ArtifactDataSource>>();
            return new ArtifactDataSource(Options.Create(options), logger);
        });
        // Register core services
        services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();
        // Register index repository
        services.AddScoped<IArtifactIndexRepository>(sp =>
        {
            var dataSource = sp.GetRequiredService<ArtifactDataSource>();
            var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<PostgresArtifactIndexRepository>>();
            // TODO: Get tenant ID from context
            return new PostgresArtifactIndexRepository(dataSource, logger, "default");
        });
        // Register S3 artifact store
        services.AddScoped<IArtifactStore, S3UnifiedArtifactStore>();
        return services;
    }

    /// <summary>
    /// Adds unified artifact store with in-memory backend (for testing).
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddInMemoryArtifactStore(this IServiceCollection services)
    {
        services.AddSingleton<ICycloneDxExtractor, CycloneDxExtractor>();
        services.AddSingleton<IArtifactIndexRepository, InMemoryArtifactIndexRepository>();
        services.AddSingleton<IArtifactStore, InMemoryArtifactStore>();
        return services;
    }

    /// <summary>
    /// Adds artifact migration services.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <param name="configure">Options configuration.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddArtifactMigration(
        this IServiceCollection services,
        Action<ArtifactMigrationOptions>? configure = null)
    {
        var options = new ArtifactMigrationOptions();
        configure?.Invoke(options);
        services.AddSingleton(options);
        services.AddScoped<ArtifactMigrationService>();
        return services;
    }
}
/// <summary>
/// In-memory artifact store for testing.
/// Thread-safe via a single internal lock; not intended for production use.
/// Keys follow the "{bomRef}/{serialNumber}/{artifactId}" convention.
/// </summary>
public sealed class InMemoryArtifactStore : IArtifactStore
{
    private readonly Dictionary<string, (byte[] Content, ArtifactMetadata Metadata)> _artifacts = new();
    private readonly object _lock = new();

    /// <inheritdoc />
    public async Task<ArtifactStoreResult> StoreAsync(ArtifactStoreRequest request, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        var key = BuildKey(request.BomRef, request.SerialNumber, request.ArtifactId);

        // Copy asynchronously and honor cancellation (previously a blocking CopyTo
        // that ignored the token).
        using var ms = new MemoryStream();
        await request.Content.CopyToAsync(ms, ct).ConfigureAwait(false);
        var content = ms.ToArray();

        // One-shot static hashing avoids allocating a SHA256 instance per call
        // and matches the S3 store's ComputeSha256 implementation.
        var sha256 = Convert.ToHexStringLower(System.Security.Cryptography.SHA256.HashData(content));

        var metadata = new ArtifactMetadata
        {
            StorageKey = key,
            BomRef = request.BomRef,
            SerialNumber = request.SerialNumber,
            ArtifactId = request.ArtifactId,
            ContentType = request.ContentType,
            SizeBytes = content.Length,
            Sha256 = sha256,
            CreatedAt = DateTimeOffset.UtcNow,
            Type = request.Type,
            TenantId = request.TenantId
        };
        lock (_lock)
        {
            var wasCreated = !_artifacts.ContainsKey(key);
            _artifacts[key] = (content, metadata);
            return ArtifactStoreResult.Succeeded(key, sha256, content.Length, wasCreated);
        }
    }

    /// <inheritdoc />
    public Task<ArtifactReadResult> ReadAsync(string bomRef, string? serialNumber, string? artifactId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            // Null serialNumber/artifactId act as wildcards; the first match wins.
            var matching = _artifacts
                .Where(kvp => kvp.Value.Metadata.BomRef == bomRef)
                .Where(kvp => serialNumber == null || kvp.Value.Metadata.SerialNumber == serialNumber)
                .Where(kvp => artifactId == null || kvp.Value.Metadata.ArtifactId == artifactId)
                .FirstOrDefault();
            if (matching.Value.Content == null)
            {
                return Task.FromResult(ArtifactReadResult.NotFound());
            }
            return Task.FromResult(ArtifactReadResult.Succeeded(
                new MemoryStream(matching.Value.Content),
                matching.Value.Metadata));
        }
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ArtifactMetadata>> ListAsync(string bomRef, string? serialNumber = null, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var result = _artifacts.Values
                .Where(x => x.Metadata.BomRef == bomRef)
                .Where(x => serialNumber == null || x.Metadata.SerialNumber == serialNumber)
                .Select(x => x.Metadata)
                .ToList();
            return Task.FromResult<IReadOnlyList<ArtifactMetadata>>(result);
        }
    }

    /// <inheritdoc />
    public Task<bool> ExistsAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        var key = BuildKey(bomRef, serialNumber, artifactId);
        lock (_lock)
        {
            return Task.FromResult(_artifacts.ContainsKey(key));
        }
    }

    /// <inheritdoc />
    public Task<ArtifactMetadata?> GetMetadataAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        var key = BuildKey(bomRef, serialNumber, artifactId);
        lock (_lock)
        {
            return Task.FromResult(_artifacts.TryGetValue(key, out var entry) ? entry.Metadata : null);
        }
    }

    /// <inheritdoc />
    public Task<bool> DeleteAsync(string bomRef, string serialNumber, string artifactId, CancellationToken ct = default)
    {
        var key = BuildKey(bomRef, serialNumber, artifactId);
        lock (_lock)
        {
            return Task.FromResult(_artifacts.Remove(key));
        }
    }

    // Single definition of the dictionary-key convention used by all methods.
    private static string BuildKey(string bomRef, string serialNumber, string artifactId)
        => $"{bomRef}/{serialNumber}/{artifactId}";
}

View File

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Artifact.Infrastructure</RootNamespace>
<AssemblyName>StellaOps.Artifact.Infrastructure</AssemblyName>
<Description>Unified artifact storage infrastructure implementations for StellaOps</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" />
<PackageReference Include="Npgsql" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Artifact.Core\StellaOps.Artifact.Core.csproj" />
<ProjectReference Include="..\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Migrations\**\*.sql" LogicalName="%(RecursiveDir)%(Filename)%(Extension)" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,54 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-003 - ICanonicalizable interface
namespace StellaOps.Canonical.Json;
/// <summary>
/// Interface for types that support canonical JSON serialization.
/// Enables deterministic serialization and content-addressed hashing.
/// </summary>
public interface ICanonicalizable
{
    /// <summary>
    /// Gets the canonical JSON representation of this object.
    /// Output is deterministic: same input produces identical bytes.
    /// </summary>
    /// <returns>Canonical JSON string.</returns>
    string GetCanonicalJson();

    /// <summary>
    /// Computes the SHA-256 digest of the canonical JSON representation.
    /// </summary>
    /// <returns>64-character lowercase hex string (no "sha256:" prefix).</returns>
    string ComputeDigest();
}
/// <summary>
/// Extension methods for canonical JSON operations.
/// </summary>
public static class CanonJsonExtensions
{
    /// <summary>
    /// Computes the canonical digest if the object implements <see cref="ICanonicalizable"/>,
    /// otherwise falls back to <c>CanonJson.Hash()</c>.
    /// </summary>
    /// <param name="obj">Object to digest; must not be null.</param>
    /// <returns>64-character lowercase hex digest.</returns>
    // "this" modifier added: the class is documented as extension methods but the
    // methods previously lacked it, so obj.GetDigest() did not compile. Static
    // invocation (CanonJsonExtensions.GetDigest(obj)) still works unchanged.
    public static string GetDigest<T>(this T obj) where T : notnull
    {
        if (obj is ICanonicalizable canonicalizable)
        {
            return canonicalizable.ComputeDigest();
        }
        return CanonJson.Hash(obj);
    }

    /// <summary>
    /// Computes the prefixed canonical digest (sha256:...).
    /// </summary>
    /// <param name="obj">Object to digest; must not be null.</param>
    /// <returns>Digest string prefixed with "sha256:".</returns>
    public static string GetPrefixedDigest<T>(this T obj) where T : notnull
    {
        return "sha256:" + GetDigest(obj);
    }
}

View File

@@ -0,0 +1,391 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-005 - Gate Decision Logic
using System.Collections.Immutable;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Evaluates gate decisions based on scoring results and thresholds.
/// Implements advisory gate rules:
/// - Block when final_score meets the configured block threshold; stale EPSS data amplifies the block.
/// - Auto-pass when a trusted not_affected/fixed VEX statement is present.
/// - Warn only when final_score is at or above the warn threshold but below the block
///   threshold, unless patch_proof_confidence meets the configured bypass level.
/// </summary>
public sealed class GateEvaluator : IGateEvaluator
{
    /// <inheritdoc />
    public GateDecision Evaluate(
        double finalScore,
        EvidenceWeightedScoreInput input,
        GateConfiguration config,
        DateTimeOffset evaluatedAt)
    {
        ArgumentNullException.ThrowIfNull(input);
        ArgumentNullException.ThrowIfNull(config);
        var matchedRules = new List<string>();
        var suggestions = new List<string>();

        // Rule 1: Auto-pass on trusted VEX not_affected/fixed
        if (config.AutoPassOnTrustedVex && IsAuthoritativeVexPass(input))
        {
            matchedRules.Add("auto_pass_trusted_vex");
            return new GateDecision
            {
                Action = GateAction.Pass,
                Reason = $"Auto-pass: Authoritative VEX status '{input.VexStatus}'",
                Threshold = 0.0,
                MatchedRules = matchedRules.ToImmutableArray(),
                Suggestions = suggestions.ToImmutableArray()
            };
        }

        // Evaluate custom rules first (lower Priority value wins first)
        foreach (var rule in config.CustomRules.OrderBy(r => r.Priority))
        {
            if (EvaluateCustomRule(rule, finalScore, input, evaluatedAt))
            {
                matchedRules.Add(rule.Id);
                return new GateDecision
                {
                    Action = rule.Action,
                    Reason = $"Custom rule '{rule.Name}': {rule.Condition}",
                    Threshold = GetThresholdForAction(rule.Action, config),
                    MatchedRules = matchedRules.ToImmutableArray(),
                    Suggestions = GetSuggestionsForRule(rule, input).ToImmutableArray()
                };
            }
        }

        // Rule 2: Block threshold check
        if (finalScore >= config.BlockThreshold)
        {
            matchedRules.Add("block_threshold");
            // Additional check: EPSS staleness amplifies block
            var isEpssStale = IsEpssStale(input, config.EpssStalenessLimit, evaluatedAt);
            if (isEpssStale)
            {
                matchedRules.Add("epss_stale");
                suggestions.Add("Refresh EPSS data - current data may be outdated");
            }
            suggestions.Add("Review finding urgently - score exceeds block threshold");
            suggestions.Add("Consider applying VEX statement if not affected");
            suggestions.Add("Investigate patch availability");
            return new GateDecision
            {
                Action = GateAction.Block,
                Reason = $"Score {finalScore:F2} exceeds block threshold {config.BlockThreshold:F2}",
                Threshold = config.BlockThreshold,
                MatchedRules = matchedRules.ToImmutableArray(),
                Suggestions = suggestions.ToImmutableArray()
            };
        }

        // Rule 3: Warn threshold check with patch proof bypass
        if (finalScore >= config.WarnThreshold)
        {
            // Check if patch proof bypasses warning
            if (input.PatchProofConfidence >= config.PatchProofWarnBypass)
            {
                matchedRules.Add("warn_threshold");
                matchedRules.Add("patch_proof_bypass");
                return new GateDecision
                {
                    Action = GateAction.Pass,
                    Reason = $"Score {finalScore:F2} in warn range but patch proof confidence {input.PatchProofConfidence:F2} provides bypass",
                    Threshold = config.WarnThreshold,
                    MatchedRules = matchedRules.ToImmutableArray(),
                    Suggestions = suggestions.ToImmutableArray()
                };
            }
            matchedRules.Add("warn_threshold");
            suggestions.Add("Review finding before next release");
            suggestions.Add("Consider verifying patch status");
            return new GateDecision
            {
                Action = GateAction.Warn,
                Reason = $"Score {finalScore:F2} exceeds warn threshold {config.WarnThreshold:F2}",
                Threshold = config.WarnThreshold,
                MatchedRules = matchedRules.ToImmutableArray(),
                Suggestions = suggestions.ToImmutableArray()
            };
        }

        // Rule 4: Pass (below all thresholds)
        matchedRules.Add("below_thresholds");
        return new GateDecision
        {
            Action = GateAction.Pass,
            Reason = $"Score {finalScore:F2} below all thresholds",
            Threshold = config.WarnThreshold,
            MatchedRules = matchedRules.ToImmutableArray(),
            Suggestions = suggestions.ToImmutableArray()
        };
    }

    /// <inheritdoc />
    public IReadOnlyList<GateDecision> EvaluateBatch(
        IReadOnlyList<(double FinalScore, EvidenceWeightedScoreInput Input)> findings,
        GateConfiguration config,
        DateTimeOffset evaluatedAt)
    {
        return findings
            .Select(f => Evaluate(f.FinalScore, f.Input, config, evaluatedAt))
            .ToList();
    }

    /// <summary>
    /// Checks if VEX status indicates authoritative pass
    /// (not_affected or fixed from an authoritative source).
    /// </summary>
    private static bool IsAuthoritativeVexPass(EvidenceWeightedScoreInput input)
    {
        if (string.IsNullOrEmpty(input.VexStatus))
            return false;
        var isPassStatus =
            string.Equals(input.VexStatus, "not_affected", StringComparison.OrdinalIgnoreCase) ||
            string.Equals(input.VexStatus, "fixed", StringComparison.OrdinalIgnoreCase);
        if (!isPassStatus)
            return false;
        // Check if source is authoritative
        return IsAuthoritativeVexSource(input.VexSource);
    }

    /// <summary>
    /// Checks if VEX source is authoritative (in-project or vendor-issued).
    /// </summary>
    private static bool IsAuthoritativeVexSource(string? vexSource)
    {
        if (string.IsNullOrEmpty(vexSource))
            return false;
        // In-project VEX is always authoritative
        if (vexSource.StartsWith(".vex/", StringComparison.OrdinalIgnoreCase) ||
            vexSource.StartsWith("in-project:", StringComparison.OrdinalIgnoreCase))
            return true;
        // Vendor sources are authoritative
        if (vexSource.StartsWith("vendor:", StringComparison.OrdinalIgnoreCase))
            return true;
        return false;
    }

    /// <summary>
    /// Checks if EPSS data is stale relative to <paramref name="evaluatedAt"/>.
    /// </summary>
    private static bool IsEpssStale(
        EvidenceWeightedScoreInput input,
        TimeSpan stalenessLimit,
        DateTimeOffset evaluatedAt)
    {
        // If no EPSS source details, assume fresh.
        // We check the exploit details timestamp when available.
        if (input.ExploitDetails?.EpssTimestamp is DateTimeOffset epssTimestamp)
        {
            var age = evaluatedAt - epssTimestamp;
            return age > stalenessLimit;
        }
        return false;
    }

    /// <summary>
    /// Evaluates a custom gate rule.
    /// Supported condition formats: "score|cvss|epss|reachability|rch OP value"
    /// and "exploit|kev ..." keyword conditions. Unknown conditions never match.
    /// </summary>
    private static bool EvaluateCustomRule(
        GateRule rule,
        double finalScore,
        EvidenceWeightedScoreInput input,
        DateTimeOffset evaluatedAt)
    {
        // Simple expression evaluation for common patterns
        // Format: "dimension operator value" or "score operator value"
        var condition = rule.Condition.Trim().ToLowerInvariant();
        // Score conditions
        if (condition.StartsWith("score"))
        {
            return EvaluateScoreCondition(condition, finalScore);
        }
        // CVSS conditions
        if (condition.StartsWith("cvss"))
        {
            return EvaluateCvssCondition(condition, input);
        }
        // EPSS conditions
        if (condition.StartsWith("epss"))
        {
            return EvaluateEpssCondition(condition, input);
        }
        // Reachability conditions
        if (condition.StartsWith("reachability") || condition.StartsWith("rch"))
        {
            return EvaluateReachabilityCondition(condition, input);
        }
        // Exploit maturity conditions
        if (condition.StartsWith("exploit") || condition.StartsWith("kev"))
        {
            return EvaluateExploitCondition(condition, input);
        }
        // Default: don't match unknown conditions
        return false;
    }

    /// <summary>
    /// Splits a "dimension OP value" condition into its operator and numeric
    /// threshold. Returns false when the condition is malformed.
    /// The threshold is parsed with the invariant culture so rule behavior does
    /// not depend on the host's regional settings (e.g. comma decimal separators).
    /// </summary>
    private static bool TryParseCondition(string condition, out string op, out double threshold)
    {
        op = string.Empty;
        threshold = 0.0;
        var parts = condition.Split(' ', StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length < 3)
            return false;
        op = parts[1];
        return double.TryParse(
            parts[2],
            System.Globalization.NumberStyles.Float,
            System.Globalization.CultureInfo.InvariantCulture,
            out threshold);
    }

    // Applies a relational operator to a value/threshold pair. Equality uses a
    // small tolerance and is only honored when allowEquality is set (score rules).
    private static bool ApplyOperator(string op, double value, double threshold, bool allowEquality)
    {
        return op switch
        {
            ">=" => value >= threshold,
            ">" => value > threshold,
            "<=" => value <= threshold,
            "<" => value < threshold,
            "==" or "=" when allowEquality => Math.Abs(value - threshold) < 0.0001,
            _ => false
        };
    }

    private static bool EvaluateScoreCondition(string condition, double score)
    {
        // Parse "score >= 0.5" or "score < 0.3"
        return TryParseCondition(condition, out var op, out var threshold)
            && ApplyOperator(op, score, threshold, allowEquality: true);
    }

    private static bool EvaluateCvssCondition(string condition, EvidenceWeightedScoreInput input)
    {
        // Parse "cvss >= 7.0"
        return TryParseCondition(condition, out var op, out var threshold)
            && ApplyOperator(op, input.CvssBase, threshold, allowEquality: false);
    }

    private static bool EvaluateEpssCondition(string condition, EvidenceWeightedScoreInput input)
    {
        // Parse "epss >= 0.5"
        return TryParseCondition(condition, out var op, out var threshold)
            && ApplyOperator(op, input.EpssScore, threshold, allowEquality: false);
    }

    private static bool EvaluateReachabilityCondition(string condition, EvidenceWeightedScoreInput input)
    {
        // Parse "reachability >= 0.7" or "rch >= 0.7"
        return TryParseCondition(condition, out var op, out var threshold)
            && ApplyOperator(op, input.Rch, threshold, allowEquality: false);
    }

    private static bool EvaluateExploitCondition(string condition, EvidenceWeightedScoreInput input)
    {
        // Parse "exploit == high" or "kev == true" (condition arrives lowercased
        // from EvaluateCustomRule; keyword checks are substring-based).
        if (condition.Contains("kev"))
        {
            return condition.Contains("true") && input.ExploitDetails?.KevStatus == KevStatus.InKev;
        }
        if (condition.Contains("high"))
        {
            return input.ExploitMaturity == ExploitMaturityLevel.High;
        }
        if (condition.Contains("functional"))
        {
            return input.ExploitMaturity >= ExploitMaturityLevel.Functional;
        }
        if (condition.Contains("poc") || condition.Contains("proof"))
        {
            return input.ExploitMaturity >= ExploitMaturityLevel.ProofOfConcept;
        }
        return false;
    }

    // Maps a gate action to the threshold that governs it (for decision reporting).
    private static double GetThresholdForAction(GateAction action, GateConfiguration config)
    {
        return action switch
        {
            GateAction.Block => config.BlockThreshold,
            GateAction.Warn => config.WarnThreshold,
            GateAction.Pass => 0.0,
            _ => 0.0
        };
    }

    // Builds remediation suggestions for a triggered custom rule.
    private static IEnumerable<string> GetSuggestionsForRule(GateRule rule, EvidenceWeightedScoreInput input)
    {
        var suggestions = new List<string>();
        if (rule.Action == GateAction.Block)
        {
            suggestions.Add($"Rule '{rule.Name}' triggered block - review required");
            if (input.CvssBase >= 7.0)
                suggestions.Add("Consider CVSS temporal/environmental factors");
            if (input.EpssScore >= 0.5)
                suggestions.Add("High exploitation probability - prioritize remediation");
        }
        else if (rule.Action == GateAction.Warn)
        {
            suggestions.Add($"Rule '{rule.Name}' triggered warning - monitor closely");
        }
        return suggestions;
    }
}

View File

@@ -0,0 +1,40 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-005 - Gate Decision Logic (interface for TASK-030-002)
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Evaluates gate decisions based on scoring results and configuration.
/// </summary>
public interface IGateEvaluator
{
    /// <summary>
    /// Evaluates gate decision for a single finding.
    /// </summary>
    /// <param name="finalScore">Final score [0, 1].</param>
    /// <param name="input">Original EWS input for context.</param>
    /// <param name="config">Gate configuration with thresholds.</param>
    /// <param name="evaluatedAt">Evaluation timestamp (used e.g. for staleness checks).</param>
    /// <returns>Gate decision with action, reason, and suggestions.</returns>
    GateDecision Evaluate(
        double finalScore,
        EvidenceWeightedScoreInput input,
        GateConfiguration config,
        DateTimeOffset evaluatedAt);

    /// <summary>
    /// Evaluates gate decisions for multiple findings in batch.
    /// Typically equivalent to calling <see cref="Evaluate"/> once per finding.
    /// </summary>
    /// <param name="findings">Collection of score/input pairs.</param>
    /// <param name="config">Gate configuration with thresholds.</param>
    /// <param name="evaluatedAt">Evaluation timestamp applied to every finding.</param>
    /// <returns>Gate decisions for each finding, in input order.</returns>
    IReadOnlyList<GateDecision> EvaluateBatch(
        IReadOnlyList<(double FinalScore, EvidenceWeightedScoreInput Input)> findings,
        GateConfiguration config,
        DateTimeOffset evaluatedAt);
}

View File

@@ -0,0 +1,40 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-002 - Implement VerdictBundleBuilder
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Builder that assembles verdict bundles from EWS results.
/// </summary>
public interface IVerdictBundleBuilder
{
    /// <summary>
    /// Builds a verdict bundle from an EWS result.
    /// </summary>
    /// <param name="ewsResult">Evidence-weighted score result.</param>
    /// <param name="input">Original EWS input for source metadata extraction.</param>
    /// <param name="policy">Evidence weight policy used for scoring.</param>
    /// <param name="gateConfig">Gate configuration for action determination.</param>
    /// <returns>Assembled verdict bundle with bundle digest.</returns>
    VerdictBundle Build(
        EvidenceWeightedScoreResult ewsResult,
        EvidenceWeightedScoreInput input,
        EvidenceWeightPolicy policy,
        GateConfiguration gateConfig);

    /// <summary>
    /// Builds a verdict bundle from an EWS result using default gate configuration.
    /// Overload of the four-argument <c>Build</c> that omits an explicit gate config.
    /// </summary>
    /// <param name="ewsResult">Evidence-weighted score result.</param>
    /// <param name="input">Original EWS input for source metadata extraction.</param>
    /// <param name="policy">Evidence weight policy used for scoring.</param>
    /// <returns>Assembled verdict bundle with bundle digest.</returns>
    VerdictBundle Build(
        EvidenceWeightedScoreResult ewsResult,
        EvidenceWeightedScoreInput input,
        EvidenceWeightPolicy policy);
}

View File

@@ -0,0 +1,533 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-001 - Create VerdictBundle Model
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Unified verdict bundle that captures all data needed for auditable replay.
/// Content-addressed via canonical JSON + SHA-256; the digest excludes
/// <see cref="BundleId"/>, <see cref="BundleDigest"/>, <see cref="DsseSignature"/>,
/// and <see cref="RekorAnchor"/> so signing/anchoring does not alter the address.
/// </summary>
public sealed record VerdictBundle
{
    /// <summary>
    /// Unique bundle identifier (content-addressed, sha256:...).
    /// Equal to <see cref="BundleDigest"/> once the bundle is built.
    /// </summary>
    [JsonPropertyName("bundle_id")]
    public required string BundleId { get; init; }

    /// <summary>
    /// Schema version for forward compatibility. Defaults to <see cref="CurrentSchemaVersion"/>.
    /// </summary>
    [JsonPropertyName("schema_version")]
    public string SchemaVersion { get; init; } = CurrentSchemaVersion;

    /// <summary>
    /// Finding identifier (CVE@PURL format or similar).
    /// </summary>
    [JsonPropertyName("finding_id")]
    public required string FindingId { get; init; }

    /// <summary>
    /// Reference to the scoring manifest used for this verdict.
    /// </summary>
    [JsonPropertyName("manifest_ref")]
    public required ScoringManifestRef ManifestRef { get; init; }

    /// <summary>
    /// Canonicalized scoring inputs with source digests.
    /// </summary>
    [JsonPropertyName("inputs")]
    public required VerdictInputs Inputs { get; init; }

    /// <summary>
    /// Normalization trace showing how inputs were normalized.
    /// </summary>
    [JsonPropertyName("normalization")]
    public required NormalizationTrace Normalization { get; init; }

    /// <summary>
    /// Raw score before clamping [0, 1].
    /// </summary>
    [JsonPropertyName("raw_score")]
    public required double RawScore { get; init; }

    /// <summary>
    /// Final score after clamping [0, 1].
    /// </summary>
    [JsonPropertyName("final_score")]
    public required double FinalScore { get; init; }

    /// <summary>
    /// Override applied (e.g., VEX not_affected). Null when no override was applied.
    /// </summary>
    [JsonPropertyName("override")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public VerdictOverride? Override { get; init; }

    /// <summary>
    /// Gate decision based on thresholds.
    /// </summary>
    [JsonPropertyName("gate")]
    public required GateDecision Gate { get; init; }

    /// <summary>
    /// When verdict was computed (UTC).
    /// </summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }

    /// <summary>
    /// SHA-256 digest of canonical bundle (excluding this field and signature).
    /// </summary>
    [JsonPropertyName("bundle_digest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? BundleDigest { get; init; }

    /// <summary>
    /// DSSE signature envelope (JSON). Null until the bundle is signed.
    /// </summary>
    [JsonPropertyName("dsse_signature")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? DsseSignature { get; init; }

    /// <summary>
    /// Rekor transparency log anchor. Null until the bundle is anchored.
    /// </summary>
    [JsonPropertyName("rekor_anchor")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public RekorLinkage? RekorAnchor { get; init; }

    /// <summary>
    /// Current schema version constant.
    /// </summary>
    public const string CurrentSchemaVersion = "stella-verdict/1.0.0";
}
/// <summary>
/// Reference to a scoring manifest used for verdict calculation.
/// Pins the verdict to a specific, digest-identified scoring configuration.
/// </summary>
public sealed record ScoringManifestRef
{
    /// <summary>
    /// Scoring manifest version (e.g., "v2026-01-18-1").
    /// </summary>
    [JsonPropertyName("scoring_version")]
    public required string ScoringVersion { get; init; }

    /// <summary>
    /// SHA-256 digest of the scoring manifest (sha256:... form).
    /// </summary>
    [JsonPropertyName("manifest_digest")]
    public required string ManifestDigest { get; init; }
}
/// <summary>
/// Canonicalized scoring inputs with source provenance.
/// All dimensions except VEX are required; VEX is present only when a statement exists.
/// </summary>
public sealed record VerdictInputs
{
    /// <summary>
    /// CVSS base score input with source.
    /// </summary>
    [JsonPropertyName("cvss")]
    public required CvssInput Cvss { get; init; }

    /// <summary>
    /// EPSS probability input with source.
    /// </summary>
    [JsonPropertyName("epss")]
    public required EpssInput Epss { get; init; }

    /// <summary>
    /// Reachability input with source.
    /// </summary>
    [JsonPropertyName("reachability")]
    public required ReachabilityInputRecord Reachability { get; init; }

    /// <summary>
    /// Exploit maturity input with source.
    /// </summary>
    [JsonPropertyName("exploit_maturity")]
    public required ExploitMaturityInput ExploitMaturity { get; init; }

    /// <summary>
    /// Patch proof confidence input with source.
    /// </summary>
    [JsonPropertyName("patch_proof")]
    public required PatchProofInput PatchProof { get; init; }

    /// <summary>
    /// VEX statement input (if present); omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("vex")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public VexInput? Vex { get; init; }
}
/// <summary>
/// CVSS input with provenance (score, version, vector, and capture metadata).
/// </summary>
public sealed record CvssInput
{
    /// <summary>Raw CVSS base score [0, 10].</summary>
    [JsonPropertyName("base_score")]
    public required double BaseScore { get; init; }

    /// <summary>CVSS version string (e.g., "3.0", "3.1", "4.0").</summary>
    [JsonPropertyName("version")]
    public required string Version { get; init; }

    /// <summary>Full CVSS vector string; null when not captured.</summary>
    [JsonPropertyName("vector")]
    public string? Vector { get; init; }

    /// <summary>Source of CVSS data (e.g., "nvd").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>SHA-256 digest of source document.</summary>
    [JsonPropertyName("source_digest")]
    public string? SourceDigest { get; init; }

    /// <summary>Timestamp when data was captured (UTC).</summary>
    [JsonPropertyName("captured_at")]
    public DateTimeOffset? CapturedAt { get; init; }
}
/// <summary>
/// EPSS input with provenance (probability, percentile, and model metadata).
/// </summary>
public sealed record EpssInput
{
    /// <summary>EPSS exploitation probability [0, 1].</summary>
    [JsonPropertyName("probability")]
    public required double Probability { get; init; }

    /// <summary>EPSS percentile [0, 100]; null when not captured.</summary>
    [JsonPropertyName("percentile")]
    public double? Percentile { get; init; }

    /// <summary>EPSS model version.</summary>
    [JsonPropertyName("model_version")]
    public string? ModelVersion { get; init; }

    /// <summary>EPSS model date.</summary>
    [JsonPropertyName("model_date")]
    public DateOnly? ModelDate { get; init; }

    /// <summary>Source of EPSS data (e.g., "first.org").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Timestamp when data was captured (UTC).</summary>
    [JsonPropertyName("captured_at")]
    public DateTimeOffset? CapturedAt { get; init; }
}
/// <summary>
/// Reachability input with provenance (level, normalized value, and analysis metadata).
/// </summary>
public sealed record ReachabilityInputRecord
{
    /// <summary>Reachability level (one of: none, package, function, caller).</summary>
    [JsonPropertyName("level")]
    public required string Level { get; init; }

    /// <summary>Normalized reachability value [0, 1].</summary>
    [JsonPropertyName("value")]
    public required double Value { get; init; }

    /// <summary>Confidence of the reachability assessment; null when not reported.</summary>
    [JsonPropertyName("confidence")]
    public double? Confidence { get; init; }

    /// <summary>Analysis method used (e.g., static call-graph); null when not reported.</summary>
    [JsonPropertyName("method")]
    public string? Method { get; init; }

    /// <summary>Source of reachability data (e.g., "stella-scanner").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Timestamp when data was captured (UTC).</summary>
    [JsonPropertyName("captured_at")]
    public DateTimeOffset? CapturedAt { get; init; }
}
/// <summary>
/// Exploit maturity input with provenance (level, normalized value, KEV membership).
/// </summary>
public sealed record ExploitMaturityInput
{
    /// <summary>Maturity level (one of: none, poc, functional, high).</summary>
    [JsonPropertyName("level")]
    public required string Level { get; init; }

    /// <summary>Normalized maturity value [0, 1].</summary>
    [JsonPropertyName("value")]
    public required double Value { get; init; }

    /// <summary>Whether the CVE is in the KEV catalog; null when unknown.</summary>
    [JsonPropertyName("in_kev")]
    public bool? InKev { get; init; }

    /// <summary>Source of the maturity assessment (e.g., "nvd").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Timestamp when data was captured (UTC).</summary>
    [JsonPropertyName("captured_at")]
    public DateTimeOffset? CapturedAt { get; init; }
}
/// <summary>
/// Patch proof input with provenance (confidence values and verification metadata).
/// </summary>
public sealed record PatchProofInput
{
    /// <summary>Patch proof confidence [0, 1].</summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>Binary delta-sig confidence [0, 1]; null when no delta signature exists.</summary>
    [JsonPropertyName("delta_sig_confidence")]
    public double? DeltaSigConfidence { get; init; }

    /// <summary>Whether a vendor "fixed" claim is present; null when unknown.</summary>
    [JsonPropertyName("vendor_fixed_claim")]
    public bool? VendorFixedClaim { get; init; }

    /// <summary>Verification method used; null when not reported.</summary>
    [JsonPropertyName("method")]
    public string? Method { get; init; }

    /// <summary>Source of the patch proof (e.g., "stella-verifier").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Timestamp when data was captured (UTC).</summary>
    [JsonPropertyName("captured_at")]
    public DateTimeOffset? CapturedAt { get; init; }
}
/// <summary>
/// VEX input with provenance (status, issuer trust, and document identity).
/// </summary>
public sealed record VexInput
{
    /// <summary>VEX status (one of: not_affected, affected, fixed, under_investigation).</summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>VEX justification code (meaningful only for not_affected); null otherwise.</summary>
    [JsonPropertyName("justification")]
    public string? Justification { get; init; }

    /// <summary>VEX issuer identifier.</summary>
    [JsonPropertyName("issuer")]
    public required string Issuer { get; init; }

    /// <summary>Whether the issuer is authoritative (signed with a trusted key).</summary>
    [JsonPropertyName("is_authoritative")]
    public required bool IsAuthoritative { get; init; }

    /// <summary>VEX document ID; null when not captured.</summary>
    [JsonPropertyName("document_id")]
    public string? DocumentId { get; init; }

    /// <summary>SHA-256 digest of the VEX document; null when not captured.</summary>
    [JsonPropertyName("document_digest")]
    public string? DocumentDigest { get; init; }

    /// <summary>Timestamp of the VEX statement.</summary>
    [JsonPropertyName("timestamp")]
    public DateTimeOffset? Timestamp { get; init; }
}
/// <summary>
/// Trace of how inputs were normalized for scoring.
/// Enables deterministic replay of the score from the recorded per-dimension values.
/// </summary>
public sealed record NormalizationTrace
{
    /// <summary>Per-dimension normalization details, one entry per scoring dimension.</summary>
    [JsonPropertyName("dimensions")]
    public required ImmutableArray<DimensionNormalization> Dimensions { get; init; }
}
/// <summary>
/// Normalization details for a single scoring dimension.
/// </summary>
public sealed record DimensionNormalization
{
    /// <summary>Dimension name (cvss, epss, reachability, exploit_maturity, patch_proof).</summary>
    [JsonPropertyName("dimension")]
    public required string Dimension { get; init; }

    /// <summary>Dimension symbol (CVS, EPS, RCH, XPL, PPF).</summary>
    [JsonPropertyName("symbol")]
    public required string Symbol { get; init; }

    /// <summary>Raw input value before normalization.</summary>
    [JsonPropertyName("raw_value")]
    public required double RawValue { get; init; }

    /// <summary>Normalized value [0, 1].</summary>
    [JsonPropertyName("normalized_value")]
    public required double NormalizedValue { get; init; }

    /// <summary>Normalization method applied (e.g., "additive" or "subtractive").</summary>
    [JsonPropertyName("method")]
    public required string Method { get; init; }

    /// <summary>Weight applied to this dimension.</summary>
    [JsonPropertyName("weight")]
    public required double Weight { get; init; }

    /// <summary>Contribution to the final score (weight * normalized value).</summary>
    [JsonPropertyName("contribution")]
    public required double Contribution { get; init; }

    /// <summary>Whether this dimension subtracts from (rather than adds to) the score.</summary>
    [JsonPropertyName("is_subtractive")]
    public bool IsSubtractive { get; init; }
}
/// <summary>
/// Override applied to the verdict (e.g., an authoritative VEX statement zeroing the score).
/// </summary>
public sealed record VerdictOverride
{
    /// <summary>Whether an override was applied.</summary>
    [JsonPropertyName("applied")]
    public required bool Applied { get; init; }

    /// <summary>Type of override (e.g., vex_not_affected, vex_fixed, not_affected_cap, manual).</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }

    /// <summary>Human-readable reason for the override.</summary>
    [JsonPropertyName("reason")]
    public required string Reason { get; init; }

    /// <summary>Original score before the override; null when not tracked.</summary>
    [JsonPropertyName("original_score")]
    public double? OriginalScore { get; init; }

    /// <summary>Source that triggered the override (e.g., VEX source identifier).</summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }
}
/// <summary>
/// Gate action enumeration.
/// Serialized as the enum member name via <see cref="JsonStringEnumConverter"/>;
/// note the digest projection in the builder lowercases it separately.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum GateAction
{
    /// <summary>Finding passed gate checks.</summary>
    Pass,

    /// <summary>Finding triggered a warning but is not blocked.</summary>
    Warn,

    /// <summary>Finding blocked by gate policy.</summary>
    Block
}
/// <summary>
/// Gate decision based on scoring thresholds.
/// </summary>
public sealed record GateDecision
{
    /// <summary>Gate action result (pass/warn/block).</summary>
    [JsonPropertyName("action")]
    public required GateAction Action { get; init; }

    /// <summary>Human-readable reason for the decision.</summary>
    [JsonPropertyName("reason")]
    public required string Reason { get; init; }

    /// <summary>Threshold value that triggered this decision.</summary>
    [JsonPropertyName("threshold")]
    public required double Threshold { get; init; }

    /// <summary>Rules that matched for this decision; empty when none matched.</summary>
    [JsonPropertyName("matched_rules")]
    public ImmutableArray<string> MatchedRules { get; init; } = [];

    /// <summary>Suggested remediation actions; empty when none apply.</summary>
    [JsonPropertyName("suggestions")]
    public ImmutableArray<string> Suggestions { get; init; } = [];
}
/// <summary>
/// Configuration for gate evaluation (thresholds, staleness limits, custom rules).
/// </summary>
public sealed record GateConfiguration
{
    /// <summary>Score threshold for blocking [0, 1]. Scores at/above this block.</summary>
    [JsonPropertyName("block_threshold")]
    public double BlockThreshold { get; init; } = 0.65;

    /// <summary>Score threshold for warning [0, 1]. Scores at/above this warn.</summary>
    [JsonPropertyName("warn_threshold")]
    public double WarnThreshold { get; init; } = 0.40;

    /// <summary>
    /// EPSS data staleness limit before triggering block.
    /// NOTE(review): serialized as System.Text.Json's default TimeSpan format
    /// ("d.hh:mm:ss") — confirm consumers expect that representation.
    /// </summary>
    [JsonPropertyName("epss_staleness_limit")]
    public TimeSpan EpssStalenessLimit { get; init; } = TimeSpan.FromDays(7);

    /// <summary>Minimum patch proof confidence to bypass a warning.</summary>
    [JsonPropertyName("patch_proof_warn_bypass")]
    public double PatchProofWarnBypass { get; init; } = 0.70;

    /// <summary>Auto-pass on trusted VEX not_affected/fixed statements.</summary>
    [JsonPropertyName("auto_pass_on_trusted_vex")]
    public bool AutoPassOnTrustedVex { get; init; } = true;

    /// <summary>Custom gate rules; empty by default.</summary>
    [JsonPropertyName("custom_rules")]
    public ImmutableArray<GateRule> CustomRules { get; init; } = [];

    /// <summary>Default gate configuration (block 0.65, warn 0.40).</summary>
    public static GateConfiguration Default => new();

    /// <summary>Strict gate configuration (lower thresholds: block 0.50, warn 0.30).</summary>
    public static GateConfiguration Strict => new()
    {
        BlockThreshold = 0.50,
        WarnThreshold = 0.30
    };
}
/// <summary>
/// Custom gate rule definition evaluated in addition to the built-in thresholds.
/// </summary>
public sealed record GateRule
{
    /// <summary>Rule identifier (stable key for reporting in matched_rules).</summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>Human-readable rule name.</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Condition expression; grammar is defined by the gate evaluator.</summary>
    [JsonPropertyName("condition")]
    public required string Condition { get; init; }

    /// <summary>Action to take when the rule matches.</summary>
    [JsonPropertyName("action")]
    public required GateAction Action { get; init; }

    /// <summary>Priority (lower value = higher priority).</summary>
    [JsonPropertyName("priority")]
    public int Priority { get; init; }
}

View File

@@ -0,0 +1,479 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-002 - Implement VerdictBundleBuilder
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Builds verdict bundles from EWS calculation results.
/// Assembles inputs, normalization trace, and gate decisions into
/// a content-addressed bundle suitable for signing and anchoring.
/// </summary>
public sealed class VerdictBundleBuilder : IVerdictBundleBuilder
{
    // Canonical digest projection settings: compact output, snake_case names,
    // and null omission so the digest tracks the bundle's wire representation.
    private static readonly JsonSerializerOptions CanonicalSerializerOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    private readonly IGateEvaluator _gateEvaluator;
    private readonly TimeProvider _timeProvider;
    private readonly IScoringManifestProvider? _manifestProvider;

    /// <summary>
    /// Creates a new verdict bundle builder.
    /// </summary>
    /// <param name="gateEvaluator">Gate evaluator for determining pass/warn/block.</param>
    /// <param name="timeProvider">Optional time provider for timestamps; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <param name="manifestProvider">Optional scoring manifest provider for manifest reference.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="gateEvaluator"/> is null.</exception>
    public VerdictBundleBuilder(
        IGateEvaluator gateEvaluator,
        TimeProvider? timeProvider = null,
        IScoringManifestProvider? manifestProvider = null)
    {
        _gateEvaluator = gateEvaluator ?? throw new ArgumentNullException(nameof(gateEvaluator));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _manifestProvider = manifestProvider;
    }

    /// <summary>
    /// Creates a verdict bundle builder with the default gate evaluator.
    /// </summary>
    public VerdictBundleBuilder()
        : this(new GateEvaluator())
    {
    }

    /// <inheritdoc />
    public VerdictBundle Build(
        EvidenceWeightedScoreResult ewsResult,
        EvidenceWeightedScoreInput input,
        EvidenceWeightPolicy policy)
    {
        return Build(ewsResult, input, policy, GateConfiguration.Default);
    }

    /// <inheritdoc />
    public VerdictBundle Build(
        EvidenceWeightedScoreResult ewsResult,
        EvidenceWeightedScoreInput input,
        EvidenceWeightPolicy policy,
        GateConfiguration gateConfig)
    {
        ArgumentNullException.ThrowIfNull(ewsResult);
        ArgumentNullException.ThrowIfNull(input);
        ArgumentNullException.ThrowIfNull(policy);
        ArgumentNullException.ThrowIfNull(gateConfig);

        var computedAt = _timeProvider.GetUtcNow();

        // Extract inputs with source metadata
        var verdictInputs = ExtractInputs(input, computedAt);

        // Create normalization trace from EWS breakdown
        var normalization = CreateNormalizationTrace(ewsResult);

        // Get scoring manifest reference
        var manifestRef = GetManifestRef(policy);

        // Calculate scores
        var rawScore = CalculateRawScore(ewsResult);
        var finalScore = ewsResult.Score / 100.0; // Convert [0, 100] to [0, 1]

        // Check for VEX override
        var verdictOverride = GetVerdictOverride(input, ewsResult);

        // Evaluate gate decision
        var gateDecision = _gateEvaluator.Evaluate(
            finalScore,
            input,
            gateConfig,
            computedAt);

        // Create the bundle (without digest initially)
        var bundle = new VerdictBundle
        {
            BundleId = "", // Will be set after digest computation
            SchemaVersion = VerdictBundle.CurrentSchemaVersion,
            FindingId = ewsResult.FindingId,
            ManifestRef = manifestRef,
            Inputs = verdictInputs,
            Normalization = normalization,
            RawScore = rawScore,
            FinalScore = finalScore,
            Override = verdictOverride,
            Gate = gateDecision,
            ComputedAt = computedAt
        };

        // Compute bundle digest
        var bundleDigest = ComputeBundleDigest(bundle);

        // Return bundle with computed digest and ID (content-addressed: id == digest)
        return bundle with
        {
            BundleId = bundleDigest,
            BundleDigest = bundleDigest
        };
    }

    /// <summary>
    /// Extracts verdict inputs from EWS input with source metadata.
    /// Falls back to well-known default sources when the detailed provenance is absent.
    /// </summary>
    private static VerdictInputs ExtractInputs(EvidenceWeightedScoreInput input, DateTimeOffset capturedAt)
    {
        return new VerdictInputs
        {
            Cvss = new CvssInput
            {
                BaseScore = input.CvssBase,
                Version = input.CvssVersion ?? "3.1",
                Vector = input.CvssDetails?.Vector,
                Source = input.CvssDetails?.Source ?? "nvd",
                CapturedAt = input.CvssDetails?.Timestamp ?? capturedAt
            },
            Epss = new EpssInput
            {
                Probability = input.EpssScore,
                Percentile = input.EpssPercentile,
                Source = input.EpssSource ?? "first.org",
                CapturedAt = capturedAt
            },
            Reachability = new ReachabilityInputRecord
            {
                Level = GetReachabilityLevel(input),
                Value = input.Rch,
                Confidence = input.ReachabilityDetails?.Confidence,
                Method = input.ReachabilityDetails?.AnalysisMethod,
                Source = input.ReachabilityDetails?.EvidenceSource ?? "stella-scanner",
                CapturedAt = input.ReachabilityDetails?.EvidenceTimestamp ?? capturedAt
            },
            ExploitMaturity = new ExploitMaturityInput
            {
                Level = input.ExploitMaturity.ToString().ToLowerInvariant(),
                Value = EvidenceWeightedScoreInput.NormalizeExploitMaturity(input.ExploitMaturity),
                // NOTE(review): when ExploitDetails is null this yields false rather than
                // null/"unknown" — confirm that is the intended semantics for in_kev.
                InKev = input.ExploitDetails?.KevStatus == KevStatus.InKev,
                Source = input.ExploitMaturitySource ?? input.ExploitDetails?.EpssSource ?? "nvd",
                CapturedAt = capturedAt
            },
            PatchProof = new PatchProofInput
            {
                Confidence = input.PatchProofConfidence,
                DeltaSigConfidence = input.PatchProofDetails?.DeltaSigConfidence,
                VendorFixedClaim = input.PatchProofDetails?.VendorFixedClaim,
                Method = input.PatchProofDetails?.VerificationMethod,
                Source = input.PatchProofDetails?.Source ?? "stella-verifier",
                CapturedAt = input.PatchProofDetails?.VerifiedAt ?? capturedAt
            },
            Vex = ExtractVexInput(input)
        };
    }

    /// <summary>
    /// Extracts VEX input if present; returns null when no VEX status exists on the input.
    /// </summary>
    private static VexInput? ExtractVexInput(EvidenceWeightedScoreInput input)
    {
        if (string.IsNullOrEmpty(input.VexStatus))
            return null;

        return new VexInput
        {
            Status = input.VexStatus,
            Justification = null, // Not captured in EWS input currently
            Issuer = input.VexSource ?? "unknown",
            IsAuthoritative = IsAuthoritativeVexSource(input.VexSource),
            DocumentId = null,
            DocumentDigest = null,
            Timestamp = input.VexAnchor?.AttestationTimestamp
        };
    }

    /// <summary>
    /// Checks if a VEX source is considered authoritative.
    /// In-project sources (".vex/", "in-project:") and "vendor:" prefixed sources qualify.
    /// </summary>
    private static bool IsAuthoritativeVexSource(string? vexSource)
    {
        if (string.IsNullOrEmpty(vexSource))
            return false;

        // In-project VEX is always authoritative
        if (vexSource.StartsWith(".vex/", StringComparison.OrdinalIgnoreCase) ||
            vexSource.StartsWith("in-project:", StringComparison.OrdinalIgnoreCase))
            return true;

        // Known authoritative sources
        return vexSource.StartsWith("vendor:", StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Gets the reachability level string from the input.
    /// Uses the explicit NotReachable state when present, otherwise buckets the
    /// normalized reachability value into caller/function/package/none.
    /// </summary>
    private static string GetReachabilityLevel(EvidenceWeightedScoreInput input)
    {
        if (input.ReachabilityDetails?.State == ReachabilityState.NotReachable)
            return "none";

        return input.Rch switch
        {
            >= 0.9 => "caller",
            >= 0.7 => "function",
            >= 0.3 => "package",
            _ => "none"
        };
    }

    /// <summary>
    /// Creates the normalization trace from the EWS breakdown.
    /// </summary>
    private static NormalizationTrace CreateNormalizationTrace(EvidenceWeightedScoreResult ewsResult)
    {
        var dimensions = ewsResult.Breakdown
            .Select(b => new DimensionNormalization
            {
                Dimension = b.Dimension.ToLowerInvariant().Replace(" ", "_"),
                Symbol = b.Symbol,
                RawValue = b.InputValue,
                NormalizedValue = b.InputValue, // Already normalized in EWS
                Method = b.IsSubtractive ? "subtractive" : "additive",
                Weight = b.Weight,
                Contribution = b.Contribution,
                IsSubtractive = b.IsSubtractive
            })
            .ToImmutableArray();

        return new NormalizationTrace
        {
            Dimensions = dimensions
        };
    }

    /// <summary>
    /// Gets the scoring manifest reference from the provider, or synthesizes one from the policy.
    /// </summary>
    private ScoringManifestRef GetManifestRef(EvidenceWeightPolicy policy)
    {
        if (_manifestProvider != null)
        {
            var manifest = _manifestProvider.GetCurrentManifest();
            if (manifest != null)
            {
                return new ScoringManifestRef
                {
                    ScoringVersion = manifest.ScoringVersion,
                    ManifestDigest = manifest.ManifestDigest ?? "sha256:unknown"
                };
            }
        }

        // Create a synthetic reference from the policy. Use the injected TimeProvider
        // (not DateTime.UtcNow) so the version stamp stays deterministic under test
        // clocks, consistent with ComputedAt elsewhere in this builder.
        var utcNow = _timeProvider.GetUtcNow().UtcDateTime;
        return new ScoringManifestRef
        {
            ScoringVersion = $"v{utcNow:yyyy-MM-dd}-{policy.FormulaMode.ToString().ToLowerInvariant()}",
            ManifestDigest = "sha256:" + policy.ComputeDigest()
        };
    }

    /// <summary>
    /// Calculates the raw (pre-clamp) score as the sum of per-dimension contributions.
    /// </summary>
    private static double CalculateRawScore(EvidenceWeightedScoreResult ewsResult)
    {
        return ewsResult.Breakdown
            .Sum(b => b.Contribution);
    }

    /// <summary>
    /// Gets the verdict override if VEX caused a score adjustment; null otherwise.
    /// </summary>
    private static VerdictOverride? GetVerdictOverride(
        EvidenceWeightedScoreInput input,
        EvidenceWeightedScoreResult ewsResult)
    {
        // Check if VEX override was applied (score 0 with vex-override flag).
        // NOTE(review): exact double comparison with 0 — assumes the scorer assigns
        // a literal 0 when the override fires; confirm against the EWS calculator.
        if (ewsResult.Score == 0 && ewsResult.Flags.Contains("vex-override"))
        {
            return new VerdictOverride
            {
                Applied = true,
                Type = input.VexStatus?.ToLowerInvariant() switch
                {
                    "not_affected" => "vex_not_affected",
                    "fixed" => "vex_fixed",
                    _ => "vex_override"
                },
                Reason = $"Authoritative VEX status: {input.VexStatus}",
                OriginalScore = null, // Original score not tracked in override case
                Source = input.VexSource
            };
        }

        // Check for vendor-na cap
        if (ewsResult.Caps.NotAffectedCap)
        {
            return new VerdictOverride
            {
                Applied = true,
                Type = "not_affected_cap",
                Reason = "Vendor not-affected cap applied",
                OriginalScore = ewsResult.Caps.OriginalScore / 100.0,
                Source = input.VexSource
            };
        }

        return null;
    }

    /// <summary>
    /// Computes the SHA-256 digest of the bundle for content addressing.
    /// Excludes BundleId, BundleDigest, DsseSignature, and RekorAnchor so
    /// signing/anchoring after the fact does not change the address.
    /// </summary>
    private static string ComputeBundleDigest(VerdictBundle bundle)
    {
        // Create projection excluding fields that should not be in digest
        var projection = new
        {
            schema_version = bundle.SchemaVersion,
            finding_id = bundle.FindingId,
            manifest_ref = new
            {
                scoring_version = bundle.ManifestRef.ScoringVersion,
                manifest_digest = bundle.ManifestRef.ManifestDigest
            },
            inputs = ProjectInputs(bundle.Inputs),
            normalization = ProjectNormalization(bundle.Normalization),
            raw_score = bundle.RawScore,
            final_score = bundle.FinalScore,
            @override = ProjectOverride(bundle.Override),
            gate = ProjectGate(bundle.Gate),
            computed_at = bundle.ComputedAt.ToString("o")
        };

        var bytes = CanonJson.Canonicalize(projection, CanonicalSerializerOptions);
        return CanonJson.Sha256Prefixed(bytes);
    }

    /// <summary>Projects inputs into the anonymous digest shape (timestamps as ISO-8601 strings).</summary>
    private static object ProjectInputs(VerdictInputs inputs)
    {
        return new
        {
            cvss = new
            {
                base_score = inputs.Cvss.BaseScore,
                version = inputs.Cvss.Version,
                vector = inputs.Cvss.Vector,
                source = inputs.Cvss.Source,
                captured_at = inputs.Cvss.CapturedAt?.ToString("o")
            },
            epss = new
            {
                probability = inputs.Epss.Probability,
                percentile = inputs.Epss.Percentile,
                source = inputs.Epss.Source,
                captured_at = inputs.Epss.CapturedAt?.ToString("o")
            },
            reachability = new
            {
                level = inputs.Reachability.Level,
                value = inputs.Reachability.Value,
                confidence = inputs.Reachability.Confidence,
                method = inputs.Reachability.Method,
                source = inputs.Reachability.Source,
                captured_at = inputs.Reachability.CapturedAt?.ToString("o")
            },
            exploit_maturity = new
            {
                level = inputs.ExploitMaturity.Level,
                value = inputs.ExploitMaturity.Value,
                in_kev = inputs.ExploitMaturity.InKev,
                source = inputs.ExploitMaturity.Source,
                captured_at = inputs.ExploitMaturity.CapturedAt?.ToString("o")
            },
            patch_proof = new
            {
                confidence = inputs.PatchProof.Confidence,
                delta_sig_confidence = inputs.PatchProof.DeltaSigConfidence,
                vendor_fixed_claim = inputs.PatchProof.VendorFixedClaim,
                method = inputs.PatchProof.Method,
                source = inputs.PatchProof.Source,
                captured_at = inputs.PatchProof.CapturedAt?.ToString("o")
            },
            vex = inputs.Vex != null
                ? new
                {
                    status = inputs.Vex.Status,
                    justification = inputs.Vex.Justification,
                    issuer = inputs.Vex.Issuer,
                    is_authoritative = inputs.Vex.IsAuthoritative,
                    timestamp = inputs.Vex.Timestamp?.ToString("o")
                }
                : null
        };
    }

    /// <summary>Projects the normalization trace into the anonymous digest shape.</summary>
    private static object ProjectNormalization(NormalizationTrace normalization)
    {
        return new
        {
            dimensions = normalization.Dimensions.Select(d => new
            {
                dimension = d.Dimension,
                symbol = d.Symbol,
                raw_value = d.RawValue,
                normalized_value = d.NormalizedValue,
                method = d.Method,
                weight = d.Weight,
                contribution = d.Contribution,
                is_subtractive = d.IsSubtractive
            }).ToArray()
        };
    }

    /// <summary>Projects the override into the anonymous digest shape; null when absent.</summary>
    private static object? ProjectOverride(VerdictOverride? @override)
    {
        if (@override == null)
            return null;

        return new
        {
            applied = @override.Applied,
            type = @override.Type,
            reason = @override.Reason,
            original_score = @override.OriginalScore,
            source = @override.Source
        };
    }

    /// <summary>Projects the gate decision into the anonymous digest shape (action lowercased).</summary>
    private static object ProjectGate(GateDecision gate)
    {
        return new
        {
            action = gate.Action.ToString().ToLowerInvariant(),
            reason = gate.Reason,
            threshold = gate.Threshold,
            matched_rules = gate.MatchedRules.ToArray(),
            suggestions = gate.Suggestions.ToArray()
        };
    }
}
/// <summary>
/// Provider for the current scoring manifest reference,
/// consumed by <see cref="VerdictBundleBuilder"/> when building manifest references.
/// </summary>
public interface IScoringManifestProvider
{
    /// <summary>
    /// Gets the current scoring manifest, or null when none is available
    /// (the caller then falls back to a policy-derived reference).
    /// </summary>
    ScoringManifest? GetCurrentManifest();
}

View File

@@ -0,0 +1,460 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-004 - Verdict Rekor Anchoring
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.DeltaVerdict.Signing;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Service for anchoring verdict bundles to a Rekor transparency log
/// and verifying previously stored anchors.
/// </summary>
public interface IVerdictRekorAnchorService
{
    /// <summary>
    /// Anchors a signed verdict bundle to Rekor. The bundle must carry a DSSE signature.
    /// </summary>
    /// <param name="bundle">The signed bundle to anchor.</param>
    /// <param name="options">Anchoring options (e.g., Rekor endpoint).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Result containing the anchored bundle or an error description.</returns>
    Task<VerdictAnchorResult> AnchorAsync(
        VerdictBundle bundle,
        VerdictAnchorOptions options,
        CancellationToken ct = default);

    /// <summary>
    /// Verifies a verdict bundle's Rekor anchor using the stored inclusion proof
    /// (supports offline verification).
    /// </summary>
    /// <param name="bundle">The bundle to verify.</param>
    /// <param name="options">Verification options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification result.</returns>
    Task<VerdictAnchorVerificationResult> VerifyAnchorAsync(
        VerdictBundle bundle,
        VerdictAnchorVerificationOptions options,
        CancellationToken ct = default);
}
/// <summary>
/// Rekor anchor service for verdict bundles.
/// Submits signed verdict bundles to Rekor and stores inclusion proofs for offline verification.
/// </summary>
public sealed class VerdictRekorAnchorService : IVerdictRekorAnchorService
{
private readonly IRekorSubmissionClient _rekorClient;
private readonly TimeProvider _timeProvider;
public VerdictRekorAnchorService(IRekorSubmissionClient rekorClient)
: this(rekorClient, TimeProvider.System)
{
}
public VerdictRekorAnchorService(IRekorSubmissionClient rekorClient, TimeProvider timeProvider)
{
_rekorClient = rekorClient ?? throw new ArgumentNullException(nameof(rekorClient));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <inheritdoc />
public async Task<VerdictAnchorResult> AnchorAsync(
VerdictBundle bundle,
VerdictAnchorOptions options,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(bundle);
ArgumentNullException.ThrowIfNull(options);
// Validate bundle is signed
if (string.IsNullOrEmpty(bundle.DsseSignature))
{
return VerdictAnchorResult.Fail("Bundle must be signed before anchoring");
}
// Parse DSSE envelope to extract payload
VerdictDsseEnvelope? envelope;
try
{
envelope = JsonSerializer.Deserialize<VerdictDsseEnvelope>(bundle.DsseSignature);
}
catch (JsonException ex)
{
return VerdictAnchorResult.Fail($"Invalid DSSE signature: {ex.Message}");
}
if (envelope is null)
{
return VerdictAnchorResult.Fail("DSSE envelope is empty");
}
// Build submission request
var bundleDigest = ComputeBundleDigest(bundle.DsseSignature);
var request = new ManifestRekorSubmissionRequest
{
PayloadType = envelope.PayloadType,
PayloadBase64 = envelope.Payload,
Signatures = envelope.Signatures.Select(s => new ManifestRekorSignature
{
KeyId = s.KeyId,
Signature = s.Sig
}).ToList(),
BundleSha256 = bundleDigest,
ArtifactKind = "verdict-bundle",
ArtifactSha256 = bundle.BundleDigest ?? "unknown"
};
// Submit to Rekor
ManifestRekorSubmissionResponse response;
try
{
response = await _rekorClient.SubmitAsync(request, options.RekorUrl, ct);
}
catch (Exception ex)
{
return VerdictAnchorResult.Fail($"Rekor submission failed: {ex.Message}");
}
if (!response.Success)
{
return VerdictAnchorResult.Fail($"Rekor submission failed: {response.Error}");
}
// Build inclusion proof
InclusionProof? inclusionProof = null;
if (response.Proof is not null)
{
inclusionProof = new InclusionProof
{
TreeSize = response.Proof.TreeSize,
RootHash = response.Proof.RootHash,
Hashes = response.Proof.Hashes.ToImmutableArray(),
LogId = response.Proof.LogId
};
}
// Build Rekor linkage
var linkage = new RekorLinkage
{
Uuid = response.Uuid,
LogIndex = response.LogIndex,
IntegratedTime = response.IntegratedTime,
InclusionProof = inclusionProof
};
// Return bundle with linkage
var anchoredBundle = bundle with { RekorAnchor = linkage };
return VerdictAnchorResult.Success(anchoredBundle, linkage);
}
/// <inheritdoc />
public Task<VerdictAnchorVerificationResult> VerifyAnchorAsync(
VerdictBundle bundle,
VerdictAnchorVerificationOptions options,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(bundle);
ArgumentNullException.ThrowIfNull(options);
// Check for anchor
if (bundle.RekorAnchor is null)
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail("Bundle has no Rekor anchor"));
}
var anchor = bundle.RekorAnchor;
// Check for inclusion proof (required for offline verification)
if (anchor.InclusionProof is null && options.RequireInclusionProof)
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail(
"Bundle has no inclusion proof for offline verification"));
}
// Validate UUID format
if (string.IsNullOrEmpty(anchor.Uuid))
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail("Invalid Rekor UUID"));
}
// Validate log index
if (anchor.LogIndex < 0)
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail("Invalid log index"));
}
// Validate integrated time is reasonable
if (anchor.IntegratedTime <= 0)
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail("Invalid integrated time"));
}
var integratedTimeUtc = DateTimeOffset.FromUnixTimeSeconds(anchor.IntegratedTime);
var now = _timeProvider.GetUtcNow();
// Check for future timestamps (allow small skew)
if (integratedTimeUtc > now.AddMinutes(5))
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail(
$"Integrated time is in the future: {integratedTimeUtc:O}"));
}
// Check for very old timestamps if max age is specified
if (options.MaxAgeHours.HasValue)
{
var maxAge = TimeSpan.FromHours(options.MaxAgeHours.Value);
if (now - integratedTimeUtc > maxAge)
{
return Task.FromResult(VerdictAnchorVerificationResult.Fail(
$"Anchor is older than {options.MaxAgeHours} hours"));
}
}
// Verify inclusion proof if available
if (anchor.InclusionProof is not null && options.VerifyInclusionProof)
{
var proofVerification = VerifyInclusionProof(bundle, anchor.InclusionProof);
if (!proofVerification.IsValid)
{
return Task.FromResult(proofVerification);
}
}
return Task.FromResult(VerdictAnchorVerificationResult.Success(
anchor.Uuid,
anchor.LogIndex,
integratedTimeUtc));
}
private VerdictAnchorVerificationResult VerifyInclusionProof(VerdictBundle bundle, InclusionProof proof)
{
// Basic validation of proof structure
// Full Merkle tree verification would require the full tree path
if (string.IsNullOrEmpty(proof.RootHash))
{
return VerdictAnchorVerificationResult.Fail("Inclusion proof has no root hash");
}
if (proof.TreeSize <= 0)
{
return VerdictAnchorVerificationResult.Fail("Inclusion proof has invalid tree size");
}
if (string.IsNullOrEmpty(proof.LogId))
{
return VerdictAnchorVerificationResult.Fail("Inclusion proof has no log ID");
}
// In a full implementation, we would:
// 1. Compute the leaf hash from the bundle
// 2. Use the sibling hashes to compute the root
// 3. Compare with the stored root hash
// For now, we trust the stored proof structure
return VerdictAnchorVerificationResult.Success(
bundle.RekorAnchor!.Uuid,
bundle.RekorAnchor.LogIndex,
DateTimeOffset.FromUnixTimeSeconds(bundle.RekorAnchor.IntegratedTime));
}
private static string ComputeBundleDigest(string dsseSignature)
{
var bytes = Encoding.UTF8.GetBytes(dsseSignature);
var hash = SHA256.HashData(bytes);
return Convert.ToHexStringLower(hash);
}
}
/// <summary>
/// Options controlling how a verdict bundle is anchored to Rekor.
/// </summary>
public sealed record VerdictAnchorOptions
{
    /// <summary>URL of the Rekor transparency-log server to submit to.</summary>
    public required string RekorUrl { get; init; }

    /// <summary>When true (the default), the entry is archived in Rekor.</summary>
    public bool Archive { get; init; } = true;
}
/// <summary>
/// Options controlling verification of a verdict bundle's Rekor anchor.
/// </summary>
public sealed record VerdictAnchorVerificationOptions
{
    /// <summary>Require an inclusion proof to be present (needed for offline verification).</summary>
    public bool RequireInclusionProof { get; init; } = true;

    /// <summary>Structurally validate the inclusion proof when present.</summary>
    public bool VerifyInclusionProof { get; init; } = true;

    /// <summary>Maximum acceptable anchor age in hours; null means no age limit.</summary>
    public int? MaxAgeHours { get; init; }

    /// <summary>Strict defaults suitable for online and offline verification.</summary>
    public static VerdictAnchorVerificationOptions Default => new();

    /// <summary>Relaxed settings for development/testing: no inclusion proof required or checked.</summary>
    public static VerdictAnchorVerificationOptions Relaxed => Default with
    {
        RequireInclusionProof = false,
        VerifyInclusionProof = false
    };
}
/// <summary>
/// Result of anchoring a verdict bundle to Rekor.
/// </summary>
public sealed record VerdictAnchorResult
{
    /// <summary>True when anchoring succeeded.</summary>
    public required bool IsSuccess { get; init; }

    /// <summary>The bundle carrying its new Rekor linkage (set on success).</summary>
    public VerdictBundle? AnchoredBundle { get; init; }

    /// <summary>Details of the Rekor entry (set on success).</summary>
    public RekorLinkage? Linkage { get; init; }

    /// <summary>Failure description (set when <see cref="IsSuccess"/> is false).</summary>
    public string? Error { get; init; }

    /// <summary>Builds a successful result carrying the anchored bundle and its linkage.</summary>
    public static VerdictAnchorResult Success(VerdictBundle bundle, RekorLinkage linkage)
    {
        return new VerdictAnchorResult
        {
            IsSuccess = true,
            AnchoredBundle = bundle,
            Linkage = linkage
        };
    }

    /// <summary>Builds a failed result with the given error message.</summary>
    public static VerdictAnchorResult Fail(string error)
    {
        return new VerdictAnchorResult
        {
            IsSuccess = false,
            Error = error
        };
    }
}
/// <summary>
/// Result of verifying a verdict bundle's Rekor anchor.
/// </summary>
public sealed record VerdictAnchorVerificationResult
{
    /// <summary>True when the anchor verified successfully.</summary>
    public required bool IsValid { get; init; }

    /// <summary>Rekor UUID confirmed by verification (set on success).</summary>
    public string? VerifiedUuid { get; init; }

    /// <summary>Log index confirmed by verification (set on success).</summary>
    public long? VerifiedLogIndex { get; init; }

    /// <summary>Integration timestamp confirmed by verification (set on success).</summary>
    public DateTimeOffset? VerifiedIntegratedTime { get; init; }

    /// <summary>Failure description (set when <see cref="IsValid"/> is false).</summary>
    public string? Error { get; init; }

    /// <summary>Builds a successful verification result.</summary>
    public static VerdictAnchorVerificationResult Success(
        string uuid,
        long logIndex,
        DateTimeOffset integratedTime)
    {
        return new VerdictAnchorVerificationResult
        {
            IsValid = true,
            VerifiedUuid = uuid,
            VerifiedLogIndex = logIndex,
            VerifiedIntegratedTime = integratedTime
        };
    }

    /// <summary>Builds a failed verification result with the given error message.</summary>
    public static VerdictAnchorVerificationResult Fail(string error)
    {
        return new VerdictAnchorVerificationResult
        {
            IsValid = false,
            Error = error
        };
    }
}
/// <summary>
/// In-memory Rekor client stub for testing verdict anchoring.
/// Produces deterministic responses without any network access.
/// </summary>
public sealed class StubVerdictRekorClient : IRekorSubmissionClient
{
    private readonly TimeProvider _timeProvider;
    private long _logIndex;

    /// <summary>Creates a stub backed by the system clock.</summary>
    public StubVerdictRekorClient() : this(TimeProvider.System)
    {
    }

    /// <summary>Creates a stub backed by the supplied clock (useful for deterministic tests).</summary>
    public StubVerdictRekorClient(TimeProvider timeProvider)
    {
        _timeProvider = timeProvider;
    }

    /// <summary>
    /// Simulates a successful Rekor submission: each call gets the next log index
    /// (thread-safe via Interlocked), a UUID derived from the bundle digest, and a
    /// stub proof whose "root" is simply the submitted digest.
    /// </summary>
    public Task<ManifestRekorSubmissionResponse> SubmitAsync(
        ManifestRekorSubmissionRequest request,
        string rekorUrl,
        CancellationToken ct = default)
    {
        var index = Interlocked.Increment(ref _logIndex);
        var response = new ManifestRekorSubmissionResponse
        {
            Success = true,
            Uuid = ComputeUuid(request.BundleSha256),
            LogIndex = index,
            IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds(),
            Proof = new ManifestRekorProof
            {
                TreeSize = index,
                RootHash = request.BundleSha256,
                Hashes = ImmutableArray<string>.Empty,
                LogId = new Uri(rekorUrl).Host
            }
        };
        return Task.FromResult(response);
    }

    /// <summary>Derives a stable UUID from the first 16 bytes of SHA-256(digest).</summary>
    private static string ComputeUuid(string digest)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(digest));
        return new Guid(hash.AsSpan(0, 16)).ToString();
    }
}

View File

@@ -0,0 +1,481 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-003 - Verdict DSSE Signing
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
namespace StellaOps.DeltaVerdict.Bundles;
/// <summary>
/// Service for DSSE signing and verification of verdict bundles.
/// </summary>
public interface IVerdictSigningService
{
    /// <summary>
    /// Signs a verdict bundle using the provided options. Any existing signature
    /// on the bundle is replaced.
    /// </summary>
    /// <param name="bundle">The verdict bundle to sign.</param>
    /// <param name="options">Signing options including key identifier, algorithm, and secret.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The bundle with its DSSE signature populated.</returns>
    Task<VerdictBundle> SignAsync(
        VerdictBundle bundle,
        VerdictSigningOptions options,
        CancellationToken ct = default);
    /// <summary>
    /// Verifies the DSSE signature on a verdict bundle, including a tamper check
    /// that the signed payload matches the bundle's current canonical form.
    /// </summary>
    /// <param name="bundle">The bundle to verify.</param>
    /// <param name="options">Verification options including expected key and secret.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification result indicating success or failure.</returns>
    Task<VerdictVerificationResult> VerifyAsync(
        VerdictBundle bundle,
        VerdictVerificationOptions options,
        CancellationToken ct = default);
    /// <summary>
    /// Gets the canonical JSON representation of a verdict bundle for signing.
    /// Excludes BundleId, DsseSignature, and RekorAnchor fields so the signature
    /// covers only content, not post-signing metadata.
    /// </summary>
    /// <param name="bundle">The bundle to serialize.</param>
    /// <returns>Canonical JSON string.</returns>
    string GetCanonicalJson(VerdictBundle bundle);
}
/// <summary>
/// DSSE signing service for verdict bundles.
/// Implements PAE (Pre-Authentication Encoding) per the DSSE specification.
/// </summary>
public sealed class VerdictSigningService : IVerdictSigningService
{
    /// <summary>
    /// DSSE payload type for verdict bundles.
    /// </summary>
    public const string PayloadType = "application/vnd.stella.scoring.v1+json";

    // Options for (de)serializing the DSSE envelope itself.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    // Options for serializing the canonical (signed) payload projection.
    private static readonly JsonSerializerOptions CanonicalSerializerOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <inheritdoc />
    public Task<VerdictBundle> SignAsync(
        VerdictBundle bundle,
        VerdictSigningOptions options,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        ArgumentNullException.ThrowIfNull(options);
        ct.ThrowIfCancellationRequested();

        // Clear any existing signature so the canonical payload excludes it.
        var bundleToSign = bundle with { DsseSignature = null };

        // Build the payload from the canonical JSON projection.
        var payloadJson = GetCanonicalJson(bundleToSign);
        var payloadBytes = Encoding.UTF8.GetBytes(payloadJson);

        // Wrap payload + signature in a DSSE envelope and attach it to the bundle.
        var envelope = BuildEnvelope(payloadBytes, options);
        var envelopeJson = JsonSerializer.Serialize(envelope, JsonOptions);
        var signedBundle = bundleToSign with { DsseSignature = envelopeJson };
        return Task.FromResult(signedBundle);
    }

    /// <inheritdoc />
    public Task<VerdictVerificationResult> VerifyAsync(
        VerdictBundle bundle,
        VerdictVerificationOptions options,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        ArgumentNullException.ThrowIfNull(options);
        ct.ThrowIfCancellationRequested();

        // A bundle without a DSSE envelope cannot be verified.
        if (string.IsNullOrEmpty(bundle.DsseSignature))
        {
            return Task.FromResult(VerdictVerificationResult.Fail("Bundle is not signed"));
        }

        // Parse the DSSE envelope.
        VerdictDsseEnvelope? envelope;
        try
        {
            envelope = JsonSerializer.Deserialize<VerdictDsseEnvelope>(bundle.DsseSignature, JsonOptions);
        }
        catch (JsonException ex)
        {
            return Task.FromResult(VerdictVerificationResult.Fail($"Invalid signature envelope: {ex.Message}"));
        }
        if (envelope is null)
        {
            return Task.FromResult(VerdictVerificationResult.Fail("Signature envelope is empty"));
        }

        // The payload type is part of the PAE; a mismatch means a different artifact class.
        if (!string.Equals(envelope.PayloadType, PayloadType, StringComparison.Ordinal))
        {
            return Task.FromResult(VerdictVerificationResult.Fail(
                $"Invalid payload type: expected '{PayloadType}', got '{envelope.PayloadType}'"));
        }

        // Decode the base64 payload.
        byte[] payloadBytes;
        try
        {
            payloadBytes = Convert.FromBase64String(envelope.Payload);
        }
        catch (FormatException ex)
        {
            return Task.FromResult(VerdictVerificationResult.Fail($"Invalid payload encoding: {ex.Message}"));
        }

        // Compute the expected signature over the PAE of the stored payload.
        var pae = BuildPae(envelope.PayloadType, payloadBytes);
        var expectedSig = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
        var expectedSigBytes = Convert.FromBase64String(expectedSig);

        // Find a signature entry for the expected key whose value matches.
        // Signature bytes are compared with CryptographicOperations.FixedTimeEquals
        // so an attacker cannot learn matching prefixes via timing differences.
        var matched = envelope.Signatures.Any(sig =>
            string.Equals(sig.KeyId, options.KeyId, StringComparison.Ordinal)
            && SignatureMatches(sig.Sig, expectedSigBytes));
        if (!matched)
        {
            return Task.FromResult(VerdictVerificationResult.Fail("Signature verification failed"));
        }

        // Tamper detection: the signed payload must equal the bundle's current canonical form.
        var expectedCanonicalJson = GetCanonicalJson(bundle with { DsseSignature = null });
        var actualPayloadJson = Encoding.UTF8.GetString(payloadBytes);
        if (!string.Equals(expectedCanonicalJson, actualPayloadJson, StringComparison.Ordinal))
        {
            return Task.FromResult(VerdictVerificationResult.Fail("Bundle content has been modified"));
        }

        return Task.FromResult(VerdictVerificationResult.Success(options.KeyId));
    }

    /// <summary>
    /// Compares a candidate base64 signature against the expected signature bytes
    /// in constant time. Invalid base64 input is treated as a mismatch.
    /// </summary>
    private static bool SignatureMatches(string candidateBase64, byte[] expected)
    {
        byte[] candidate;
        try
        {
            candidate = Convert.FromBase64String(candidateBase64);
        }
        catch (FormatException)
        {
            return false;
        }
        return CryptographicOperations.FixedTimeEquals(candidate, expected);
    }

    /// <inheritdoc />
    public string GetCanonicalJson(VerdictBundle bundle)
    {
        // Project only the signed content; BundleId, DsseSignature, and RekorAnchor
        // are intentionally excluded from the signed payload.
        var projection = new
        {
            schema_version = bundle.SchemaVersion,
            finding_id = bundle.FindingId,
            manifest_ref = new
            {
                scoring_version = bundle.ManifestRef.ScoringVersion,
                manifest_digest = bundle.ManifestRef.ManifestDigest
            },
            inputs = ProjectInputs(bundle.Inputs),
            normalization = ProjectNormalization(bundle.Normalization),
            raw_score = bundle.RawScore,
            final_score = bundle.FinalScore,
            @override = ProjectOverride(bundle.Override),
            gate = ProjectGate(bundle.Gate),
            computed_at = bundle.ComputedAt.ToString("o"),
            bundle_digest = bundle.BundleDigest
        };
        return CanonJson.Serialize(projection, CanonicalSerializerOptions);
    }

    // Projects scoring inputs into the snake_case canonical shape.
    private static object ProjectInputs(VerdictInputs inputs)
    {
        return new
        {
            cvss = new
            {
                base_score = inputs.Cvss.BaseScore,
                version = inputs.Cvss.Version,
                vector = inputs.Cvss.Vector,
                source = inputs.Cvss.Source,
                source_digest = inputs.Cvss.SourceDigest,
                captured_at = inputs.Cvss.CapturedAt?.ToString("o")
            },
            epss = new
            {
                probability = inputs.Epss.Probability,
                percentile = inputs.Epss.Percentile,
                model_version = inputs.Epss.ModelVersion,
                model_date = inputs.Epss.ModelDate?.ToString("o"),
                source = inputs.Epss.Source,
                captured_at = inputs.Epss.CapturedAt?.ToString("o")
            },
            reachability = new
            {
                level = inputs.Reachability.Level,
                value = inputs.Reachability.Value,
                confidence = inputs.Reachability.Confidence,
                method = inputs.Reachability.Method,
                source = inputs.Reachability.Source,
                captured_at = inputs.Reachability.CapturedAt?.ToString("o")
            },
            exploit_maturity = new
            {
                level = inputs.ExploitMaturity.Level,
                value = inputs.ExploitMaturity.Value,
                in_kev = inputs.ExploitMaturity.InKev,
                source = inputs.ExploitMaturity.Source,
                captured_at = inputs.ExploitMaturity.CapturedAt?.ToString("o")
            },
            patch_proof = new
            {
                confidence = inputs.PatchProof.Confidence,
                delta_sig_confidence = inputs.PatchProof.DeltaSigConfidence,
                vendor_fixed_claim = inputs.PatchProof.VendorFixedClaim,
                method = inputs.PatchProof.Method,
                source = inputs.PatchProof.Source,
                captured_at = inputs.PatchProof.CapturedAt?.ToString("o")
            },
            vex = inputs.Vex != null
                ? new
                {
                    status = inputs.Vex.Status,
                    justification = inputs.Vex.Justification,
                    issuer = inputs.Vex.Issuer,
                    is_authoritative = inputs.Vex.IsAuthoritative,
                    document_id = inputs.Vex.DocumentId,
                    document_digest = inputs.Vex.DocumentDigest,
                    timestamp = inputs.Vex.Timestamp?.ToString("o")
                }
                : null
        };
    }

    // Projects the normalization trace (per-dimension contributions) for signing.
    private static object ProjectNormalization(NormalizationTrace normalization)
    {
        return new
        {
            dimensions = normalization.Dimensions.Select(d => new
            {
                dimension = d.Dimension,
                symbol = d.Symbol,
                raw_value = d.RawValue,
                normalized_value = d.NormalizedValue,
                method = d.Method,
                weight = d.Weight,
                contribution = d.Contribution,
                is_subtractive = d.IsSubtractive
            }).ToArray()
        };
    }

    // Projects the optional override record; null when no override was applied.
    private static object? ProjectOverride(VerdictOverride? @override)
    {
        if (@override == null)
            return null;
        return new
        {
            applied = @override.Applied,
            type = @override.Type,
            reason = @override.Reason,
            original_score = @override.OriginalScore,
            source = @override.Source
        };
    }

    // Projects the gate decision; the action enum is lowered for stable output.
    private static object ProjectGate(GateDecision gate)
    {
        return new
        {
            action = gate.Action.ToString().ToLowerInvariant(),
            reason = gate.Reason,
            threshold = gate.Threshold,
            matched_rules = gate.MatchedRules.ToArray(),
            suggestions = gate.Suggestions.ToArray()
        };
    }

    // Builds the DSSE envelope over the payload with a single signature entry.
    private static VerdictDsseEnvelope BuildEnvelope(byte[] payload, VerdictSigningOptions options)
    {
        var pae = BuildPae(PayloadType, payload);
        var signature = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
        return new VerdictDsseEnvelope(
            PayloadType,
            Convert.ToBase64String(payload),
            [new VerdictDsseSignature(options.KeyId, signature)]);
    }

    // Dispatches to the configured algorithm; returns a base64 signature string.
    private static string ComputeSignature(byte[] pae, VerdictSigningAlgorithm algorithm, string? secretBase64)
    {
        return algorithm switch
        {
            VerdictSigningAlgorithm.HmacSha256 => ComputeHmac(pae, secretBase64),
            VerdictSigningAlgorithm.Sha256 => Convert.ToBase64String(SHA256.HashData(pae)),
            _ => throw new InvalidOperationException($"Unsupported signing algorithm: {algorithm}")
        };
    }

    // HMAC-SHA256 over the PAE using the provided base64-decoded secret.
    private static string ComputeHmac(byte[] data, string? secretBase64)
    {
        if (string.IsNullOrWhiteSpace(secretBase64))
        {
            throw new InvalidOperationException("HMAC signing requires a base64 secret.");
        }
        var secret = Convert.FromBase64String(secretBase64);
        var sig = HMACSHA256.HashData(secret, data);
        return Convert.ToBase64String(sig);
    }

    /// <summary>
    /// Builds the DSSE Pre-Authentication Encoding (PAE).
    /// PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload
    /// </summary>
    private static byte[] BuildPae(string payloadType, byte[] payload)
    {
        const string prefix = "DSSEv1";
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var prefixBytes = Encoding.UTF8.GetBytes(prefix);
        var lengthType = Encoding.UTF8.GetBytes(typeBytes.Length.ToString());
        var lengthPayload = Encoding.UTF8.GetBytes(payload.Length.ToString());
        using var stream = new MemoryStream();
        stream.Write(prefixBytes);
        stream.WriteByte((byte)' ');
        stream.Write(lengthType);
        stream.WriteByte((byte)' ');
        stream.Write(typeBytes);
        stream.WriteByte((byte)' ');
        stream.Write(lengthPayload);
        stream.WriteByte((byte)' ');
        stream.Write(payload);
        return stream.ToArray();
    }
}
/// <summary>
/// Options controlling how a verdict bundle is signed.
/// </summary>
public sealed record VerdictSigningOptions
{
    /// <summary>Identifier of the signing key, recorded in the DSSE signature entry.</summary>
    public required string KeyId { get; init; }

    /// <summary>Algorithm used to produce the signature (default: HMAC-SHA256).</summary>
    public VerdictSigningAlgorithm Algorithm { get; init; } = VerdictSigningAlgorithm.HmacSha256;

    /// <summary>Base64-encoded shared secret; required for HMAC-SHA256 signing.</summary>
    public string? SecretBase64 { get; init; }
}
/// <summary>
/// Options controlling verification of a verdict bundle signature.
/// </summary>
public sealed record VerdictVerificationOptions
{
    /// <summary>Key identifier the signature is expected to carry.</summary>
    public required string KeyId { get; init; }

    /// <summary>Algorithm the signature was produced with (default: HMAC-SHA256).</summary>
    public VerdictSigningAlgorithm Algorithm { get; init; } = VerdictSigningAlgorithm.HmacSha256;

    /// <summary>Base64-encoded shared secret; required for HMAC-SHA256 verification.</summary>
    public string? SecretBase64 { get; init; }
}
/// <summary>
/// Supported signing algorithms for verdict bundles.
/// </summary>
public enum VerdictSigningAlgorithm
{
    /// <summary>
    /// HMAC-SHA256 (development/testing). Requires a base64 secret
    /// (see <see cref="VerdictSigningOptions.SecretBase64"/>).
    /// </summary>
    HmacSha256,
    /// <summary>
    /// SHA-256 hash only (no key; provides integrity but no authenticity — testing only).
    /// </summary>
    Sha256
}
/// <summary>
/// Result of verdict bundle signature verification.
/// </summary>
public sealed record VerdictVerificationResult
{
    /// <summary>True when the signature verified successfully.</summary>
    public required bool IsValid { get; init; }

    /// <summary>Key ID that produced the verified signature (set on success only).</summary>
    public string? VerifiedKeyId { get; init; }

    /// <summary>Failure description (set when <see cref="IsValid"/> is false).</summary>
    public string? Error { get; init; }

    /// <summary>Builds a successful verification result for the given key.</summary>
    public static VerdictVerificationResult Success(string keyId)
    {
        return new VerdictVerificationResult
        {
            IsValid = true,
            VerifiedKeyId = keyId
        };
    }

    /// <summary>Builds a failed verification result with the given error message.</summary>
    public static VerdictVerificationResult Fail(string error)
    {
        return new VerdictVerificationResult
        {
            IsValid = false,
            Error = error
        };
    }
}
/// <summary>
/// DSSE envelope for verdict bundles, serialized with the standard DSSE JSON
/// member names ("payloadType", "payload", "signatures").
/// </summary>
/// <param name="PayloadType">DSSE payload type identifier.</param>
/// <param name="Payload">Base64-encoded canonical payload bytes.</param>
/// <param name="Signatures">One or more signatures computed over PAE(payloadType, payload).</param>
public sealed record VerdictDsseEnvelope(
    [property: JsonPropertyName("payloadType")] string PayloadType,
    [property: JsonPropertyName("payload")] string Payload,
    [property: JsonPropertyName("signatures")] IReadOnlyList<VerdictDsseSignature> Signatures);
/// <summary>
/// DSSE signature entry: a key identifier plus the base64-encoded signature value.
/// </summary>
/// <param name="KeyId">Identifier of the key that produced the signature.</param>
/// <param name="Sig">Base64-encoded signature bytes.</param>
public sealed record VerdictDsseSignature(
    [property: JsonPropertyName("keyid")] string KeyId,
    [property: JsonPropertyName("sig")] string Sig);

View File

@@ -0,0 +1,271 @@
// -----------------------------------------------------------------------------
// PinnedInput.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-001 - Create PinnedInput Model
// Description: Generic pinned input wrapper with provenance tracking
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text.Json.Serialization;
namespace StellaOps.DeltaVerdict.Inputs;
/// <summary>
/// Generic pinned input wrapper that captures source provenance.
/// Enables deterministic replay by recording exactly when and how inputs were obtained.
/// </summary>
/// <typeparam name="T">The input value type.</typeparam>
public sealed record PinnedInput<T>
{
    /// <summary>
    /// The input value.
    /// </summary>
    [JsonPropertyName("value")]
    public required T Value { get; init; }
    /// <summary>
    /// SHA-256 digest of the source content, lowercase hex.
    /// </summary>
    [JsonPropertyName("sourceDigest")]
    public required string SourceDigest { get; init; }
    /// <summary>
    /// When the input was retrieved.
    /// </summary>
    [JsonPropertyName("retrievedAt")]
    public required DateTimeOffset RetrievedAt { get; init; }
    /// <summary>
    /// Source URL or identifier.
    /// </summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }
    /// <summary>
    /// Version or ETag of the source.
    /// </summary>
    [JsonPropertyName("sourceVersion")]
    public string? SourceVersion { get; init; }
    /// <summary>
    /// Signature metadata if source was signed.
    /// </summary>
    [JsonPropertyName("signature")]
    public InputSignature? Signature { get; init; }
    /// <summary>
    /// Time-to-live for this input type.
    /// </summary>
    [JsonPropertyName("ttl")]
    public TimeSpan? Ttl { get; init; }
    /// <summary>
    /// Whether the input has expired based on TTL. Always false when no TTL is set.
    /// NOTE(review): reads the ambient clock (DateTimeOffset.UtcNow); inject a
    /// TimeProvider if deterministic expiry evaluation is ever required.
    /// </summary>
    [JsonIgnore]
    public bool IsExpired => Ttl.HasValue &&
        DateTimeOffset.UtcNow > RetrievedAt.Add(Ttl.Value);
    /// <summary>
    /// Remaining time before expiration (negative once expired); null when no TTL is set.
    /// </summary>
    [JsonIgnore]
    public TimeSpan? TimeRemaining => Ttl.HasValue
        ? RetrievedAt.Add(Ttl.Value) - DateTimeOffset.UtcNow
        : null;
    /// <summary>
    /// Creates a pinned input, computing the digest from the raw source content.
    /// </summary>
    /// <param name="value">Parsed input value.</param>
    /// <param name="sourceContent">Raw bytes the value was parsed from; hashed for provenance.</param>
    /// <param name="retrievedAt">Retrieval time; defaults to now (UTC).</param>
    /// <param name="source">Optional source URL or identifier.</param>
    /// <param name="ttl">Optional freshness TTL.</param>
    public static PinnedInput<T> Create(
        T value,
        byte[] sourceContent,
        DateTimeOffset? retrievedAt = null,
        string? source = null,
        TimeSpan? ttl = null)
    {
        // SHA256.HashData avoids allocating/disposing a hasher instance and matches
        // the lowercase-hex digest style used elsewhere in this codebase.
        var digest = Convert.ToHexString(SHA256.HashData(sourceContent)).ToLowerInvariant();
        return new PinnedInput<T>
        {
            Value = value,
            SourceDigest = digest,
            RetrievedAt = retrievedAt ?? DateTimeOffset.UtcNow,
            Source = source,
            Ttl = ttl
        };
    }
    /// <summary>
    /// Creates a pinned input with a pre-computed digest (e.g. when the raw bytes
    /// are no longer available).
    /// </summary>
    public static PinnedInput<T> CreateWithDigest(
        T value,
        string sourceDigest,
        DateTimeOffset retrievedAt,
        string? source = null,
        TimeSpan? ttl = null)
    {
        return new PinnedInput<T>
        {
            Value = value,
            SourceDigest = sourceDigest,
            RetrievedAt = retrievedAt,
            Source = source,
            Ttl = ttl
        };
    }
}
/// <summary>
/// Signature metadata for signed inputs. Records whether a signature was seen,
/// whether it validated, and who signed — so provenance can be audited later.
/// </summary>
public sealed record InputSignature
{
    /// <summary>Whether a signature was present on the source.</summary>
    [JsonPropertyName("present")]
    public bool Present { get; init; }
    /// <summary>Whether the signature validated successfully.</summary>
    [JsonPropertyName("valid")]
    public bool Valid { get; init; }
    /// <summary>Signature format (e.g., "PGP", "PKCS7", "JWS").</summary>
    [JsonPropertyName("format")]
    public string? Format { get; init; }
    /// <summary>Signing key ID or fingerprint.</summary>
    [JsonPropertyName("keyId")]
    public string? KeyId { get; init; }
    /// <summary>Signer identity (email, DN, etc.).</summary>
    [JsonPropertyName("signer")]
    public string? Signer { get; init; }
    /// <summary>Timestamp at which the source was signed.</summary>
    [JsonPropertyName("signedAt")]
    public DateTimeOffset? SignedAt { get; init; }
    /// <summary>Trust level of the signer (defaults to Unknown).</summary>
    [JsonPropertyName("trustLevel")]
    public TrustLevel TrustLevel { get; init; } = TrustLevel.Unknown;
}
/// <summary>
/// Trust level for signed inputs. Serialized as the enum member name
/// (string) rather than its numeric value.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum TrustLevel
{
    /// <summary>Trust level unknown.</summary>
    Unknown,
    /// <summary>Untrusted source.</summary>
    Untrusted,
    /// <summary>Community-contributed.</summary>
    Community,
    /// <summary>Vendor-signed.</summary>
    Vendor,
    /// <summary>Authoritative source (NVD, MITRE, etc.).</summary>
    Authoritative
}
/// <summary>
/// Container for all pinned scoring inputs. Several entries use
/// <c>PinnedInput&lt;object&gt;</c>, which erases the concrete payload type;
/// consumers are expected to know the underlying shape.
/// </summary>
public sealed record PinnedScoringInputs
{
    /// <summary>Pinned SBOM.</summary>
    [JsonPropertyName("sbom")]
    public PinnedInput<object>? Sbom { get; init; }
    /// <summary>Pinned VEX statements.</summary>
    [JsonPropertyName("vexStatements")]
    public IReadOnlyList<PinnedInput<object>>? VexStatements { get; init; }
    /// <summary>Pinned EPSS scores, keyed by identifier (presumably CVE ID — confirm against producer).</summary>
    [JsonPropertyName("epssScores")]
    public PinnedInput<IReadOnlyDictionary<string, double>>? EpssScores { get; init; }
    /// <summary>Pinned CVSS scores, keyed by identifier (presumably CVE ID — confirm against producer).</summary>
    [JsonPropertyName("cvssScores")]
    public PinnedInput<IReadOnlyDictionary<string, double>>? CvssScores { get; init; }
    /// <summary>Pinned reachability data.</summary>
    [JsonPropertyName("reachability")]
    public PinnedInput<object>? Reachability { get; init; }
    /// <summary>Pinned KEV entries.</summary>
    [JsonPropertyName("kevEntries")]
    public PinnedInput<IReadOnlySet<string>>? KevEntries { get; init; }
    /// <summary>Trusted VEX keys at evaluation time.</summary>
    [JsonPropertyName("trustedVexKeys")]
    public IReadOnlyList<TrustedKeyEntry>? TrustedVexKeys { get; init; }
    /// <summary>Manifest digest identifying this input set.</summary>
    [JsonPropertyName("manifestDigest")]
    public string? ManifestDigest { get; init; }
}
/// <summary>
/// Trusted key entry for VEX verification: a key fingerprint with issuer
/// identity, validity window, and trust level.
/// </summary>
public sealed record TrustedKeyEntry
{
    /// <summary>Key fingerprint (SHA-256 of SPKI).</summary>
    [JsonPropertyName("fingerprint")]
    public required string Fingerprint { get; init; }
    /// <summary>Issuer name.</summary>
    [JsonPropertyName("issuer")]
    public required string Issuer { get; init; }
    /// <summary>Key algorithm.</summary>
    [JsonPropertyName("algorithm")]
    public string? Algorithm { get; init; }
    /// <summary>Start of the key's validity window.</summary>
    [JsonPropertyName("validFrom")]
    public DateTimeOffset ValidFrom { get; init; }
    /// <summary>End of the key's validity window; null means no expiry.</summary>
    [JsonPropertyName("validUntil")]
    public DateTimeOffset? ValidUntil { get; init; }
    /// <summary>Trust level (defaults to Vendor).</summary>
    [JsonPropertyName("trustLevel")]
    public TrustLevel TrustLevel { get; init; } = TrustLevel.Vendor;
}
/// <summary>
/// Freshness TTLs for each pinned-evidence category, including the
/// advisory-mandated EPSS-specific TTL.
/// </summary>
public sealed record ExtendedEvidenceTtlOptions
{
    /// <summary>Name of the configuration section these options bind from.</summary>
    public const string SectionName = "Freshness";

    /// <summary>How long a pinned SBOM stays fresh (default: 30 days).</summary>
    public TimeSpan SbomTtl { get; init; } = TimeSpan.FromDays(30);

    /// <summary>How long reachability data stays fresh (default: 7 days).</summary>
    public TimeSpan ReachabilityTtl { get; init; } = TimeSpan.FromDays(7);

    /// <summary>How long VEX statements stay fresh (default: 30 days).</summary>
    public TimeSpan VexTtl { get; init; } = TimeSpan.FromDays(30);

    /// <summary>How long EPSS scores stay fresh (default: 7 days, per advisory).</summary>
    public TimeSpan EpssTtl { get; init; } = TimeSpan.FromDays(7);

    /// <summary>How long CVSS scores stay fresh (default: 90 days — they change infrequently).</summary>
    public TimeSpan CvssTtl { get; init; } = TimeSpan.FromDays(90);

    /// <summary>How long KEV membership stays fresh (default: 1 day — checked frequently).</summary>
    public TimeSpan KevTtl { get; init; } = TimeSpan.FromDays(1);
}

View File

@@ -0,0 +1,302 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-001 - Create ScoringManifest Model
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.DeltaVerdict.Manifest;
/// <summary>
/// Immutable scoring manifest that captures all scoring configuration for deterministic replay.
/// Content-addressed via canonical JSON + SHA-256.
/// </summary>
public sealed record ScoringManifest
{
    /// <summary>
    /// Schema version identifier (e.g., "stella-scoring/1.0.0").
    /// </summary>
    [JsonPropertyName("schema_version")]
    public required string SchemaVersion { get; init; }
    /// <summary>
    /// Scoring version identifier (e.g., "v2026-01-18-1").
    /// Bumps on any config change.
    /// </summary>
    [JsonPropertyName("scoring_version")]
    public required string ScoringVersion { get; init; }
    /// <summary>
    /// Dimension weights for score calculation.
    /// </summary>
    [JsonPropertyName("weights")]
    public required ScoringWeights Weights { get; init; }
    /// <summary>
    /// Input normalizers for dimension values.
    /// </summary>
    [JsonPropertyName("normalizers")]
    public required ScoringNormalizers Normalizers { get; init; }
    /// <summary>
    /// Trusted VEX signing key fingerprints.
    /// </summary>
    [JsonPropertyName("trusted_vex_keys")]
    public required ImmutableArray<string> TrustedVexKeys { get; init; }
    /// <summary>
    /// SHA-256 hash of the scoring code version.
    /// </summary>
    [JsonPropertyName("code_hash")]
    public required string CodeHash { get; init; }
    /// <summary>
    /// Manifest creation timestamp (UTC).
    /// </summary>
    [JsonPropertyName("created_at")]
    public DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// Computed canonical digest of this manifest (sha256:...).
    /// Excluded from digest computation.
    /// </summary>
    [JsonPropertyName("manifest_digest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ManifestDigest { get; init; }
    /// <summary>
    /// DSSE envelope containing the signed manifest.
    /// Excluded from digest computation.
    /// </summary>
    [JsonPropertyName("dsse_signature")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? DsseSignature { get; init; }
    /// <summary>
    /// Rekor transparency log anchor. Presumably also excluded from digest
    /// computation, like the other post-signing fields — confirm against the
    /// digest-computation routine.
    /// </summary>
    [JsonPropertyName("rekor_anchor")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public RekorLinkage? RekorAnchor { get; init; }
    /// <summary>
    /// Current schema version constant.
    /// </summary>
    public const string CurrentSchemaVersion = "stella-scoring/1.0.0";
}
/// <summary>
/// Scoring dimension weights as specified in the advisory.
/// All weights should sum to approximately 1.0.
/// </summary>
public sealed record ScoringWeights
{
    /// <summary>Base CVSS score weight [0, 1].</summary>
    [JsonPropertyName("cvss_base")]
    public required double CvssBase { get; init; }

    /// <summary>EPSS (Exploit Prediction Scoring System) weight [0, 1].</summary>
    [JsonPropertyName("epss")]
    public required double Epss { get; init; }

    /// <summary>Reachability analysis weight [0, 1].</summary>
    [JsonPropertyName("reachability")]
    public required double Reachability { get; init; }

    /// <summary>Exploit maturity weight [0, 1].</summary>
    [JsonPropertyName("exploit_maturity")]
    public required double ExploitMaturity { get; init; }

    /// <summary>Patch proof confidence weight [0, 1].</summary>
    [JsonPropertyName("patch_proof_confidence")]
    public required double PatchProofConfidence { get; init; }

    /// <summary>Default weights per advisory specification.</summary>
    public static ScoringWeights Default => new()
    {
        CvssBase = 0.25,
        Epss = 0.20,
        Reachability = 0.25,
        ExploitMaturity = 0.15,
        PatchProofConfidence = 0.15
    };

    /// <summary>Sum of all weights (should be ~1.0).</summary>
    [JsonIgnore]
    public double Sum => CvssBase + Epss + Reachability + ExploitMaturity + PatchProofConfidence;

    /// <summary>
    /// Validates that every weight is a finite number in [0, 1] and that the
    /// total falls within [0.99, 1.01].
    /// </summary>
    /// <returns>Human-readable error messages; empty when the weights are valid.</returns>
    public IReadOnlyList<string> Validate()
    {
        var errors = new List<string>();
        foreach (var (name, value) in EnumerateWeights())
        {
            if (double.IsNaN(value) || double.IsInfinity(value))
            {
                errors.Add($"{name} must be a valid number, got {value}");
            }
            else if (value is < 0.0 or > 1.0)
            {
                errors.Add($"{name} must be in range [0, 1], got {value}");
            }
        }

        // Allow a small tolerance around 1.0 so hand-edited decimals pass.
        var total = Sum;
        if (total is < 0.99 or > 1.01)
        {
            errors.Add($"Weights should sum to ~1.0, got {total:F3}");
        }
        return errors;
    }

    // Pairs each JSON field name with its weight, in the advisory's canonical
    // order, so validation messages come out in a stable sequence.
    private IEnumerable<(string Name, double Value)> EnumerateWeights()
    {
        yield return ("cvss_base", CvssBase);
        yield return ("epss", Epss);
        yield return ("reachability", Reachability);
        yield return ("exploit_maturity", ExploitMaturity);
        yield return ("patch_proof_confidence", PatchProofConfidence);
    }
}
/// <summary>
/// Input normalization rules for scoring dimensions.
/// Each range maps a raw dimension value onto [0, 1] via
/// <see cref="NormalizerRange.Normalize"/> before weights are applied.
/// Property declaration order is part of the non-canonical JSON layout; do not reorder.
/// </summary>
public sealed record ScoringNormalizers
{
    /// <summary>
    /// CVSS score normalization range (raw CVSS is on a 0–10 scale).
    /// </summary>
    [JsonPropertyName("cvss_range")]
    public required NormalizerRange CvssRange { get; init; }
    /// <summary>
    /// EPSS score normalization range (EPSS is already a probability in [0, 1]).
    /// </summary>
    [JsonPropertyName("epss_range")]
    public required NormalizerRange EpssRange { get; init; }
    /// <summary>
    /// Reachability score normalization range.
    /// </summary>
    [JsonPropertyName("reachability_range")]
    public required NormalizerRange ReachabilityRange { get; init; }
    /// <summary>
    /// Exploit maturity normalization range.
    /// </summary>
    [JsonPropertyName("exploit_maturity_range")]
    public required NormalizerRange ExploitMaturityRange { get; init; }
    /// <summary>
    /// Default normalizers: CVSS over [0, 10]; EPSS, reachability and exploit
    /// maturity are passed through unchanged over [0, 1].
    /// </summary>
    public static ScoringNormalizers Default => new()
    {
        CvssRange = new NormalizerRange { Min = 0.0, Max = 10.0 },
        EpssRange = new NormalizerRange { Min = 0.0, Max = 1.0 },
        ReachabilityRange = new NormalizerRange { Min = 0.0, Max = 1.0 },
        ExploitMaturityRange = new NormalizerRange { Min = 0.0, Max = 1.0 }
    };
}
/// <summary>
/// Normalization range for a scoring dimension: linearly rescales raw values
/// from [Min, Max] onto [0, 1], clamping out-of-range inputs to the nearest bound.
/// </summary>
public sealed record NormalizerRange
{
    /// <summary>Minimum expected input value.</summary>
    [JsonPropertyName("min")]
    public required double Min { get; init; }

    /// <summary>Maximum expected input value.</summary>
    [JsonPropertyName("max")]
    public required double Max { get; init; }

    /// <summary>
    /// Normalizes a value to the [0, 1] range.
    /// Values below <see cref="Min"/> map to 0, above <see cref="Max"/> to 1.
    /// </summary>
    /// <param name="value">Raw dimension value.</param>
    /// <returns>Normalized value in [0, 1]; 0.0 for a degenerate (Max &lt;= Min) range.</returns>
    public double Normalize(double value)
    {
        // Degenerate range: nothing to interpolate over, so report 0 rather
        // than dividing by zero (or a negative span) below.
        if (Max <= Min) return 0.0;
        return (Math.Clamp(value, Min, Max) - Min) / (Max - Min);
    }
}
/// <summary>
/// Rekor transparency log linkage for audit trail.
/// Records where a signed artifact was anchored and, optionally, the inclusion
/// proof needed to verify the anchor offline without contacting the log.
/// </summary>
public sealed record RekorLinkage
{
    /// <summary>
    /// Rekor entry UUID assigned by the log on submission.
    /// </summary>
    [JsonPropertyName("uuid")]
    public required string Uuid { get; init; }
    /// <summary>
    /// Index of the entry within the Rekor log.
    /// </summary>
    [JsonPropertyName("log_index")]
    public required long LogIndex { get; init; }
    /// <summary>
    /// Timestamp when entry was integrated into log (Unix seconds).
    /// </summary>
    [JsonPropertyName("integrated_time")]
    public required long IntegratedTime { get; init; }
    /// <summary>
    /// Merkle inclusion proof for offline verification.
    /// Omitted from JSON when null (e.g., when the log response carried no proof).
    /// </summary>
    [JsonPropertyName("inclusion_proof")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public InclusionProof? InclusionProof { get; init; }
}
/// <summary>
/// Merkle tree inclusion proof for offline verification.
/// Captures the log state at inclusion time plus the sibling-hash path needed
/// to recompute the root hash from the entry.
/// </summary>
public sealed record InclusionProof
{
    /// <summary>
    /// Tree size at time of inclusion.
    /// </summary>
    [JsonPropertyName("tree_size")]
    public required long TreeSize { get; init; }
    /// <summary>
    /// Root hash at time of inclusion.
    /// </summary>
    [JsonPropertyName("root_hash")]
    public required string RootHash { get; init; }
    /// <summary>
    /// Sibling hashes for proof verification, ordered as returned by the log.
    /// </summary>
    [JsonPropertyName("hashes")]
    public required ImmutableArray<string> Hashes { get; init; }
    /// <summary>
    /// Log ID identifying the transparency log instance.
    /// </summary>
    [JsonPropertyName("log_id")]
    public required string LogId { get; init; }
}

View File

@@ -0,0 +1,459 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-006 - Manifest Version Bump Workflow
using System.Collections.Immutable;
using System.Globalization;
using StellaOps.DeltaVerdict.Signing;
namespace StellaOps.DeltaVerdict.Manifest;
/// <summary>
/// Service for managing scoring manifest versions: detecting when a proposed
/// manifest differs from the current one, generating date-based version strings,
/// and producing bumped (optionally signed and Rekor-anchored) manifests.
/// </summary>
public interface IScoringManifestVersioner
{
    /// <summary>
    /// Compares two manifests to determine if a version bump is required.
    /// Does not mutate either manifest.
    /// </summary>
    /// <param name="current">The current manifest.</param>
    /// <param name="proposed">The proposed manifest.</param>
    /// <returns>Comparison result with change details.</returns>
    ManifestComparisonResult Compare(ScoringManifest current, ScoringManifest proposed);
    /// <summary>
    /// Generates the next version string based on the current version.
    /// The sequence suffix resets to 1 on a new day and increments otherwise.
    /// </summary>
    /// <param name="currentVersion">Current version string (e.g., "v2026-01-18-1").</param>
    /// <returns>Next version string (e.g., "v2026-01-18-2" or "v2026-01-19-1").</returns>
    string GenerateNextVersion(string? currentVersion = null);
    /// <summary>
    /// Creates a bumped manifest with new version, preserving history.
    /// </summary>
    /// <param name="current">The current manifest.</param>
    /// <param name="proposed">The proposed changes.</param>
    /// <param name="reason">Reason for the version bump.</param>
    /// <returns>The bumped manifest result.</returns>
    ManifestBumpResult Bump(ScoringManifest current, ScoringManifest proposed, string reason);
    /// <summary>
    /// Creates a bumped manifest and signs/anchors it.
    /// Anchoring is performed only when <paramref name="anchorOptions"/> is supplied.
    /// </summary>
    Task<ManifestBumpResult> BumpAndSignAsync(
        ScoringManifest current,
        ScoringManifest proposed,
        string reason,
        ManifestSigningOptions signingOptions,
        ManifestAnchorOptions? anchorOptions = null,
        CancellationToken ct = default);
}
/// <summary>
/// Default implementation of <see cref="IScoringManifestVersioner"/>.
/// Version strings use the scheme <c>v{yyyy-MM-dd}-{sequence}</c>: the sequence
/// restarts at 1 each day and increments for repeated bumps on the same day.
/// All formatting/parsing uses the invariant culture so version strings and
/// recorded change values are deterministic across host locales (CA1305).
/// </summary>
public sealed class ScoringManifestVersioner : IScoringManifestVersioner
{
    // Tolerance for floating-point comparisons of weights and normalizer
    // bounds; avoids spurious bumps from representation noise while still
    // detecting real configuration edits.
    private const double Tolerance = 0.0001;

    private readonly IScoringManifestSigningService _signingService;
    private readonly IScoringManifestRekorAnchorService? _anchorService;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a versioner with the default signing service, no Rekor
    /// anchoring, and the system clock.
    /// </summary>
    public ScoringManifestVersioner()
        : this(new ScoringManifestSigningService(), null, TimeProvider.System)
    {
    }

    /// <summary>
    /// Creates a versioner with explicit collaborators.
    /// </summary>
    /// <param name="signingService">Service used to sign manifests and compute digests. Required.</param>
    /// <param name="anchorService">Optional Rekor anchoring service; when null, anchoring is skipped.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="signingService"/> is null.</exception>
    public ScoringManifestVersioner(
        IScoringManifestSigningService signingService,
        IScoringManifestRekorAnchorService? anchorService = null,
        TimeProvider? timeProvider = null)
    {
        _signingService = signingService ?? throw new ArgumentNullException(nameof(signingService));
        _anchorService = anchorService;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public ManifestComparisonResult Compare(ScoringManifest current, ScoringManifest proposed)
    {
        ArgumentNullException.ThrowIfNull(current);
        ArgumentNullException.ThrowIfNull(proposed);
        var changes = new List<ManifestChange>();

        // Schema version: any change is structural and always requires a bump.
        if (current.SchemaVersion != proposed.SchemaVersion)
        {
            changes.Add(new ManifestChange(
                "schema_version",
                current.SchemaVersion,
                proposed.SchemaVersion,
                ManifestChangeType.SchemaChange));
        }

        CompareWeights(current.Weights, proposed.Weights, changes);
        CompareNormalizers(current.Normalizers, proposed.Normalizers, changes);

        // Trusted VEX keys: report additions and removals separately so the
        // history records exactly which fingerprints changed.
        if (!current.TrustedVexKeys.SequenceEqual(proposed.TrustedVexKeys))
        {
            var added = proposed.TrustedVexKeys.Except(current.TrustedVexKeys).ToList();
            var removed = current.TrustedVexKeys.Except(proposed.TrustedVexKeys).ToList();
            if (added.Count > 0)
            {
                changes.Add(new ManifestChange(
                    "trusted_vex_keys.added",
                    string.Join(", ", current.TrustedVexKeys),
                    string.Join(", ", added),
                    ManifestChangeType.TrustChange));
            }
            if (removed.Count > 0)
            {
                changes.Add(new ManifestChange(
                    "trusted_vex_keys.removed",
                    string.Join(", ", removed),
                    string.Join(", ", proposed.TrustedVexKeys),
                    ManifestChangeType.TrustChange));
            }
        }

        // Code hash: signals that the scoring implementation itself changed.
        if (current.CodeHash != proposed.CodeHash)
        {
            changes.Add(new ManifestChange(
                "code_hash",
                current.CodeHash,
                proposed.CodeHash,
                ManifestChangeType.CodeChange));
        }

        return new ManifestComparisonResult
        {
            RequiresBump = changes.Count > 0,
            Changes = changes.ToImmutableArray(),
            CurrentDigest = _signingService.ComputeDigest(current),
            ProposedDigest = _signingService.ComputeDigest(proposed)
        };
    }

    /// <inheritdoc />
    public string GenerateNextVersion(string? currentVersion = null)
    {
        var now = _timeProvider.GetUtcNow();
        // Invariant culture: version strings are machine-readable identifiers
        // and must not vary with the host's calendar, digit set, or separators.
        var todayDatePart = now.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
        var datePrefix = $"v{todayDatePart}";

        // No, or unparsable, current version: start today's sequence at 1.
        if (string.IsNullOrEmpty(currentVersion) || !currentVersion.StartsWith('v'))
        {
            return $"{datePrefix}-1";
        }

        // Expected shape after stripping "v": yyyy-MM-dd-N => 4 parts.
        var parts = currentVersion[1..].Split('-');
        if (parts.Length < 4)
        {
            return $"{datePrefix}-1";
        }

        // Same date: increment the sequence; otherwise (or on a malformed
        // sequence component) start a fresh sequence for today.
        var currentDatePart = $"{parts[0]}-{parts[1]}-{parts[2]}";
        if (currentDatePart == todayDatePart
            && int.TryParse(parts[3], NumberStyles.Integer, CultureInfo.InvariantCulture, out var sequence))
        {
            return $"{datePrefix}-{sequence + 1}";
        }
        return $"{datePrefix}-1";
    }

    /// <inheritdoc />
    public ManifestBumpResult Bump(ScoringManifest current, ScoringManifest proposed, string reason)
    {
        ArgumentNullException.ThrowIfNull(current);
        ArgumentNullException.ThrowIfNull(proposed);
        ArgumentException.ThrowIfNullOrWhiteSpace(reason);

        var comparison = Compare(current, proposed);
        if (!comparison.RequiresBump)
        {
            return ManifestBumpResult.NoBumpRequired(current, "No changes detected");
        }

        var newVersion = GenerateNextVersion(current.ScoringVersion);
        var now = _timeProvider.GetUtcNow();

        // Fresh version gets fresh provenance: digest, signature and anchor are
        // cleared here and recomputed by the signing/anchoring steps.
        var bumpedManifest = proposed with
        {
            ScoringVersion = newVersion,
            CreatedAt = now,
            ManifestDigest = null,
            DsseSignature = null,
            RekorAnchor = null
        };

        var historyEntry = new ManifestVersionHistoryEntry
        {
            PreviousVersion = current.ScoringVersion,
            NewVersion = newVersion,
            BumpedAt = now,
            Reason = reason,
            Changes = comparison.Changes,
            PreviousDigest = comparison.CurrentDigest
        };
        return ManifestBumpResult.Success(bumpedManifest, historyEntry, comparison);
    }

    /// <inheritdoc />
    public async Task<ManifestBumpResult> BumpAndSignAsync(
        ScoringManifest current,
        ScoringManifest proposed,
        string reason,
        ManifestSigningOptions signingOptions,
        ManifestAnchorOptions? anchorOptions = null,
        CancellationToken ct = default)
    {
        var bumpResult = Bump(current, proposed, reason);
        if (!bumpResult.IsSuccess || bumpResult.BumpedManifest is null)
        {
            return bumpResult;
        }

        var signedManifest = await _signingService.SignAsync(bumpResult.BumpedManifest, signingOptions, ct);

        // Anchoring is opt-in: it requires both explicit options and a
        // configured anchor service. An anchoring failure fails the whole bump
        // (the signed-but-unanchored manifest is intentionally discarded).
        if (anchorOptions is not null && _anchorService is not null)
        {
            var anchorResult = await _anchorService.AnchorAsync(signedManifest, anchorOptions, ct);
            if (!anchorResult.IsSuccess)
            {
                return ManifestBumpResult.Fail($"Anchoring failed: {anchorResult.Error}");
            }
            return bumpResult with { BumpedManifest = anchorResult.AnchoredManifest };
        }
        return bumpResult with { BumpedManifest = signedManifest };
    }

    // Records a WeightChange for each weight that moved by more than Tolerance.
    private static void CompareWeights(ScoringWeights current, ScoringWeights proposed, List<ManifestChange> changes)
    {
        CompareWeight("weights.cvss_base", current.CvssBase, proposed.CvssBase, changes);
        CompareWeight("weights.epss", current.Epss, proposed.Epss, changes);
        CompareWeight("weights.reachability", current.Reachability, proposed.Reachability, changes);
        CompareWeight("weights.exploit_maturity", current.ExploitMaturity, proposed.ExploitMaturity, changes);
        CompareWeight("weights.patch_proof_confidence", current.PatchProofConfidence, proposed.PatchProofConfidence, changes);
    }

    private static void CompareWeight(string name, double current, double proposed, List<ManifestChange> changes)
    {
        if (Math.Abs(current - proposed) > Tolerance)
        {
            // Invariant culture so recorded old/new values are locale-stable.
            changes.Add(new ManifestChange(
                name,
                current.ToString("F4", CultureInfo.InvariantCulture),
                proposed.ToString("F4", CultureInfo.InvariantCulture),
                ManifestChangeType.WeightChange));
        }
    }

    // Records a NormalizerChange for each range whose bounds moved by more than Tolerance.
    private static void CompareNormalizers(ScoringNormalizers current, ScoringNormalizers proposed, List<ManifestChange> changes)
    {
        CompareRange("normalizers.cvss_range", current.CvssRange, proposed.CvssRange, changes);
        CompareRange("normalizers.epss_range", current.EpssRange, proposed.EpssRange, changes);
        CompareRange("normalizers.reachability_range", current.ReachabilityRange, proposed.ReachabilityRange, changes);
        CompareRange("normalizers.exploit_maturity_range", current.ExploitMaturityRange, proposed.ExploitMaturityRange, changes);
    }

    private static void CompareRange(string name, NormalizerRange current, NormalizerRange proposed, List<ManifestChange> changes)
    {
        if (Math.Abs(current.Min - proposed.Min) > Tolerance ||
            Math.Abs(current.Max - proposed.Max) > Tolerance)
        {
            changes.Add(new ManifestChange(
                name,
                FormatRange(current),
                FormatRange(proposed),
                ManifestChangeType.NormalizerChange));
        }
    }

    // "[min, max]" rendering with invariant culture for locale-stable records.
    private static string FormatRange(NormalizerRange range) =>
        string.Create(CultureInfo.InvariantCulture, $"[{range.Min:F2}, {range.Max:F2}]");
}
/// <summary>
/// Result of comparing two manifests.
/// Produced by <see cref="IScoringManifestVersioner.Compare"/>.
/// </summary>
public sealed record ManifestComparisonResult
{
    /// <summary>
    /// Whether a version bump is required (true iff any change was detected).
    /// </summary>
    public required bool RequiresBump { get; init; }
    /// <summary>
    /// List of detected changes; empty when the manifests are equivalent.
    /// </summary>
    public ImmutableArray<ManifestChange> Changes { get; init; } = ImmutableArray<ManifestChange>.Empty;
    /// <summary>
    /// Digest of the current manifest, as computed by the signing service.
    /// </summary>
    public string? CurrentDigest { get; init; }
    /// <summary>
    /// Digest of the proposed manifest, as computed by the signing service.
    /// </summary>
    public string? ProposedDigest { get; init; }
}
/// <summary>
/// A single change detected between manifests.
/// </summary>
/// <param name="Field">Dotted path of the changed field (e.g., "weights.epss").</param>
/// <param name="OldValue">String rendering of the previous value.</param>
/// <param name="NewValue">String rendering of the proposed value.</param>
/// <param name="ChangeType">Category of the change.</param>
public sealed record ManifestChange(
    string Field,
    string OldValue,
    string NewValue,
    ManifestChangeType ChangeType);
/// <summary>
/// Type of manifest change; categorizes entries in
/// <see cref="ManifestComparisonResult.Changes"/>.
/// </summary>
public enum ManifestChangeType
{
    /// <summary>Schema version change.</summary>
    SchemaChange,
    /// <summary>Scoring weight value change.</summary>
    WeightChange,
    /// <summary>Normalizer range change.</summary>
    NormalizerChange,
    /// <summary>Trusted VEX signing key added or removed.</summary>
    TrustChange,
    /// <summary>Code hash change (algorithm update).</summary>
    CodeChange
}
/// <summary>
/// Result of a manifest version bump.
/// Construct via <see cref="Success"/>, <see cref="NoBumpRequired"/>, or <see cref="Fail"/>.
/// </summary>
public sealed record ManifestBumpResult
{
    /// <summary>
    /// Whether the operation succeeded. Note that a successful no-op (no
    /// changes detected) also reports true; check <see cref="BumpRequired"/>
    /// to distinguish the two.
    /// </summary>
    public required bool IsSuccess { get; init; }
    /// <summary>
    /// Whether a bump was required (i.e., changes were detected).
    /// </summary>
    public bool BumpRequired { get; init; }
    /// <summary>
    /// The bumped manifest (may be signed and anchored).
    /// </summary>
    public ScoringManifest? BumpedManifest { get; init; }
    /// <summary>
    /// History entry for the bump; null when no bump was performed.
    /// </summary>
    public ManifestVersionHistoryEntry? HistoryEntry { get; init; }
    /// <summary>
    /// Comparison result that triggered the bump.
    /// </summary>
    public ManifestComparisonResult? Comparison { get; init; }
    /// <summary>
    /// Error message if bump failed. NOTE(review): <see cref="NoBumpRequired"/>
    /// also stores its informational message here despite IsSuccess being true;
    /// do not treat a non-null Error alone as a failure signal.
    /// </summary>
    public string? Error { get; init; }
    /// <summary>
    /// Creates a successful result for a performed bump.
    /// </summary>
    public static ManifestBumpResult Success(
        ScoringManifest manifest,
        ManifestVersionHistoryEntry historyEntry,
        ManifestComparisonResult comparison) => new()
    {
        IsSuccess = true,
        BumpRequired = true,
        BumpedManifest = manifest,
        HistoryEntry = historyEntry,
        Comparison = comparison
    };
    /// <summary>
    /// Creates a successful no-op result: the manifests were equivalent, so the
    /// current manifest is returned unchanged and the message goes into Error.
    /// </summary>
    public static ManifestBumpResult NoBumpRequired(ScoringManifest manifest, string message) => new()
    {
        IsSuccess = true,
        BumpRequired = false,
        BumpedManifest = manifest,
        Error = message
    };
    /// <summary>
    /// Creates a failed result carrying only an error message.
    /// </summary>
    public static ManifestBumpResult Fail(string error) => new()
    {
        IsSuccess = false,
        Error = error
    };
}
/// <summary>
/// History entry for a manifest version bump: an audit record linking the old
/// and new versions together with the changes that motivated the bump.
/// </summary>
public sealed record ManifestVersionHistoryEntry
{
    /// <summary>
    /// Previous version string (e.g., "v2026-01-18-1").
    /// </summary>
    public required string PreviousVersion { get; init; }
    /// <summary>
    /// New version string assigned by the bump.
    /// </summary>
    public required string NewVersion { get; init; }
    /// <summary>
    /// Timestamp of the bump (UTC).
    /// </summary>
    public required DateTimeOffset BumpedAt { get; init; }
    /// <summary>
    /// Human-supplied reason for the bump.
    /// </summary>
    public required string Reason { get; init; }
    /// <summary>
    /// Changes that triggered the bump.
    /// </summary>
    public ImmutableArray<ManifestChange> Changes { get; init; } = ImmutableArray<ManifestChange>.Empty;
    /// <summary>
    /// Digest of the previous manifest, for audit cross-referencing.
    /// </summary>
    public string? PreviousDigest { get; init; }
}

View File

@@ -0,0 +1,350 @@
// -----------------------------------------------------------------------------
// VerdictInputsSerializer.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-008 - VerdictInputs Serialization with Provenance
// Description: Canonical serialization for PinnedScoringInputs with snake_case naming
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.DeltaVerdict.Inputs;
namespace StellaOps.DeltaVerdict.Serialization;
/// <summary>
/// Serializer for <see cref="PinnedScoringInputs"/> with full provenance support.
/// Emits snake_case property names and RFC 8785 canonical JSON so that digests
/// are stable and verdicts can be replayed deterministically.
/// </summary>
public static class VerdictInputsSerializer
{
    // Compact snake_case options used for canonical (digest-bearing) output.
    private static readonly JsonSerializerOptions CompactOptions = CreateOptions(indented: false);

    // Indented variant for human-readable output only; never hashed.
    private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(indented: true);

    // Single factory keeps both option sets identical apart from indentation.
    private static JsonSerializerOptions CreateOptions(bool indented) => new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        WriteIndented = indented,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
    };

    /// <summary>
    /// Serializes pinned scoring inputs to canonical JSON.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to serialize.</param>
    /// <returns>Canonical JSON string with snake_case naming.</returns>
    /// <exception cref="ArgumentNullException">If <paramref name="inputs"/> is null.</exception>
    public static string Serialize(PinnedScoringInputs inputs)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        var encoded = JsonSerializer.SerializeToUtf8Bytes(inputs, CompactOptions);
        return Encoding.UTF8.GetString(CanonJson.CanonicalizeParsedJson(encoded));
    }

    /// <summary>
    /// Serializes pinned scoring inputs to versioned canonical JSON, embedding a
    /// canon version marker for forward compatibility.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to serialize.</param>
    /// <param name="version">Canon version; null selects the current version.</param>
    /// <returns>Versioned canonical JSON string.</returns>
    public static string SerializeVersioned(
        PinnedScoringInputs inputs,
        string? version = null)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        var encoded = JsonSerializer.SerializeToUtf8Bytes(inputs, CompactOptions);
        byte[] canonical;
        if (version is null)
        {
            canonical = CanonJson.CanonicalizeVersioned(encoded);
        }
        else
        {
            canonical = CanonJson.CanonicalizeWithVersion(encoded, version);
        }
        return Encoding.UTF8.GetString(canonical);
    }

    /// <summary>
    /// Serializes pinned scoring inputs to indented JSON for debugging/display.
    /// Not canonical; never use this output for digests.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to serialize.</param>
    /// <returns>Indented JSON string with snake_case naming.</returns>
    public static string SerializeIndented(PinnedScoringInputs inputs)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        return JsonSerializer.Serialize(inputs, PrettyOptions);
    }

    /// <summary>
    /// Deserializes JSON to pinned scoring inputs.
    /// </summary>
    /// <param name="json">The JSON string to deserialize.</param>
    /// <returns>Deserialized pinned scoring inputs.</returns>
    /// <exception cref="ArgumentException">If <paramref name="json"/> is null or whitespace.</exception>
    /// <exception cref="InvalidOperationException">If deserialization yields null.</exception>
    public static PinnedScoringInputs Deserialize(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            throw new ArgumentException("JSON cannot be null or empty", nameof(json));
        }
        var parsed = JsonSerializer.Deserialize<PinnedScoringInputs>(json, CompactOptions);
        return parsed ?? throw new InvalidOperationException("Failed to deserialize pinned scoring inputs");
    }

    /// <summary>
    /// Computes the SHA-256 digest of the canonical form, with the mutable
    /// ManifestDigest field blanked so the digest is self-consistent.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to hash.</param>
    /// <returns>Lowercase hex-encoded SHA-256 digest (no prefix).</returns>
    public static string ComputeDigest(PinnedScoringInputs inputs)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        var canonical = Serialize(inputs with { ManifestDigest = null });
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(digest).ToLowerInvariant();
    }

    /// <summary>
    /// Computes the SHA-256 digest with a <c>sha256:</c> prefix.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to hash.</param>
    /// <returns>Prefixed SHA-256 digest (sha256:...).</returns>
    public static string ComputePrefixedDigest(PinnedScoringInputs inputs) =>
        $"sha256:{ComputeDigest(inputs)}";

    /// <summary>
    /// Returns a copy of the inputs with the manifest digest populated.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs.</param>
    /// <returns>Inputs with ManifestDigest populated.</returns>
    public static PinnedScoringInputs WithDigest(PinnedScoringInputs inputs) =>
        inputs with { ManifestDigest = ComputePrefixedDigest(inputs) };

    /// <summary>
    /// Verifies that the stored manifest digest matches the recomputed digest.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to verify.</param>
    /// <returns>True if a digest is present and matches; false otherwise.</returns>
    public static bool VerifyDigest(PinnedScoringInputs inputs)
    {
        return inputs.ManifestDigest is { } declared
            && string.Equals(declared, ComputePrefixedDigest(inputs), StringComparison.Ordinal);
    }
}
/// <summary>
/// Wrapper for serializing a verdict bundle with full input provenance.
/// Serialized/digested via <see cref="VerdictBundleSerializer"/>.
/// </summary>
public sealed record VerdictBundle
{
    /// <summary>Schema version for forward compatibility (currently "1.0.0").</summary>
    [JsonPropertyName("schema_version")]
    public string SchemaVersion { get; init; } = "1.0.0";
    /// <summary>Unique bundle identifier.</summary>
    [JsonPropertyName("bundle_id")]
    public required string BundleId { get; init; }
    /// <summary>When the bundle was created.</summary>
    [JsonPropertyName("created_at")]
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>Pinned scoring inputs with full provenance.</summary>
    [JsonPropertyName("inputs")]
    public required PinnedScoringInputs Inputs { get; init; }
    /// <summary>Optional validation result; omitted from JSON when null.</summary>
    [JsonPropertyName("validation")]
    public ValidationSummary? Validation { get; init; }
    /// <summary>
    /// SHA-256 digest of the bundle, sha256:-prefixed, computed with this
    /// field blanked so the digest is self-consistent.
    /// </summary>
    [JsonPropertyName("bundle_digest")]
    public string? BundleDigest { get; init; }
}
/// <summary>
/// Validation summary for the verdict bundle: aggregate status plus counters
/// describing how trustworthy the pinned inputs were at validation time.
/// </summary>
public sealed record ValidationSummary
{
    /// <summary>Overall validation status.</summary>
    [JsonPropertyName("status")]
    public required ValidationStatus Status { get; init; }
    /// <summary>Combined confidence score [0, 1].</summary>
    [JsonPropertyName("combined_confidence")]
    public double CombinedConfidence { get; init; }
    /// <summary>Count of stale inputs.</summary>
    [JsonPropertyName("stale_input_count")]
    public int StaleInputCount { get; init; }
    /// <summary>Count of unsigned inputs.</summary>
    [JsonPropertyName("unsigned_input_count")]
    public int UnsignedInputCount { get; init; }
    /// <summary>Whether VEX override is allowed.</summary>
    [JsonPropertyName("vex_override_allowed")]
    public bool VexOverrideAllowed { get; init; }
    /// <summary>Per-input validation audit entries; omitted from JSON when null.</summary>
    [JsonPropertyName("audit_entries")]
    public IReadOnlyList<AuditEntry>? AuditEntries { get; init; }
}
/// <summary>
/// Audit entry for validation tracking: records how one input's confidence was
/// adjusted during validation.
/// </summary>
public sealed record AuditEntry
{
    /// <summary>Input type that was validated.</summary>
    [JsonPropertyName("input_type")]
    public required string InputType { get; init; }
    /// <summary>Reason for the validation result.</summary>
    [JsonPropertyName("reason")]
    public required string Reason { get; init; }
    /// <summary>Confidence before discount.</summary>
    [JsonPropertyName("original_confidence")]
    public double OriginalConfidence { get; init; }
    /// <summary>Discount multiplier applied (1.0 means no discount).</summary>
    [JsonPropertyName("discount_applied")]
    public double DiscountApplied { get; init; }
    /// <summary>
    /// Final confidence after discount. NOTE(review): presumably
    /// original × discount — confirm against the producing validator.
    /// </summary>
    [JsonPropertyName("final_confidence")]
    public double FinalConfidence { get; init; }
}
/// <summary>
/// JSON converter that renders enum values as snake_case strings.
/// System.Text.Json ignores <see cref="JsonPropertyNameAttribute"/> on enum
/// members, and a type-level parameterless <see cref="JsonStringEnumConverter"/>
/// takes precedence over the snake_case converter registered in serializer
/// options — so the naming policy must be baked into the attribute-applied
/// converter itself to get "partial_failure" instead of "PartialFailure".
/// </summary>
internal sealed class SnakeCaseLowerEnumConverter : JsonStringEnumConverter
{
    public SnakeCaseLowerEnumConverter()
        : base(JsonNamingPolicy.SnakeCaseLower)
    {
    }
}

/// <summary>
/// Validation status enum. Serializes as snake_case strings:
/// "valid", "degraded", "partial_failure", "failed".
/// </summary>
[JsonConverter(typeof(SnakeCaseLowerEnumConverter))]
public enum ValidationStatus
{
    /// <summary>All inputs valid with high confidence.</summary>
    Valid,
    /// <summary>Inputs valid but with some degradation.</summary>
    Degraded,
    /// <summary>Some inputs failed validation.</summary>
    PartialFailure,
    /// <summary>Critical validation failure.</summary>
    Failed
}
/// <summary>
/// Serializer for <see cref="VerdictBundle"/> with full provenance support.
/// Emits snake_case property names and canonical JSON for stable digests.
/// </summary>
public static class VerdictBundleSerializer
{
    // Snake_case options shared by serialization and deserialization.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
    };

    /// <summary>
    /// Serializes a verdict bundle to canonical JSON.
    /// </summary>
    /// <param name="bundle">The bundle to serialize.</param>
    /// <returns>Canonical JSON string with snake_case naming.</returns>
    /// <exception cref="ArgumentNullException">If <paramref name="bundle"/> is null.</exception>
    public static string Serialize(VerdictBundle bundle)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        var encoded = JsonSerializer.SerializeToUtf8Bytes(bundle, SerializerOptions);
        return Encoding.UTF8.GetString(CanonJson.CanonicalizeParsedJson(encoded));
    }

    /// <summary>
    /// Deserializes JSON to a verdict bundle.
    /// </summary>
    /// <param name="json">The JSON string to deserialize.</param>
    /// <returns>The deserialized bundle.</returns>
    /// <exception cref="ArgumentException">If <paramref name="json"/> is null or whitespace.</exception>
    /// <exception cref="InvalidOperationException">If deserialization yields null.</exception>
    public static VerdictBundle Deserialize(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            throw new ArgumentException("JSON cannot be null or empty", nameof(json));
        }
        var bundle = JsonSerializer.Deserialize<VerdictBundle>(json, SerializerOptions);
        return bundle ?? throw new InvalidOperationException("Failed to deserialize verdict bundle");
    }

    /// <summary>
    /// Computes the SHA-256 digest of the canonical form, with the mutable
    /// BundleDigest field blanked so the digest is self-consistent.
    /// </summary>
    /// <param name="bundle">The bundle to hash.</param>
    /// <returns>Prefixed SHA-256 digest (sha256:...).</returns>
    public static string ComputeDigest(VerdictBundle bundle)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        var canonical = Serialize(bundle with { BundleDigest = null });
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
    }

    /// <summary>
    /// Returns a copy of the bundle with the digest populated.
    /// </summary>
    public static VerdictBundle WithDigest(VerdictBundle bundle) =>
        bundle with { BundleDigest = ComputeDigest(bundle) };

    /// <summary>
    /// Verifies that the stored bundle digest matches the recomputed digest.
    /// </summary>
    /// <returns>True if a digest is present and matches; false otherwise.</returns>
    public static bool VerifyDigest(VerdictBundle bundle)
    {
        return bundle.BundleDigest is { } declared
            && string.Equals(declared, ComputeDigest(bundle), StringComparison.Ordinal);
    }
}

View File

@@ -0,0 +1,500 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-005 - Scoring Manifest Rekor Anchoring
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Signing;
/// <summary>
/// Service for anchoring scoring manifests to the Rekor transparency log and
/// verifying previously recorded anchors.
/// </summary>
public interface IScoringManifestRekorAnchorService
{
    /// <summary>
    /// Anchors a signed scoring manifest to Rekor. The manifest must already
    /// carry a DSSE signature; the returned manifest has its Rekor linkage populated.
    /// </summary>
    /// <param name="manifest">The signed manifest to anchor.</param>
    /// <param name="options">Anchoring options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The manifest with Rekor linkage populated.</returns>
    Task<ManifestAnchorResult> AnchorAsync(
        ScoringManifest manifest,
        ManifestAnchorOptions options,
        CancellationToken ct = default);
    /// <summary>
    /// Verifies a manifest's Rekor anchor using the stored inclusion proof
    /// (suitable for offline verification).
    /// </summary>
    /// <param name="manifest">The manifest to verify.</param>
    /// <param name="options">Verification options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification result.</returns>
    Task<ManifestAnchorVerificationResult> VerifyAnchorAsync(
        ScoringManifest manifest,
        ManifestAnchorVerificationOptions options,
        CancellationToken ct = default);
}
/// <summary>
/// Rekor anchor service for scoring manifests.
/// </summary>
public sealed class ScoringManifestRekorAnchorService : IScoringManifestRekorAnchorService
{
// Client used to submit entries to the Rekor transparency log.
private readonly IRekorSubmissionClient _rekorClient;
// Injectable clock abstraction. NOTE(review): not referenced by the methods
// visible here — confirm it is used elsewhere before removing.
private readonly TimeProvider _timeProvider;

/// <summary>
/// Creates the service using the system clock.
/// </summary>
/// <param name="rekorClient">Client used to submit entries to Rekor.</param>
public ScoringManifestRekorAnchorService(IRekorSubmissionClient rekorClient)
    : this(rekorClient, TimeProvider.System)
{
}

/// <summary>
/// Creates the service with an explicit clock (e.g., a fake in tests).
/// </summary>
/// <param name="rekorClient">Client used to submit entries to Rekor.</param>
/// <param name="timeProvider">Clock abstraction.</param>
/// <exception cref="ArgumentNullException">If either argument is null.</exception>
public ScoringManifestRekorAnchorService(IRekorSubmissionClient rekorClient, TimeProvider timeProvider)
{
    _rekorClient = rekorClient ?? throw new ArgumentNullException(nameof(rekorClient));
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <inheritdoc />
/// <remarks>
/// Pipeline: validate the DSSE signature is present → parse the envelope →
/// submit payload + signatures to Rekor → translate the response into a
/// <see cref="RekorLinkage"/> attached to a copy of the manifest. All failure
/// modes return a Fail result rather than throwing.
/// </remarks>
public async Task<ManifestAnchorResult> AnchorAsync(
    ScoringManifest manifest,
    ManifestAnchorOptions options,
    CancellationToken ct = default)
{
    ArgumentNullException.ThrowIfNull(manifest);
    ArgumentNullException.ThrowIfNull(options);
    // Validate manifest is signed: anchoring records the DSSE envelope, so an
    // unsigned manifest has nothing meaningful to submit.
    if (string.IsNullOrEmpty(manifest.DsseSignature))
    {
        return ManifestAnchorResult.Fail("Manifest must be signed before anchoring");
    }
    // Parse DSSE envelope to extract payload
    ManifestDsseEnvelope? envelope;
    try
    {
        envelope = JsonSerializer.Deserialize<ManifestDsseEnvelope>(manifest.DsseSignature);
    }
    catch (JsonException ex)
    {
        return ManifestAnchorResult.Fail($"Invalid DSSE signature: {ex.Message}");
    }
    if (envelope is null)
    {
        return ManifestAnchorResult.Fail("DSSE envelope is empty");
    }
    // Build submission request. ComputeBundleDigest is defined elsewhere in
    // this class (not visible here); presumably a SHA-256 over the envelope
    // JSON — confirm against its implementation.
    var bundleDigest = ComputeBundleDigest(manifest.DsseSignature);
    var request = new ManifestRekorSubmissionRequest
    {
        PayloadType = envelope.PayloadType,
        PayloadBase64 = envelope.Payload,
        Signatures = envelope.Signatures.Select(s => new ManifestRekorSignature
        {
            KeyId = s.KeyId,
            Signature = s.Sig
        }).ToList(),
        BundleSha256 = bundleDigest,
        ArtifactKind = "scoring-manifest",
        // NOTE(review): "unknown" placeholder when the manifest digest was
        // never computed — consider failing fast instead of anchoring without it.
        ArtifactSha256 = manifest.ManifestDigest ?? "unknown"
    };
    // Submit to Rekor. The broad catch is deliberate: any transport/client
    // failure is converted into a Fail result instead of propagating.
    ManifestRekorSubmissionResponse response;
    try
    {
        response = await _rekorClient.SubmitAsync(request, options.RekorUrl, ct);
    }
    catch (Exception ex)
    {
        return ManifestAnchorResult.Fail($"Rekor submission failed: {ex.Message}");
    }
    if (!response.Success)
    {
        return ManifestAnchorResult.Fail($"Rekor submission failed: {response.Error}");
    }
    // Build inclusion proof (optional: the log may respond without one,
    // in which case offline verification will not be possible).
    InclusionProof? inclusionProof = null;
    if (response.Proof is not null)
    {
        inclusionProof = new InclusionProof
        {
            TreeSize = response.Proof.TreeSize,
            RootHash = response.Proof.RootHash,
            Hashes = response.Proof.Hashes.ToImmutableArray(),
            LogId = response.Proof.LogId
        };
    }
    // Build Rekor linkage from the log's response metadata.
    var linkage = new RekorLinkage
    {
        Uuid = response.Uuid,
        LogIndex = response.LogIndex,
        IntegratedTime = response.IntegratedTime,
        InclusionProof = inclusionProof
    };
    // Return a copy of the manifest with the linkage attached (records are
    // immutable; the caller's manifest instance is untouched).
    var anchoredManifest = manifest with { RekorAnchor = linkage };
    return ManifestAnchorResult.Success(anchoredManifest, linkage);
}
/// <inheritdoc />
public Task<ManifestAnchorVerificationResult> VerifyAnchorAsync(
ScoringManifest manifest,
ManifestAnchorVerificationOptions options,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(manifest);
ArgumentNullException.ThrowIfNull(options);
// Check for anchor
if (manifest.RekorAnchor is null)
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail("Manifest has no Rekor anchor"));
}
var anchor = manifest.RekorAnchor;
// Check for inclusion proof (required for offline verification)
if (anchor.InclusionProof is null && options.RequireInclusionProof)
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail("Manifest has no inclusion proof for offline verification"));
}
// Validate UUID format
if (string.IsNullOrEmpty(anchor.Uuid))
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail("Invalid Rekor UUID"));
}
// Validate log index
if (anchor.LogIndex < 0)
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail("Invalid log index"));
}
// Validate integrated time is reasonable
if (anchor.IntegratedTime <= 0)
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail("Invalid integrated time"));
}
var integratedTimeUtc = DateTimeOffset.FromUnixTimeSeconds(anchor.IntegratedTime);
var now = _timeProvider.GetUtcNow();
// Check for future timestamps (allow small skew)
if (integratedTimeUtc > now.AddMinutes(5))
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail(
$"Integrated time is in the future: {integratedTimeUtc:O}"));
}
// Check for very old timestamps if max age is specified
if (options.MaxAgeHours.HasValue)
{
var maxAge = TimeSpan.FromHours(options.MaxAgeHours.Value);
if (now - integratedTimeUtc > maxAge)
{
return Task.FromResult(ManifestAnchorVerificationResult.Fail(
$"Anchor is older than {options.MaxAgeHours} hours"));
}
}
// Verify inclusion proof if available
if (anchor.InclusionProof is not null && options.VerifyInclusionProof)
{
var proofVerification = VerifyInclusionProof(manifest, anchor.InclusionProof);
if (!proofVerification.IsValid)
{
return Task.FromResult(proofVerification);
}
}
return Task.FromResult(ManifestAnchorVerificationResult.Success(
anchor.Uuid,
anchor.LogIndex,
integratedTimeUtc));
}
private ManifestAnchorVerificationResult VerifyInclusionProof(ScoringManifest manifest, InclusionProof proof)
{
// For now, basic validation of proof structure
// Full Merkle tree verification would require the full tree path
if (string.IsNullOrEmpty(proof.RootHash))
{
return ManifestAnchorVerificationResult.Fail("Inclusion proof has no root hash");
}
if (proof.TreeSize <= 0)
{
return ManifestAnchorVerificationResult.Fail("Inclusion proof has invalid tree size");
}
if (string.IsNullOrEmpty(proof.LogId))
{
return ManifestAnchorVerificationResult.Fail("Inclusion proof has no log ID");
}
// In a full implementation, we would:
// 1. Compute the leaf hash from the manifest
// 2. Use the sibling hashes to compute the root
// 3. Compare with the stored root hash
// For now, we trust the stored proof structure
return ManifestAnchorVerificationResult.Success(
manifest.RekorAnchor!.Uuid,
manifest.RekorAnchor.LogIndex,
DateTimeOffset.FromUnixTimeSeconds(manifest.RekorAnchor.IntegratedTime));
}
private static string ComputeBundleDigest(string dsseSignature)
{
var bytes = Encoding.UTF8.GetBytes(dsseSignature);
var hash = SHA256.HashData(bytes);
return Convert.ToHexStringLower(hash);
}
}
/// <summary>
/// Options for anchoring a manifest to Rekor.
/// </summary>
public sealed record ManifestAnchorOptions
{
    /// <summary>
    /// Rekor server URL.
    /// </summary>
    public required string RekorUrl { get; init; }

    /// <summary>
    /// Whether to archive the entry in Rekor. Defaults to <c>true</c>.
    /// NOTE(review): not read by <see cref="ScoringManifestRekorAnchorService"/>
    /// in this file — confirm intended consumer.
    /// </summary>
    public bool Archive { get; init; } = true;
}
/// <summary>
/// Options for verifying a manifest anchor.
/// </summary>
public sealed record ManifestAnchorVerificationOptions
{
    /// <summary>
    /// Whether to require an inclusion proof for offline verification.
    /// Defaults to <c>true</c>; verification fails when the anchor has no proof.
    /// </summary>
    public bool RequireInclusionProof { get; init; } = true;

    /// <summary>
    /// Whether to verify the inclusion proof. Defaults to <c>true</c>.
    /// The current implementation performs structural checks only
    /// (root hash, tree size, log ID present), not full Merkle verification.
    /// </summary>
    public bool VerifyInclusionProof { get; init; } = true;

    /// <summary>
    /// Maximum age of the anchor in hours (null = no limit).
    /// </summary>
    public int? MaxAgeHours { get; init; }
}
/// <summary>
/// Result of anchoring a manifest to Rekor.
/// </summary>
public sealed record ManifestAnchorResult
{
    /// <summary>
    /// Whether anchoring was successful.
    /// </summary>
    public required bool IsSuccess { get; init; }

    /// <summary>
    /// The anchored manifest with Rekor linkage. Populated only on success.
    /// </summary>
    public ScoringManifest? AnchoredManifest { get; init; }

    /// <summary>
    /// The Rekor linkage details. Populated only on success.
    /// </summary>
    public RekorLinkage? Linkage { get; init; }

    /// <summary>
    /// Error message if anchoring failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Creates a successful anchoring result.
    /// </summary>
    public static ManifestAnchorResult Success(ScoringManifest manifest, RekorLinkage linkage) => new()
    {
        IsSuccess = true,
        AnchoredManifest = manifest,
        Linkage = linkage
    };

    /// <summary>
    /// Creates a failed anchoring result with the given error message.
    /// </summary>
    public static ManifestAnchorResult Fail(string error) => new()
    {
        IsSuccess = false,
        Error = error
    };
}
/// <summary>
/// Result of verifying a manifest anchor.
/// </summary>
public sealed record ManifestAnchorVerificationResult
{
    /// <summary>
    /// Whether verification was successful.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Verified Rekor UUID. Populated only on success.
    /// </summary>
    public string? VerifiedUuid { get; init; }

    /// <summary>
    /// Verified log index. Populated only on success.
    /// </summary>
    public long? VerifiedLogIndex { get; init; }

    /// <summary>
    /// Verified integrated time. Populated only on success.
    /// </summary>
    public DateTimeOffset? VerifiedIntegratedTime { get; init; }

    /// <summary>
    /// Error message if verification failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Creates a successful verification result for the given anchor fields.
    /// </summary>
    public static ManifestAnchorVerificationResult Success(string uuid, long logIndex, DateTimeOffset integratedTime) => new()
    {
        IsValid = true,
        VerifiedUuid = uuid,
        VerifiedLogIndex = logIndex,
        VerifiedIntegratedTime = integratedTime
    };

    /// <summary>
    /// Creates a failed verification result with the given error message.
    /// </summary>
    public static ManifestAnchorVerificationResult Fail(string error) => new()
    {
        IsValid = false,
        Error = error
    };
}
/// <summary>
/// Abstraction for Rekor submission to allow testing without full Attestor dependency.
/// </summary>
public interface IRekorSubmissionClient
{
    /// <summary>
    /// Submits a manifest to Rekor.
    /// </summary>
    /// <param name="request">The DSSE payload, signatures, and artifact metadata to submit.</param>
    /// <param name="rekorUrl">The Rekor server URL to submit to.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The submission response (entry UUID, log index, integrated time, optional proof).</returns>
    Task<ManifestRekorSubmissionResponse> SubmitAsync(
        ManifestRekorSubmissionRequest request,
        string rekorUrl,
        CancellationToken ct = default);
}
/// <summary>
/// Request for submitting a manifest to Rekor.
/// </summary>
public sealed class ManifestRekorSubmissionRequest
{
    /// <summary>DSSE payload type identifier.</summary>
    public required string PayloadType { get; init; }

    /// <summary>Base64-encoded DSSE payload.</summary>
    public required string PayloadBase64 { get; init; }

    /// <summary>Signature entries extracted from the DSSE envelope.</summary>
    public required IReadOnlyList<ManifestRekorSignature> Signatures { get; init; }

    /// <summary>Lowercase-hex SHA-256 of the serialized DSSE envelope.</summary>
    public required string BundleSha256 { get; init; }

    /// <summary>Kind label for the artifact (e.g. "scoring-manifest").</summary>
    public required string ArtifactKind { get; init; }

    /// <summary>Digest of the artifact itself (the manifest digest, or "unknown").</summary>
    public required string ArtifactSha256 { get; init; }
}
/// <summary>
/// Signature entry for Rekor submission.
/// </summary>
public sealed class ManifestRekorSignature
{
    /// <summary>Identifier of the signing key.</summary>
    public required string KeyId { get; init; }

    /// <summary>Signature value (as carried in the DSSE envelope's "sig" field).</summary>
    public required string Signature { get; init; }
}
/// <summary>
/// Response from Rekor submission.
/// </summary>
public sealed class ManifestRekorSubmissionResponse
{
    /// <summary>Whether the submission succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>Rekor entry UUID. Empty when submission failed.</summary>
    public string Uuid { get; init; } = string.Empty;

    /// <summary>Index of the entry in the transparency log.</summary>
    public long LogIndex { get; init; }

    /// <summary>Integration timestamp as Unix seconds.</summary>
    public long IntegratedTime { get; init; }

    /// <summary>Inclusion proof returned by the log, if any.</summary>
    public ManifestRekorProof? Proof { get; init; }

    /// <summary>Error message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Inclusion proof from Rekor submission.
/// </summary>
public sealed class ManifestRekorProof
{
    /// <summary>Size of the log tree at integration time.</summary>
    public required long TreeSize { get; init; }

    /// <summary>Root hash of the log tree.</summary>
    public required string RootHash { get; init; }

    /// <summary>Sibling hashes along the inclusion path.</summary>
    public required IReadOnlyList<string> Hashes { get; init; }

    /// <summary>Identifier of the transparency log.</summary>
    public required string LogId { get; init; }
}
/// <summary>
/// Stub Rekor client for testing.
/// Produces deterministic UUIDs (derived from the bundle digest) and a
/// monotonically increasing log index without any network calls.
/// </summary>
public sealed class StubRekorSubmissionClient : IRekorSubmissionClient
{
    private readonly TimeProvider _timeProvider;
    private long _logIndex;

    /// <summary>
    /// Creates the stub using the system clock.
    /// </summary>
    public StubRekorSubmissionClient() : this(TimeProvider.System)
    {
    }

    /// <summary>
    /// Creates the stub with an injectable clock (deterministic integrated times in tests).
    /// </summary>
    public StubRekorSubmissionClient(TimeProvider timeProvider)
    {
        // Validate here, consistent with the production services; a null
        // provider would otherwise surface later as an NRE during submission.
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    /// <inheritdoc />
    public Task<ManifestRekorSubmissionResponse> SubmitAsync(
        ManifestRekorSubmissionRequest request,
        string rekorUrl,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        // Interlocked keeps the index unique under parallel test execution.
        var logIndex = Interlocked.Increment(ref _logIndex);
        var uuid = ComputeUuid(request.BundleSha256);
        var integratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds();

        var response = new ManifestRekorSubmissionResponse
        {
            Success = true,
            Uuid = uuid,
            LogIndex = logIndex,
            IntegratedTime = integratedTime,
            Proof = new ManifestRekorProof
            {
                TreeSize = logIndex,
                RootHash = request.BundleSha256,
                Hashes = ImmutableArray<string>.Empty,
                LogId = new Uri(rekorUrl).Host
            }
        };

        return Task.FromResult(response);
    }

    // Deterministic UUID: first 16 bytes of SHA-256 over the digest string.
    private static string ComputeUuid(string digest)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(digest));
        return new Guid(hash.AsSpan(0, 16)).ToString();
    }
}

View File

@@ -0,0 +1,396 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-004 - Scoring Manifest DSSE Signing
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Signing;
/// <summary>
/// Service for DSSE signing of scoring manifests.
/// </summary>
public interface IScoringManifestSigningService
{
    /// <summary>
    /// Signs a scoring manifest using the provided options.
    /// </summary>
    /// <param name="manifest">The manifest to sign.</param>
    /// <param name="options">Signing options including key and algorithm.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The manifest with digest and DSSE signature populated.</returns>
    Task<ScoringManifest> SignAsync(
        ScoringManifest manifest,
        ManifestSigningOptions options,
        CancellationToken ct = default);

    /// <summary>
    /// Verifies the DSSE signature on a scoring manifest.
    /// </summary>
    /// <param name="manifest">The manifest to verify.</param>
    /// <param name="options">Verification options including expected key.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification result indicating success or failure.</returns>
    /// <remarks>
    /// Verification options must match those used at signing time; the in-file
    /// implementation recomputes the signature and compares it to the envelope.
    /// </remarks>
    Task<ManifestVerificationResult> VerifyAsync(
        ScoringManifest manifest,
        ManifestVerificationOptions options,
        CancellationToken ct = default);

    /// <summary>
    /// Computes the canonical digest for a manifest without signing.
    /// </summary>
    /// <param name="manifest">The manifest to digest.</param>
    /// <returns>SHA-256 hex digest prefixed with "sha256:".</returns>
    string ComputeDigest(ScoringManifest manifest);

    /// <summary>
    /// Gets the canonical JSON representation of a manifest.
    /// Signature-related fields are excluded from the canonical form.
    /// </summary>
    /// <param name="manifest">The manifest to serialize.</param>
    /// <returns>Canonical JSON string.</returns>
    string GetCanonicalJson(ScoringManifest manifest);
}
/// <summary>
/// DSSE signing service for scoring manifests.
/// Implements PAE (Pre-Authentication Encoding) per DSSE specification.
/// </summary>
public sealed class ScoringManifestSigningService : IScoringManifestSigningService
{
    /// <summary>
    /// DSSE payload type for scoring manifests.
    /// </summary>
    public const string PayloadType = "application/vnd.stella.scoring-manifest.v1+json";

    // Envelope (de)serialization: camelCase, omit nulls, compact output.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    // Canonical form: snake_case keys, compact output (digest stability).
    private static readonly JsonSerializerOptions CanonicalSerializerOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
    };

    /// <inheritdoc />
    public Task<ScoringManifest> SignAsync(
        ScoringManifest manifest,
        ManifestSigningOptions options,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(options);
        ct.ThrowIfCancellationRequested();

        // Compute the digest first, and clear any stale signature so the
        // signed payload never embeds a previous envelope.
        var manifestWithDigest = manifest with
        {
            ManifestDigest = ComputeDigest(manifest),
            DsseSignature = null
        };

        // Sign the canonical JSON (signature fields are excluded from it).
        var payloadJson = GetCanonicalJson(manifestWithDigest);
        var payloadBytes = Encoding.UTF8.GetBytes(payloadJson);

        var envelope = BuildEnvelope(payloadBytes, options);
        var envelopeJson = JsonSerializer.Serialize(envelope, JsonOptions);

        var signedManifest = manifestWithDigest with { DsseSignature = envelopeJson };
        return Task.FromResult(signedManifest);
    }

    /// <inheritdoc />
    public Task<ManifestVerificationResult> VerifyAsync(
        ScoringManifest manifest,
        ManifestVerificationOptions options,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(options);
        ct.ThrowIfCancellationRequested();

        if (string.IsNullOrEmpty(manifest.DsseSignature))
        {
            return Task.FromResult(ManifestVerificationResult.Fail("Manifest is not signed"));
        }

        // Parse the DSSE envelope.
        ManifestDsseEnvelope? envelope;
        try
        {
            envelope = JsonSerializer.Deserialize<ManifestDsseEnvelope>(manifest.DsseSignature, JsonOptions);
        }
        catch (JsonException ex)
        {
            return Task.FromResult(ManifestVerificationResult.Fail($"Invalid signature envelope: {ex.Message}"));
        }

        if (envelope is null)
        {
            return Task.FromResult(ManifestVerificationResult.Fail("Signature envelope is empty"));
        }

        if (!string.Equals(envelope.PayloadType, PayloadType, StringComparison.Ordinal))
        {
            return Task.FromResult(ManifestVerificationResult.Fail(
                $"Invalid payload type: expected '{PayloadType}', got '{envelope.PayloadType}'"));
        }

        // Decode the payload defensively: a tampered envelope may carry a
        // missing or malformed base64 payload, which previously escaped as an
        // unhandled FormatException/ArgumentNullException instead of failing
        // verification gracefully.
        if (string.IsNullOrEmpty(envelope.Payload))
        {
            return Task.FromResult(ManifestVerificationResult.Fail("Signature envelope has no payload"));
        }

        byte[] payloadBytes;
        try
        {
            payloadBytes = Convert.FromBase64String(envelope.Payload);
        }
        catch (FormatException ex)
        {
            return Task.FromResult(ManifestVerificationResult.Fail($"Invalid payload encoding: {ex.Message}"));
        }

        // Recompute the expected signature and match it against the envelope.
        var pae = BuildPae(envelope.PayloadType, payloadBytes);
        var expectedSig = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
        var matched = envelope.Signatures is not null && envelope.Signatures.Any(sig =>
            string.Equals(sig.KeyId, options.KeyId, StringComparison.Ordinal)
            && string.Equals(sig.Sig, expectedSig, StringComparison.Ordinal));
        if (!matched)
        {
            return Task.FromResult(ManifestVerificationResult.Fail("Signature verification failed"));
        }

        // Verify the stored digest still matches the manifest content.
        if (!string.IsNullOrEmpty(manifest.ManifestDigest))
        {
            var computedDigest = ComputeDigest(manifest);
            if (!string.Equals(computedDigest, manifest.ManifestDigest, StringComparison.OrdinalIgnoreCase))
            {
                return Task.FromResult(ManifestVerificationResult.Fail("Manifest digest mismatch"));
            }
        }

        // Verify the signed payload matches the manifest's current canonical form.
        var currentCanonicalJson = GetCanonicalJson(manifest);
        var currentPayloadJson = Encoding.UTF8.GetString(payloadBytes);
        if (!string.Equals(currentCanonicalJson, currentPayloadJson, StringComparison.Ordinal))
        {
            return Task.FromResult(ManifestVerificationResult.Fail("Manifest content has been modified"));
        }

        return Task.FromResult(ManifestVerificationResult.Success(options.KeyId));
    }

    /// <inheritdoc />
    public string ComputeDigest(ScoringManifest manifest)
    {
        var canonicalJson = GetCanonicalJson(manifest);
        var canonicalBytes = CanonJson.Canonicalize(canonicalJson, CanonicalSerializerOptions);
        var hash = CanonJson.Sha256Hex(canonicalBytes);
        return $"sha256:{hash}";
    }

    /// <inheritdoc />
    public string GetCanonicalJson(ScoringManifest manifest)
    {
        // Projection excludes ManifestDigest/DsseSignature to avoid a circular
        // dependency between the digest and the signed content.
        var projection = new
        {
            schema_version = manifest.SchemaVersion,
            scoring_version = manifest.ScoringVersion,
            weights = new
            {
                cvss_base = manifest.Weights.CvssBase,
                epss = manifest.Weights.Epss,
                reachability = manifest.Weights.Reachability,
                exploit_maturity = manifest.Weights.ExploitMaturity,
                patch_proof_confidence = manifest.Weights.PatchProofConfidence
            },
            normalizers = new
            {
                cvss_range = new { min = manifest.Normalizers.CvssRange.Min, max = manifest.Normalizers.CvssRange.Max },
                epss_range = new { min = manifest.Normalizers.EpssRange.Min, max = manifest.Normalizers.EpssRange.Max },
                reachability_range = new { min = manifest.Normalizers.ReachabilityRange.Min, max = manifest.Normalizers.ReachabilityRange.Max },
                exploit_maturity_range = new { min = manifest.Normalizers.ExploitMaturityRange.Min, max = manifest.Normalizers.ExploitMaturityRange.Max }
            },
            trusted_vex_keys = manifest.TrustedVexKeys.ToArray(),
            code_hash = manifest.CodeHash,
            created_at = manifest.CreatedAt.ToString("o")
        };
        return CanonJson.Serialize(projection, CanonicalSerializerOptions);
    }

    // Builds a DSSE envelope carrying the payload and a single signature.
    private static ManifestDsseEnvelope BuildEnvelope(byte[] payload, ManifestSigningOptions options)
    {
        var pae = BuildPae(PayloadType, payload);
        var signature = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
        return new ManifestDsseEnvelope(
            PayloadType,
            Convert.ToBase64String(payload),
            [new ManifestDsseSignature(options.KeyId, signature)]);
    }

    // Computes the signature over the PAE using the configured algorithm.
    private static string ComputeSignature(byte[] pae, ManifestSigningAlgorithm algorithm, string? secretBase64)
    {
        return algorithm switch
        {
            ManifestSigningAlgorithm.HmacSha256 => ComputeHmac(pae, secretBase64),
            ManifestSigningAlgorithm.Sha256 => Convert.ToBase64String(SHA256.HashData(pae)),
            _ => throw new InvalidOperationException($"Unsupported signing algorithm: {algorithm}")
        };
    }

    // HMAC-SHA256 over the data; requires a base64-encoded shared secret.
    private static string ComputeHmac(byte[] data, string? secretBase64)
    {
        if (string.IsNullOrWhiteSpace(secretBase64))
        {
            throw new InvalidOperationException("HMAC signing requires a base64 secret.");
        }
        var secret = Convert.FromBase64String(secretBase64);
        var sig = HMACSHA256.HashData(secret, data);
        return Convert.ToBase64String(sig);
    }

    /// <summary>
    /// Builds the DSSE Pre-Authentication Encoding (PAE).
    /// PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload
    /// </summary>
    private static byte[] BuildPae(string payloadType, byte[] payload)
    {
        const string prefix = "DSSEv1";
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var prefixBytes = Encoding.UTF8.GetBytes(prefix);
        var lengthType = Encoding.UTF8.GetBytes(typeBytes.Length.ToString());
        var lengthPayload = Encoding.UTF8.GetBytes(payload.Length.ToString());

        using var stream = new MemoryStream();
        stream.Write(prefixBytes);
        stream.WriteByte((byte)' ');
        stream.Write(lengthType);
        stream.WriteByte((byte)' ');
        stream.Write(typeBytes);
        stream.WriteByte((byte)' ');
        stream.Write(lengthPayload);
        stream.WriteByte((byte)' ');
        stream.Write(payload);
        return stream.ToArray();
    }
}
/// <summary>
/// Options for signing a scoring manifest.
/// </summary>
public sealed record ManifestSigningOptions
{
    /// <summary>
    /// Key identifier for the signing key.
    /// </summary>
    public required string KeyId { get; init; }

    /// <summary>
    /// Signing algorithm to use. Defaults to HMAC-SHA256.
    /// </summary>
    public ManifestSigningAlgorithm Algorithm { get; init; } = ManifestSigningAlgorithm.HmacSha256;

    /// <summary>
    /// Base64-encoded secret for HMAC signing.
    /// Required when <see cref="Algorithm"/> is HmacSha256; signing throws otherwise.
    /// </summary>
    public string? SecretBase64 { get; init; }
}
/// <summary>
/// Options for verifying a scoring manifest signature.
/// Values must match those used at signing time, since verification
/// recomputes the signature and compares it to the stored envelope.
/// </summary>
public sealed record ManifestVerificationOptions
{
    /// <summary>
    /// Expected key identifier.
    /// </summary>
    public required string KeyId { get; init; }

    /// <summary>
    /// Algorithm used for signing. Defaults to HMAC-SHA256.
    /// </summary>
    public ManifestSigningAlgorithm Algorithm { get; init; } = ManifestSigningAlgorithm.HmacSha256;

    /// <summary>
    /// Base64-encoded secret for HMAC verification.
    /// </summary>
    public string? SecretBase64 { get; init; }
}
/// <summary>
/// Supported signing algorithms for manifests.
/// </summary>
public enum ManifestSigningAlgorithm
{
    /// <summary>
    /// HMAC-SHA256 (development/testing). Requires a shared base64 secret.
    /// </summary>
    HmacSha256,

    /// <summary>
    /// SHA-256 hash only (no key, for testing). Provides no authenticity.
    /// </summary>
    Sha256
}
/// <summary>
/// Result of manifest signature verification.
/// </summary>
public sealed record ManifestVerificationResult
{
    /// <summary>
    /// Whether verification was successful.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Key ID that verified the signature (if successful).
    /// </summary>
    public string? VerifiedKeyId { get; init; }

    /// <summary>
    /// Error message if verification failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Creates a successful verification result for the given key.
    /// </summary>
    public static ManifestVerificationResult Success(string keyId) => new()
    {
        IsValid = true,
        VerifiedKeyId = keyId
    };

    /// <summary>
    /// Creates a failed verification result with the given error message.
    /// </summary>
    public static ManifestVerificationResult Fail(string error) => new()
    {
        IsValid = false,
        Error = error
    };
}
/// <summary>
/// DSSE envelope for scoring manifests.
/// </summary>
/// <param name="PayloadType">DSSE payload type identifier.</param>
/// <param name="Payload">Base64-encoded payload bytes.</param>
/// <param name="Signatures">Signatures computed over the PAE of the payload.</param>
public sealed record ManifestDsseEnvelope(
    [property: JsonPropertyName("payloadType")] string PayloadType,
    [property: JsonPropertyName("payload")] string Payload,
    [property: JsonPropertyName("signatures")] IReadOnlyList<ManifestDsseSignature> Signatures);
/// <summary>
/// DSSE signature entry.
/// </summary>
/// <param name="KeyId">Identifier of the signing key.</param>
/// <param name="Sig">Base64-encoded signature value.</param>
public sealed record ManifestDsseSignature(
    [property: JsonPropertyName("keyid")] string KeyId,
    [property: JsonPropertyName("sig")] string Sig);

View File

@@ -7,7 +7,14 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\StellaOps.IssuerDirectory.Client\StellaOps.IssuerDirectory.Client.csproj" />
<ProjectReference Include="..\..\Signals\StellaOps.Signals\StellaOps.Signals.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,220 @@
// -----------------------------------------------------------------------------
// IScoringTrustProvider.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-004 - Integrate IssuerDirectory with Scoring Manifest
// Description: Interface bridging IssuerDirectory to scoring for VEX trust verification
// -----------------------------------------------------------------------------
using StellaOps.DeltaVerdict.Inputs;
namespace StellaOps.DeltaVerdict.Trust;
/// <summary>
/// Provider for scoring trust verification.
/// Bridges the IssuerDirectory to scoring manifest for authoritative VEX override validation.
/// </summary>
public interface IScoringTrustProvider
{
    /// <summary>
    /// Checks if an issuer is trusted for authoritative VEX overrides.
    /// </summary>
    /// <param name="issuerId">The issuer identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if the issuer is trusted for automatic score overrides.</returns>
    ValueTask<bool> IsTrustedForOverrideAsync(string issuerId, CancellationToken ct = default);

    /// <summary>
    /// Gets the trust level for an issuer.
    /// </summary>
    /// <param name="issuerId">The issuer identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The issuer's trust level.</returns>
    ValueTask<TrustLevel> GetTrustLevelAsync(string issuerId, CancellationToken ct = default);

    /// <summary>
    /// Verifies VEX signature and checks issuer trust.
    /// </summary>
    /// <param name="vexDocument">The VEX document to verify.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Result containing signature validity and trust assessment.</returns>
    ValueTask<VexTrustResult> VerifyAndTrustAsync(VexDocument vexDocument, CancellationToken ct = default);

    /// <summary>
    /// Checks if a signing key fingerprint is in the trusted keys roster.
    /// </summary>
    /// <param name="keyFingerprint">The key fingerprint (SHA-256 of SPKI).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if the key is trusted.</returns>
    ValueTask<bool> IsKeyTrustedAsync(string keyFingerprint, CancellationToken ct = default);

    /// <summary>
    /// Gets all trusted key fingerprints for the current tenant/context.
    /// </summary>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Set of trusted key fingerprints.</returns>
    ValueTask<IReadOnlySet<string>> GetTrustedKeyFingerprintsAsync(CancellationToken ct = default);
}
/// <summary>
/// Result of VEX trust verification.
/// </summary>
public sealed record VexTrustResult
{
    /// <summary>
    /// Whether the VEX signature was present and valid.
    /// </summary>
    public required bool SignatureValid { get; init; }

    /// <summary>
    /// Whether the VEX issuer is trusted.
    /// </summary>
    public required bool IssuerTrusted { get; init; }

    /// <summary>
    /// Trust level of the issuer.
    /// </summary>
    public required TrustLevel TrustLevel { get; init; }

    /// <summary>
    /// Composite trust score from IssuerDirectory [0, 1].
    /// </summary>
    public required decimal CompositeScore { get; init; }

    /// <summary>
    /// Issuer identifier.
    /// </summary>
    public string? IssuerId { get; init; }

    /// <summary>
    /// Signing key fingerprint.
    /// </summary>
    public string? KeyFingerprint { get; init; }

    /// <summary>
    /// VEX status from the document.
    /// </summary>
    public string? VexStatus { get; init; }

    /// <summary>
    /// Verification timestamp (UTC).
    /// NOTE(review): defaults to wall-clock time at record construction, which
    /// is non-deterministic — consider injecting TimeProvider at the call site.
    /// </summary>
    public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// Verification error message if failed.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <summary>
    /// Whether this VEX allows automatic score override.
    /// Requires valid signature from trusted vendor+ issuer.
    /// </summary>
    public bool AllowsAutomaticOverride =>
        SignatureValid && IssuerTrusted && TrustLevel >= TrustLevel.Vendor;

    /// <summary>
    /// Creates a failed verification result (no trust, zero composite score).
    /// </summary>
    public static VexTrustResult Failed(string errorMessage) => new()
    {
        SignatureValid = false,
        IssuerTrusted = false,
        TrustLevel = TrustLevel.Unknown,
        CompositeScore = 0m,
        ErrorMessage = errorMessage
    };

    /// <summary>
    /// Creates an unsigned VEX result (community trust level, low confidence).
    /// </summary>
    public static VexTrustResult Unsigned(string? issuerId = null) => new()
    {
        SignatureValid = false,
        IssuerTrusted = false,
        TrustLevel = TrustLevel.Community,
        CompositeScore = 0.3m, // Low confidence for unsigned
        IssuerId = issuerId
    };
}
/// <summary>
/// VEX document for trust verification.
/// </summary>
public sealed record VexDocument
{
    /// <summary>
    /// VEX document identifier.
    /// </summary>
    public required string DocumentId { get; init; }

    /// <summary>
    /// VEX status (e.g., "not_affected", "affected", "fixed").
    /// </summary>
    public required string Status { get; init; }

    /// <summary>
    /// Issuer identifier.
    /// </summary>
    public string? IssuerId { get; init; }

    /// <summary>
    /// Raw document content for signature verification.
    /// Passed to the signature verifier alongside <see cref="Signature"/>.
    /// </summary>
    public byte[]? RawContent { get; init; }

    /// <summary>
    /// Signature if present. Absence marks the document as unsigned.
    /// </summary>
    public VexSignature? Signature { get; init; }

    /// <summary>
    /// CVE IDs covered by this VEX statement.
    /// </summary>
    public IReadOnlyList<string>? CveIds { get; init; }

    /// <summary>
    /// Product identifiers (PURL, CPE, etc.).
    /// </summary>
    public IReadOnlyList<string>? ProductIds { get; init; }

    /// <summary>
    /// VEX statement timestamp.
    /// </summary>
    public DateTimeOffset? StatementTimestamp { get; init; }
}
/// <summary>
/// VEX document signature.
/// </summary>
public sealed record VexSignature
{
    /// <summary>
    /// Signature format (e.g., "PGP", "PKCS7", "JWS", "DSSE").
    /// </summary>
    public required string Format { get; init; }

    /// <summary>
    /// Base64-encoded signature value.
    /// </summary>
    public required string Value { get; init; }

    /// <summary>
    /// Signing key ID or fingerprint.
    /// </summary>
    public string? KeyId { get; init; }

    /// <summary>
    /// Signer identity (email, DN, etc.).
    /// </summary>
    public string? Signer { get; init; }

    /// <summary>
    /// Signing timestamp.
    /// </summary>
    public DateTimeOffset? SignedAt { get; init; }

    /// <summary>
    /// Certificate chain if present.
    /// </summary>
    public IReadOnlyList<string>? CertificateChain { get; init; }
}

View File

@@ -0,0 +1,361 @@
// -----------------------------------------------------------------------------
// ScoringTrustProvider.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-004 - Integrate IssuerDirectory with Scoring Manifest
// Description: Implementation bridging IssuerDirectory to scoring
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.DeltaVerdict.Inputs;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.IssuerDirectory.Client;
namespace StellaOps.DeltaVerdict.Trust;
/// <summary>
/// Implementation of IScoringTrustProvider using IssuerDirectory.
/// Trust lookups fail closed: any IssuerDirectory error is logged and the
/// issuer is treated as untrusted/unknown instead of the exception propagating.
/// </summary>
public sealed class ScoringTrustProvider : IScoringTrustProvider
{
    private readonly IIssuerDirectoryClient _issuerDirectoryClient;
    // Optional; when null, VEX signatures are never cryptographically verified.
    private readonly IVexSignatureVerifier? _signatureVerifier;
    private readonly IOptions<ScoringTrustProviderOptions> _options;
    private readonly ILogger<ScoringTrustProvider> _logger;

    /// <summary>
    /// Creates a new ScoringTrustProvider.
    /// </summary>
    /// <param name="issuerDirectoryClient">Client used to resolve issuer trust weights and signing keys.</param>
    /// <param name="options">Provider options (tenant id, thresholds, trusted key roster).</param>
    /// <param name="logger">Logger for trust-resolution diagnostics.</param>
    /// <param name="signatureVerifier">Optional VEX signature verifier; signature checks are skipped when null.</param>
    /// <exception cref="ArgumentNullException">If a required dependency is null.</exception>
    public ScoringTrustProvider(
        IIssuerDirectoryClient issuerDirectoryClient,
        IOptions<ScoringTrustProviderOptions> options,
        ILogger<ScoringTrustProvider> logger,
        IVexSignatureVerifier? signatureVerifier = null)
    {
        _issuerDirectoryClient = issuerDirectoryClient ?? throw new ArgumentNullException(nameof(issuerDirectoryClient));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _signatureVerifier = signatureVerifier;
    }

    /// <inheritdoc />
    /// <remarks>
    /// An issuer is trusted for automatic overrides when its effective weight meets or
    /// exceeds <see cref="ScoringTrustProviderOptions.OverrideTrustThreshold"/>.
    /// Returns false (fail closed) for blank input or on directory errors.
    /// </remarks>
    public async ValueTask<bool> IsTrustedForOverrideAsync(string issuerId, CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(issuerId))
            return false;
        try
        {
            var trust = await _issuerDirectoryClient.GetIssuerTrustAsync(
                _options.Value.TenantId,
                issuerId,
                includeGlobal: true,
                ct).ConfigureAwait(false);
            // Trusted for override if effective weight >= threshold
            return trust.EffectiveWeight >= _options.Value.OverrideTrustThreshold;
        }
        catch (Exception ex)
        {
            // Fail closed: a directory outage must not grant override trust.
            _logger.LogWarning(ex, "Failed to get trust for issuer {IssuerId}", issuerId);
            return false;
        }
    }

    /// <inheritdoc />
    /// <remarks>
    /// Maps the issuer's effective weight to a <see cref="TrustLevel"/> via
    /// <see cref="MapWeightToTrustLevel"/>. Returns <see cref="TrustLevel.Unknown"/>
    /// for blank input or on directory errors (fail closed).
    /// </remarks>
    public async ValueTask<TrustLevel> GetTrustLevelAsync(string issuerId, CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(issuerId))
            return TrustLevel.Unknown;
        try
        {
            var trust = await _issuerDirectoryClient.GetIssuerTrustAsync(
                _options.Value.TenantId,
                issuerId,
                includeGlobal: true,
                ct).ConfigureAwait(false);
            return MapWeightToTrustLevel(trust.EffectiveWeight);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to get trust level for issuer {IssuerId}", issuerId);
            return TrustLevel.Unknown;
        }
    }

    /// <inheritdoc />
    /// <remarks>
    /// Verification order: (1) signature check when a signature and a verifier are both
    /// present — an invalid signature short-circuits to a failed result; (2) unsigned
    /// documents short-circuit to <see cref="VexTrustResult.Unsigned"/>; (3) issuer trust
    /// lookup; (4) signing-key roster check, which (when
    /// <see cref="ScoringTrustProviderOptions.RequireTrustedKey"/> is set) caps the
    /// composite score at 0.5 for keys outside the roster.
    /// NOTE(review): when a signature IS present but no IVexSignatureVerifier was
    /// registered, step (1) is skipped and the flow continues with
    /// signatureValid == false while issuer trust is still evaluated — confirm this
    /// is the intended policy rather than an unsigned/failed result.
    /// </remarks>
    public async ValueTask<VexTrustResult> VerifyAndTrustAsync(VexDocument vexDocument, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(vexDocument);
        // Check signature if present
        bool signatureValid = false;
        string? keyFingerprint = null;
        if (vexDocument.Signature != null && _signatureVerifier != null)
        {
            try
            {
                var verifyResult = await _signatureVerifier.VerifyAsync(
                    vexDocument.RawContent ?? [],
                    vexDocument.Signature,
                    ct).ConfigureAwait(false);
                signatureValid = verifyResult.IsValid;
                keyFingerprint = verifyResult.KeyFingerprint;
                if (!signatureValid)
                {
                    _logger.LogWarning(
                        "VEX signature verification failed for document {DocumentId}: {Error}",
                        vexDocument.DocumentId, verifyResult.ErrorMessage);
                    return VexTrustResult.Failed(verifyResult.ErrorMessage ?? "Signature verification failed");
                }
            }
            catch (Exception ex)
            {
                // Verifier threw (malformed signature, crypto error, ...): treat as failed.
                _logger.LogWarning(ex, "VEX signature verification error for document {DocumentId}", vexDocument.DocumentId);
                return VexTrustResult.Failed(ex.Message);
            }
        }
        else if (vexDocument.Signature == null)
        {
            // Unsigned VEX - lower trust
            _logger.LogDebug("VEX document {DocumentId} is unsigned", vexDocument.DocumentId);
            return VexTrustResult.Unsigned(vexDocument.IssuerId);
        }
        // Check issuer trust
        string? issuerId = vexDocument.IssuerId;
        TrustLevel trustLevel = TrustLevel.Unknown;
        decimal compositeScore = 0m;
        bool issuerTrusted = false;
        if (!string.IsNullOrWhiteSpace(issuerId))
        {
            try
            {
                var trust = await _issuerDirectoryClient.GetIssuerTrustAsync(
                    _options.Value.TenantId,
                    issuerId,
                    includeGlobal: true,
                    ct).ConfigureAwait(false);
                compositeScore = trust.EffectiveWeight;
                trustLevel = MapWeightToTrustLevel(compositeScore);
                issuerTrusted = compositeScore >= _options.Value.OverrideTrustThreshold;
            }
            catch (Exception ex)
            {
                // Fail closed: the defaults above (Unknown / 0 / untrusted) remain in force.
                _logger.LogWarning(ex, "Failed to verify issuer trust for {IssuerId}", issuerId);
            }
        }
        // Check if key is in trusted roster
        if (keyFingerprint != null)
        {
            var keyTrusted = await IsKeyTrustedAsync(keyFingerprint, ct).ConfigureAwait(false);
            if (!keyTrusted && _options.Value.RequireTrustedKey)
            {
                _logger.LogWarning(
                    "VEX signing key {KeyFingerprint} not in trusted roster for document {DocumentId}",
                    keyFingerprint, vexDocument.DocumentId);
                return new VexTrustResult
                {
                    SignatureValid = signatureValid,
                    IssuerTrusted = false,
                    TrustLevel = TrustLevel.Community,
                    CompositeScore = Math.Min(compositeScore, 0.5m), // Cap at community level
                    IssuerId = issuerId,
                    KeyFingerprint = keyFingerprint,
                    VexStatus = vexDocument.Status,
                    ErrorMessage = "Signing key not in trusted roster"
                };
            }
        }
        return new VexTrustResult
        {
            SignatureValid = signatureValid,
            IssuerTrusted = issuerTrusted,
            TrustLevel = trustLevel,
            CompositeScore = compositeScore,
            IssuerId = issuerId,
            KeyFingerprint = keyFingerprint,
            VexStatus = vexDocument.Status
        };
    }

    /// <inheritdoc />
    /// <remarks>
    /// Membership test against the full roster built by
    /// <see cref="GetTrustedKeyFingerprintsAsync"/>; the roster uses a
    /// case-insensitive comparer. Blank input returns false.
    /// </remarks>
    public async ValueTask<bool> IsKeyTrustedAsync(string keyFingerprint, CancellationToken ct = default)
    {
        if (string.IsNullOrWhiteSpace(keyFingerprint))
            return false;
        var trustedKeys = await GetTrustedKeyFingerprintsAsync(ct).ConfigureAwait(false);
        return trustedKeys.Contains(keyFingerprint);
    }

    /// <inheritdoc />
    /// <remarks>
    /// The roster is the union of (a) manifest-configured fingerprints from options and
    /// (b) the currently usable keys of each configured trusted issuer. Per-issuer
    /// directory failures are logged and skipped, so the roster is best-effort.
    /// NOTE(review): the roster is rebuilt (including directory round-trips) on every
    /// call — IsKeyTrustedAsync does this once per key check; confirm caching is
    /// handled elsewhere if this is on a hot path.
    /// </remarks>
    public async ValueTask<IReadOnlySet<string>> GetTrustedKeyFingerprintsAsync(CancellationToken ct = default)
    {
        var fingerprints = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        // Add keys from options (manifest-configured)
        foreach (var key in _options.Value.TrustedKeyFingerprints)
        {
            fingerprints.Add(key);
        }
        // Add keys from IssuerDirectory for trusted issuers
        foreach (var issuerId in _options.Value.TrustedIssuerIds)
        {
            try
            {
                var keys = await _issuerDirectoryClient.GetIssuerKeysAsync(
                    _options.Value.TenantId,
                    issuerId,
                    includeGlobal: true,
                    ct).ConfigureAwait(false);
                foreach (var key in keys)
                {
                    // Only include active keys: status "active", not revoked, not expired.
                    if (key.Status == "active" &&
                        key.RevokedAtUtc == null &&
                        (key.ExpiresAtUtc == null || key.ExpiresAtUtc > DateTimeOffset.UtcNow))
                    {
                        fingerprints.Add(key.Fingerprint);
                    }
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to get keys for issuer {IssuerId}", issuerId);
            }
        }
        return fingerprints;
    }

    /// <summary>
    /// Maps IssuerDirectory weight [0, 1] to TrustLevel enum.
    /// Bands: >= 0.9 Authoritative, >= 0.7 Vendor, >= 0.4 Community,
    /// > 0 Untrusted, otherwise Unknown.
    /// </summary>
    private static TrustLevel MapWeightToTrustLevel(decimal weight) => weight switch
    {
        >= 0.9m => TrustLevel.Authoritative,
        >= 0.7m => TrustLevel.Vendor,
        >= 0.4m => TrustLevel.Community,
        > 0m => TrustLevel.Untrusted,
        _ => TrustLevel.Unknown
    };
}
/// <summary>
/// Options for ScoringTrustProvider.
/// Immutable record; bind from the "Scoring:Trust" configuration section or
/// construct directly with object-initializer syntax.
/// </summary>
public sealed record ScoringTrustProviderOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Scoring:Trust";
    /// <summary>
    /// Tenant ID for IssuerDirectory queries.
    /// </summary>
    public string TenantId { get; init; } = "default";
    /// <summary>
    /// Trust weight threshold for automatic override [0, 1].
    /// Issuers with weight >= threshold are trusted for automatic score overrides.
    /// Default: 0.7 (Vendor level).
    /// </summary>
    public decimal OverrideTrustThreshold { get; init; } = 0.7m;
    /// <summary>
    /// Whether to require the signing key in the trusted roster.
    /// If true, signatures from unknown keys are not trusted even if issuer is trusted.
    /// Default: true (fail closed).
    /// </summary>
    public bool RequireTrustedKey { get; init; } = true;
    /// <summary>
    /// Trusted VEX key fingerprints from manifest.
    /// These keys are always trusted regardless of IssuerDirectory.
    /// </summary>
    public IReadOnlyList<string> TrustedKeyFingerprints { get; init; } = [];
    /// <summary>
    /// Trusted issuer IDs whose active keys should be trusted.
    /// </summary>
    public IReadOnlyList<string> TrustedIssuerIds { get; init; } = [];
}
/// <summary>
/// Interface for VEX signature verification.
/// Implementations may use PGP, PKCS7, JWS, or DSSE.
/// </summary>
public interface IVexSignatureVerifier
{
    /// <summary>
    /// Verifies a VEX signature over the given raw document bytes.
    /// </summary>
    /// <param name="content">Raw document content that was signed.</param>
    /// <param name="signature">The signature envelope to verify.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification outcome including key fingerprint and signer identity when extractable.</returns>
    ValueTask<SignatureVerificationResult> VerifyAsync(
        byte[] content,
        VexSignature signature,
        CancellationToken ct = default);
}
/// <summary>
/// Outcome of verifying a VEX signature.
/// Immutable record; use <see cref="Valid"/> / <see cref="Invalid"/> for the
/// two common shapes, or an object initializer for full control.
/// </summary>
public sealed record SignatureVerificationResult
{
    /// <summary>
    /// True when the signature verified successfully.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Fingerprint of the signing key, when it could be extracted.
    /// </summary>
    public string? KeyFingerprint { get; init; }

    /// <summary>
    /// Identity of the signer, when it could be extracted.
    /// </summary>
    public string? Signer { get; init; }

    /// <summary>
    /// Timestamp of signing, when it could be extracted.
    /// </summary>
    public DateTimeOffset? SignedAt { get; init; }

    /// <summary>
    /// Human-readable failure reason when verification did not succeed.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <summary>
    /// Builds a successful result carrying the signing key and optional signer identity.
    /// </summary>
    public static SignatureVerificationResult Valid(string keyFingerprint, string? signer = null)
    {
        return new SignatureVerificationResult
        {
            IsValid = true,
            KeyFingerprint = keyFingerprint,
            Signer = signer
        };
    }

    /// <summary>
    /// Builds a failed result carrying the failure reason.
    /// </summary>
    public static SignatureVerificationResult Invalid(string errorMessage)
    {
        return new SignatureVerificationResult
        {
            IsValid = false,
            ErrorMessage = errorMessage
        };
    }
}

View File

@@ -0,0 +1,80 @@
// -----------------------------------------------------------------------------
// TrustServiceCollectionExtensions.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-004 - Integrate IssuerDirectory with Scoring Manifest
// Description: DI registration for trust provider services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.DeltaVerdict.Trust;
/// <summary>
/// Extension methods for registering trust provider services.
/// </summary>
public static class TrustServiceCollectionExtensions
{
    /// <summary>
    /// Adds the scoring trust provider to the service collection, optionally
    /// binding <see cref="ScoringTrustProviderOptions"/> from the
    /// "Scoring:Trust" configuration section.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">Optional configuration.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddScoringTrustProvider(
        this IServiceCollection services,
        IConfiguration? configuration = null)
    {
        if (configuration != null)
        {
            services.Configure<ScoringTrustProviderOptions>(
                configuration.GetSection(ScoringTrustProviderOptions.SectionName));
        }
        services.AddSingleton<IScoringTrustProvider, ScoringTrustProvider>();
        return services;
    }

    /// <summary>
    /// Adds the scoring trust provider with custom options.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Options configuration delegate.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddScoringTrustProvider(
        this IServiceCollection services,
        Action<ScoringTrustProviderOptions> configureOptions)
    {
        services.Configure(configureOptions);
        services.AddSingleton<IScoringTrustProvider, ScoringTrustProvider>();
        return services;
    }

    /// <summary>
    /// Adds the scoring trust provider with manifest-configured trusted keys.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="trustedKeyFingerprints">Trusted VEX key fingerprints from manifest.</param>
    /// <param name="trustedIssuerIds">Trusted issuer IDs.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddScoringTrustProvider(
        this IServiceCollection services,
        IEnumerable<string> trustedKeyFingerprints,
        IEnumerable<string>? trustedIssuerIds = null)
    {
        ArgumentNullException.ThrowIfNull(trustedKeyFingerprints);
        // BUG FIX: the previous implementation did
        //     services.Configure<ScoringTrustProviderOptions>(options =>
        //         { options = options with { ... }; });
        // Reassigning the lambda-local `options` parameter discards the new
        // record instance, so the registered options never received the
        // trusted keys/issuers (the record's init-only properties cannot be
        // mutated inside Configure). Register a fully constructed instance
        // instead — same pattern as
        // ValidationServiceCollectionExtensions.AddAdversarialInputValidation(policy).
        var options = new ScoringTrustProviderOptions
        {
            TrustedKeyFingerprints = trustedKeyFingerprints.ToList(),
            TrustedIssuerIds = trustedIssuerIds?.ToList() ?? []
        };
        services.AddSingleton(Microsoft.Extensions.Options.Options.Create(options));
        services.AddSingleton<IScoringTrustProvider, ScoringTrustProvider>();
        return services;
    }
}

View File

@@ -0,0 +1,414 @@
// -----------------------------------------------------------------------------
// AdversarialInputValidator.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-006 - Implement Adversarial Input Validation
// Description: Implementation of adversarial input validation with confidence discounting
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.DeltaVerdict.Inputs;
using StellaOps.DeltaVerdict.Trust;
namespace StellaOps.DeltaVerdict.Validation;
/// <summary>
/// Implementation of IAdversarialInputValidator.
/// Validates inputs and applies confidence discounts for untrusted signals.
/// Confidence starts at 1.0 per input and is multiplied down by each
/// applicable policy discount; every discount is recorded as a
/// <see cref="ValidationAuditEntry"/>.
/// </summary>
public sealed class AdversarialInputValidator : IAdversarialInputValidator
{
    private readonly IScoringTrustProvider _trustProvider;
    private readonly ILogger<AdversarialInputValidator> _logger;

    /// <summary>
    /// Creates a new AdversarialInputValidator.
    /// </summary>
    /// <param name="trustProvider">Trust provider used for key/issuer trust checks.</param>
    /// <param name="logger">Logger for audit-summary output.</param>
    /// <exception cref="ArgumentNullException">If a dependency is null.</exception>
    public AdversarialInputValidator(
        IScoringTrustProvider trustProvider,
        ILogger<AdversarialInputValidator> logger)
    {
        _trustProvider = trustProvider ?? throw new ArgumentNullException(nameof(trustProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    /// <remarks>
    /// Validates each input category independently (CVSS, EPSS, reachability, VEX);
    /// absent categories keep full confidence (1.0). The overall status is derived
    /// from the min/avg of the four confidences via <see cref="DetermineStatus"/>.
    /// </remarks>
    public async ValueTask<ValidatedScoringInputs> ValidateAsync(
        PinnedScoringInputs inputs,
        AdversarialValidationPolicy policy,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        ArgumentNullException.ThrowIfNull(policy);
        var auditEntries = new List<ValidationAuditEntry>();
        // Validate CVSS
        double cvssConfidence = 1.0;
        if (inputs.CvssScores != null)
        {
            cvssConfidence = ValidateCvss(inputs.CvssScores, policy, auditEntries);
        }
        // Validate EPSS
        double epssConfidence = 1.0;
        if (inputs.EpssScores != null)
        {
            epssConfidence = ValidateEpss(inputs.EpssScores, policy, auditEntries);
        }
        // Validate Reachability
        double reachabilityConfidence = 1.0;
        if (inputs.Reachability != null)
        {
            reachabilityConfidence = ValidateReachability(inputs.Reachability, policy, auditEntries);
        }
        // Validate VEX
        double vexConfidence = 1.0;
        bool vexAllowsOverride = false;
        if (inputs.VexStatements != null && inputs.VexStatements.Count > 0)
        {
            var vexResult = await ValidateVexStatementsAsync(
                inputs.VexStatements, policy, auditEntries, ct).ConfigureAwait(false);
            vexConfidence = vexResult.Confidence;
            vexAllowsOverride = vexResult.AllowsOverride;
        }
        // Determine overall status
        var status = DetermineStatus(
            cvssConfidence, epssConfidence, reachabilityConfidence, vexConfidence,
            policy.MinConfidenceThreshold);
        if (policy.EnableAuditLog && auditEntries.Count > 0)
        {
            _logger.LogInformation(
                "Adversarial validation applied {Count} confidence adjustments: {Adjustments}",
                auditEntries.Count,
                string.Join("; ", auditEntries.Select(e => $"{e.InputType}:{e.Reason}={e.FinalConfidence:F2}")));
        }
        return new ValidatedScoringInputs
        {
            Original = inputs,
            CvssConfidence = cvssConfidence,
            EpssConfidence = epssConfidence,
            ReachabilityConfidence = reachabilityConfidence,
            VexConfidence = vexConfidence,
            VexAllowsOverride = vexAllowsOverride,
            AuditEntries = auditEntries,
            Status = status
        };
    }

    /// <inheritdoc />
    /// <remarks>
    /// Single-VEX trust check: unsigned → Unsigned result; invalid signature →
    /// Failed result; signed by a key outside the trusted roster → untrusted
    /// result scored at <see cref="AdversarialValidationPolicy.UntrustedVexKeyDiscount"/>.
    /// NOTE(review): unlike ScoringTrustProvider.VerifyAndTrustAsync, the
    /// untrusted-key case here always returns early (no RequireTrustedKey-style
    /// toggle) — confirm that asymmetry is intended.
    /// </remarks>
    public async ValueTask<VexTrustResult> ValidateVexAsync(
        PinnedInput<object> vexInput,
        AdversarialValidationPolicy policy,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(vexInput);
        ArgumentNullException.ThrowIfNull(policy);
        // Check signature
        if (vexInput.Signature == null || !vexInput.Signature.Present)
        {
            return VexTrustResult.Unsigned(vexInput.Source);
        }
        if (!vexInput.Signature.Valid)
        {
            return VexTrustResult.Failed("VEX signature invalid");
        }
        // Check key trust
        var keyFingerprint = vexInput.Signature.KeyId;
        if (keyFingerprint != null)
        {
            var keyTrusted = await _trustProvider.IsKeyTrustedAsync(keyFingerprint, ct).ConfigureAwait(false);
            if (!keyTrusted)
            {
                return new VexTrustResult
                {
                    SignatureValid = true,
                    IssuerTrusted = false,
                    TrustLevel = TrustLevel.Community,
                    CompositeScore = (decimal)policy.UntrustedVexKeyDiscount,
                    KeyFingerprint = keyFingerprint,
                    ErrorMessage = "VEX signing key not in trusted roster"
                };
            }
        }
        // Check issuer trust
        var issuerId = vexInput.Source;
        if (!string.IsNullOrEmpty(issuerId))
        {
            var trustLevel = await _trustProvider.GetTrustLevelAsync(issuerId, ct).ConfigureAwait(false);
            var isTrusted = await _trustProvider.IsTrustedForOverrideAsync(issuerId, ct).ConfigureAwait(false);
            return new VexTrustResult
            {
                SignatureValid = true,
                IssuerTrusted = isTrusted,
                TrustLevel = trustLevel,
                CompositeScore = MapTrustLevelToScore(trustLevel),
                IssuerId = issuerId,
                KeyFingerprint = keyFingerprint
            };
        }
        // No issuer known: fall back to the trust level carried by the signature itself.
        return new VexTrustResult
        {
            SignatureValid = true,
            IssuerTrusted = false,
            TrustLevel = vexInput.Signature.TrustLevel,
            CompositeScore = MapTrustLevelToScore(vexInput.Signature.TrustLevel),
            KeyFingerprint = keyFingerprint
        };
    }

    /// <summary>
    /// Validates CVSS input: applies the stale discount when past TTL and a
    /// fixed 20% discount for an invalid signature. Returns the resulting
    /// confidence in [0, 1], appending one audit entry per discount applied.
    /// NOTE(review): the 0.8 invalid-signature discount is hard-coded here
    /// rather than policy-driven — confirm that is deliberate.
    /// </summary>
    private double ValidateCvss(
        PinnedInput<IReadOnlyDictionary<string, double>> cvssInput,
        AdversarialValidationPolicy policy,
        List<ValidationAuditEntry> auditEntries)
    {
        double confidence = 1.0;
        // Check freshness
        if (cvssInput.IsExpired)
        {
            confidence *= policy.StaleCvssDiscount;
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "cvss",
                Reason = "stale",
                OriginalConfidence = 1.0,
                DiscountApplied = policy.StaleCvssDiscount,
                FinalConfidence = confidence
            });
        }
        // Check signature if present
        if (cvssInput.Signature != null && !cvssInput.Signature.Valid)
        {
            confidence *= 0.8; // 20% discount for invalid signature
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "cvss",
                // Divide back out to recover the pre-discount confidence
                // (may differ from 1.0 if the stale discount already applied).
                Reason = "invalid_signature",
                OriginalConfidence = confidence / 0.8,
                DiscountApplied = 0.8,
                FinalConfidence = confidence
            });
        }
        return Math.Max(confidence, 0);
    }

    /// <summary>
    /// Validates EPSS input: applies only the stale discount (EPSS carries a
    /// strict 7-day TTL) and logs a warning when stale. Returns confidence in [0, 1].
    /// </summary>
    private double ValidateEpss(
        PinnedInput<IReadOnlyDictionary<string, double>> epssInput,
        AdversarialValidationPolicy policy,
        List<ValidationAuditEntry> auditEntries)
    {
        double confidence = 1.0;
        // Check freshness (EPSS has strict 7-day TTL)
        if (epssInput.IsExpired)
        {
            confidence *= policy.StaleEpssDiscount;
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "epss",
                Reason = "stale",
                OriginalConfidence = 1.0,
                DiscountApplied = policy.StaleEpssDiscount,
                FinalConfidence = confidence
            });
            _logger.LogWarning(
                "EPSS data is stale (expired {ExpiredAgo} ago), applying {Discount:P0} discount",
                epssInput.TimeRemaining?.Negate(),
                policy.StaleEpssDiscount);
        }
        return Math.Max(confidence, 0);
    }

    /// <summary>
    /// Validates reachability evidence: stale discount when past TTL, unsigned
    /// discount when no DSSE signature is present, or a fixed 50% discount for
    /// an invalid signature. Returns confidence in [0, 1].
    /// NOTE(review): the 0.5 invalid-signature discount is hard-coded rather
    /// than policy-driven — confirm that is deliberate.
    /// </summary>
    private double ValidateReachability(
        PinnedInput<object> reachabilityInput,
        AdversarialValidationPolicy policy,
        List<ValidationAuditEntry> auditEntries)
    {
        double confidence = 1.0;
        // Check freshness
        if (reachabilityInput.IsExpired)
        {
            confidence *= policy.StaleReachabilityDiscount;
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "reachability",
                Reason = "stale",
                OriginalConfidence = 1.0,
                DiscountApplied = policy.StaleReachabilityDiscount,
                FinalConfidence = confidence
            });
        }
        // Check for DSSE signature
        if (reachabilityInput.Signature == null || !reachabilityInput.Signature.Present)
        {
            confidence *= policy.UnsignedReachabilityDiscount;
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "reachability",
                Reason = "unsigned",
                OriginalConfidence = confidence / policy.UnsignedReachabilityDiscount,
                DiscountApplied = policy.UnsignedReachabilityDiscount,
                FinalConfidence = confidence
            });
        }
        else if (!reachabilityInput.Signature.Valid)
        {
            confidence *= 0.5; // Heavy discount for invalid signature
            auditEntries.Add(new ValidationAuditEntry
            {
                InputType = "reachability",
                Reason = "invalid_signature",
                OriginalConfidence = confidence / 0.5,
                DiscountApplied = 0.5,
                FinalConfidence = confidence
            });
        }
        return Math.Max(confidence, 0);
    }

    /// <summary>
    /// Validates all VEX statements and returns the BEST (maximum) confidence
    /// among them, plus whether any single statement clears
    /// <see cref="AdversarialValidationPolicy.VexOverrideMinConfidence"/>.
    /// Each statement accumulates stale/unsigned/invalid-signature/untrusted-key/
    /// community discounts multiplicatively, with one audit entry per discount.
    /// NOTE(review): the 0.3 invalid-signature discount is hard-coded rather
    /// than policy-driven — confirm that is deliberate.
    /// </summary>
    private async ValueTask<(double Confidence, bool AllowsOverride)> ValidateVexStatementsAsync(
        IReadOnlyList<PinnedInput<object>> vexStatements,
        AdversarialValidationPolicy policy,
        List<ValidationAuditEntry> auditEntries,
        CancellationToken ct)
    {
        double maxConfidence = 0;
        bool anyAllowsOverride = false;
        foreach (var vex in vexStatements)
        {
            double vexConfidence = 1.0;
            // Check freshness
            if (vex.IsExpired)
            {
                vexConfidence *= policy.StaleVexDiscount;
                auditEntries.Add(new ValidationAuditEntry
                {
                    InputType = "vex",
                    Reason = "stale",
                    OriginalConfidence = 1.0,
                    DiscountApplied = policy.StaleVexDiscount,
                    FinalConfidence = vexConfidence
                });
            }
            // Check signature
            if (vex.Signature == null || !vex.Signature.Present)
            {
                vexConfidence *= policy.UnsignedVexDiscount;
                auditEntries.Add(new ValidationAuditEntry
                {
                    InputType = "vex",
                    Reason = "unsigned",
                    OriginalConfidence = vexConfidence / policy.UnsignedVexDiscount,
                    DiscountApplied = policy.UnsignedVexDiscount,
                    FinalConfidence = vexConfidence
                });
            }
            else if (!vex.Signature.Valid)
            {
                vexConfidence *= 0.3; // Heavy discount for invalid signature
                auditEntries.Add(new ValidationAuditEntry
                {
                    InputType = "vex",
                    Reason = "invalid_signature",
                    OriginalConfidence = vexConfidence / 0.3,
                    DiscountApplied = 0.3,
                    FinalConfidence = vexConfidence
                });
            }
            else
            {
                // Valid signature - check key trust
                var keyFingerprint = vex.Signature.KeyId;
                if (keyFingerprint != null)
                {
                    var keyTrusted = await _trustProvider.IsKeyTrustedAsync(keyFingerprint, ct).ConfigureAwait(false);
                    if (!keyTrusted)
                    {
                        vexConfidence *= policy.UntrustedVexKeyDiscount;
                        auditEntries.Add(new ValidationAuditEntry
                        {
                            InputType = "vex",
                            Reason = "untrusted_key",
                            OriginalConfidence = vexConfidence / policy.UntrustedVexKeyDiscount,
                            DiscountApplied = policy.UntrustedVexKeyDiscount,
                            FinalConfidence = vexConfidence
                        });
                    }
                }
                // Check trust level
                if (vex.Signature.TrustLevel == TrustLevel.Community)
                {
                    vexConfidence *= policy.CommunityVexDiscount;
                    auditEntries.Add(new ValidationAuditEntry
                    {
                        InputType = "vex",
                        Reason = "community_source",
                        OriginalConfidence = vexConfidence / policy.CommunityVexDiscount,
                        DiscountApplied = policy.CommunityVexDiscount,
                        FinalConfidence = vexConfidence
                    });
                }
            }
            // Check if this VEX allows override
            if (vexConfidence >= policy.VexOverrideMinConfidence)
            {
                anyAllowsOverride = true;
            }
            maxConfidence = Math.Max(maxConfidence, vexConfidence);
        }
        return (maxConfidence, anyAllowsOverride);
    }

    /// <summary>
    /// Derives the overall status from the four per-category confidences.
    /// Order matters: all >= 0.9 → Valid; any below the policy threshold →
    /// Failed; average &lt; 0.6 → PartialFailure; otherwise Degraded.
    /// </summary>
    private static ValidationStatus DetermineStatus(
        double cvss, double epss, double reachability, double vex,
        double threshold)
    {
        var minConfidence = Math.Min(Math.Min(cvss, epss), Math.Min(reachability, vex));
        var avgConfidence = (cvss + epss + reachability + vex) / 4;
        if (minConfidence >= 0.9)
            return ValidationStatus.Valid;
        if (minConfidence < threshold)
            return ValidationStatus.Failed;
        if (avgConfidence < 0.6)
            return ValidationStatus.PartialFailure;
        return ValidationStatus.Degraded;
    }

    /// <summary>
    /// Maps a TrustLevel to a fixed composite score:
    /// Authoritative 1.0, Vendor 0.9, Community 0.5, Untrusted 0.2, otherwise 0.
    /// </summary>
    private static decimal MapTrustLevelToScore(TrustLevel level) => level switch
    {
        TrustLevel.Authoritative => 1.0m,
        TrustLevel.Vendor => 0.9m,
        TrustLevel.Community => 0.5m,
        TrustLevel.Untrusted => 0.2m,
        _ => 0.0m
    };
}

View File

@@ -0,0 +1,314 @@
// -----------------------------------------------------------------------------
// IAdversarialInputValidator.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-006 - Implement Adversarial Input Validation
// Description: Validates inputs and applies confidence discounts for untrusted signals
// -----------------------------------------------------------------------------
using StellaOps.DeltaVerdict.Inputs;
using StellaOps.DeltaVerdict.Trust;
namespace StellaOps.DeltaVerdict.Validation;
/// <summary>
/// Validates scoring inputs and applies confidence discounts for untrusted signals.
/// Per advisory: "validate reachability evidence (artifact hashes/runtime witness DSSE);
/// downgrade untrusted signals"
/// </summary>
public interface IAdversarialInputValidator
{
    /// <summary>
    /// Validates pinned scoring inputs and applies confidence adjustments.
    /// </summary>
    /// <param name="inputs">The pinned scoring inputs to validate.</param>
    /// <param name="policy">Validation policy with discount rules.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Validated inputs with confidence adjustments applied.</returns>
    ValueTask<ValidatedScoringInputs> ValidateAsync(
        PinnedScoringInputs inputs,
        AdversarialValidationPolicy policy,
        CancellationToken ct = default);
    /// <summary>
    /// Validates a single VEX statement and returns trust result.
    /// </summary>
    /// <param name="vexInput">The VEX pinned input.</param>
    /// <param name="policy">Validation policy.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>VEX trust verification result.</returns>
    ValueTask<VexTrustResult> ValidateVexAsync(
        PinnedInput<object> vexInput,
        AdversarialValidationPolicy policy,
        CancellationToken ct = default);
}
/// <summary>
/// Policy for adversarial input validation.
/// Defines confidence discounts for various untrusted input scenarios.
/// Discounts are multipliers in [0, 1]; lower values penalize harder.
/// </summary>
public sealed record AdversarialValidationPolicy
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Scoring:AdversarialValidation";
    #region VEX Confidence Discounts
    /// <summary>
    /// Confidence multiplier for unsigned VEX statements [0, 1].
    /// Applied when VEX has no signature.
    /// Default: 0.5 (50% confidence).
    /// </summary>
    public double UnsignedVexDiscount { get; init; } = 0.5;
    /// <summary>
    /// Confidence multiplier for VEX signed by untrusted keys [0, 1].
    /// Applied when VEX has signature but key is not in trusted roster.
    /// Default: 0.3 (30% confidence).
    /// </summary>
    public double UntrustedVexKeyDiscount { get; init; } = 0.3;
    /// <summary>
    /// Confidence multiplier for VEX from community (non-vendor) sources [0, 1].
    /// Default: 0.6 (60% confidence).
    /// </summary>
    public double CommunityVexDiscount { get; init; } = 0.6;
    /// <summary>
    /// Confidence multiplier for stale VEX (past TTL) [0, 1].
    /// Default: 0.7 (70% confidence).
    /// </summary>
    public double StaleVexDiscount { get; init; } = 0.7;
    #endregion
    #region Reachability Confidence Discounts
    /// <summary>
    /// Confidence multiplier for unsigned reachability evidence [0, 1].
    /// Applied when reachability proof has no DSSE signature.
    /// Default: 0.7 (70% confidence).
    /// </summary>
    public double UnsignedReachabilityDiscount { get; init; } = 0.7;
    /// <summary>
    /// Confidence multiplier for reachability without runtime witness [0, 1].
    /// Static analysis only.
    /// Default: 0.8 (80% confidence).
    /// </summary>
    public double StaticOnlyReachabilityDiscount { get; init; } = 0.8;
    /// <summary>
    /// Confidence multiplier for stale reachability (past TTL) [0, 1].
    /// Default: 0.6 (60% confidence).
    /// </summary>
    public double StaleReachabilityDiscount { get; init; } = 0.6;
    #endregion
    #region EPSS/CVSS Confidence Discounts
    /// <summary>
    /// Confidence multiplier for stale EPSS scores (past 7-day TTL) [0, 1].
    /// Default: 0.5 (50% confidence).
    /// </summary>
    public double StaleEpssDiscount { get; init; } = 0.5;
    /// <summary>
    /// Confidence multiplier for stale CVSS scores (past TTL) [0, 1].
    /// Default: 0.8 (80% confidence - CVSS changes less frequently).
    /// </summary>
    public double StaleCvssDiscount { get; init; } = 0.8;
    #endregion
    #region Thresholds
    /// <summary>
    /// Minimum confidence threshold [0, 1].
    /// Inputs with confidence below this threshold use default values.
    /// Default: 0.2 (20%).
    /// </summary>
    public double MinConfidenceThreshold { get; init; } = 0.2;
    /// <summary>
    /// Minimum confidence for VEX to allow automatic score override [0, 1].
    /// Default: 0.8 (80% - requires signed vendor VEX).
    /// </summary>
    public double VexOverrideMinConfidence { get; init; } = 0.8;
    #endregion
    #region Default Values
    /// <summary>
    /// Default CVSS score when input is discarded (below threshold).
    /// Default: 5.0 (medium severity).
    /// </summary>
    public double DefaultCvss { get; init; } = 5.0;
    /// <summary>
    /// Default EPSS score when input is discarded.
    /// Default: 0.1 (10% exploitation probability).
    /// </summary>
    public double DefaultEpss { get; init; } = 0.1;
    /// <summary>
    /// Default reachability level when input is discarded.
    /// Default: 0.5 (unknown/possible).
    /// </summary>
    public double DefaultReachability { get; init; } = 0.5;
    #endregion
    /// <summary>
    /// Whether to enable audit logging of discount applications.
    /// </summary>
    public bool EnableAuditLog { get; init; } = true;
    /// <summary>
    /// Default policy with conservative settings.
    /// Note: expression-bodied property — returns a fresh instance on each access.
    /// </summary>
    public static AdversarialValidationPolicy Default => new();
    /// <summary>
    /// Strict policy requiring higher confidence for all inputs.
    /// Returns a fresh instance on each access.
    /// </summary>
    public static AdversarialValidationPolicy Strict => new()
    {
        UnsignedVexDiscount = 0.3,
        UntrustedVexKeyDiscount = 0.1,
        CommunityVexDiscount = 0.4,
        UnsignedReachabilityDiscount = 0.5,
        MinConfidenceThreshold = 0.4,
        VexOverrideMinConfidence = 0.9
    };
    /// <summary>
    /// Lenient policy for testing/development.
    /// Returns a fresh instance on each access.
    /// </summary>
    public static AdversarialValidationPolicy Lenient => new()
    {
        UnsignedVexDiscount = 0.8,
        UntrustedVexKeyDiscount = 0.5,
        CommunityVexDiscount = 0.8,
        UnsignedReachabilityDiscount = 0.9,
        MinConfidenceThreshold = 0.1,
        VexOverrideMinConfidence = 0.5
    };
}
/// <summary>
/// Validated scoring inputs with confidence adjustments.
/// Confidences default to 1.0 (fully trusted) when a category was absent
/// or no discount applied.
/// </summary>
public sealed record ValidatedScoringInputs
{
    /// <summary>
    /// Original pinned inputs.
    /// </summary>
    public required PinnedScoringInputs Original { get; init; }
    /// <summary>
    /// Adjusted CVSS confidence [0, 1].
    /// </summary>
    public double CvssConfidence { get; init; } = 1.0;
    /// <summary>
    /// Adjusted EPSS confidence [0, 1].
    /// </summary>
    public double EpssConfidence { get; init; } = 1.0;
    /// <summary>
    /// Adjusted reachability confidence [0, 1].
    /// </summary>
    public double ReachabilityConfidence { get; init; } = 1.0;
    /// <summary>
    /// Adjusted VEX confidence [0, 1].
    /// </summary>
    public double VexConfidence { get; init; } = 1.0;
    /// <summary>
    /// Whether VEX allows automatic score override.
    /// </summary>
    public bool VexAllowsOverride { get; init; }
    /// <summary>
    /// Validation audit entries.
    /// </summary>
    public IReadOnlyList<ValidationAuditEntry> AuditEntries { get; init; } = [];
    /// <summary>
    /// Overall validation status.
    /// </summary>
    public ValidationStatus Status { get; init; } = ValidationStatus.Valid;
    /// <summary>
    /// Minimum confidence across all inputs (includes VexConfidence).
    /// </summary>
    public double MinConfidence => Math.Min(Math.Min(CvssConfidence, EpssConfidence),
        Math.Min(ReachabilityConfidence, VexConfidence));
    /// <summary>
    /// Whether all inputs meet minimum confidence threshold.
    /// NOTE(review): this checks CVSS/EPSS/reachability only — VexConfidence is
    /// excluded, unlike <see cref="MinConfidence"/>. Confirm whether the
    /// omission is deliberate (VEX being optional) or an oversight.
    /// </summary>
    public bool AllInputsValid(double threshold) =>
        CvssConfidence >= threshold &&
        EpssConfidence >= threshold &&
        ReachabilityConfidence >= threshold;
}
/// <summary>
/// Audit entry for validation discount.
/// Records a single confidence adjustment so that FinalConfidence ==
/// OriginalConfidence * DiscountApplied for the adjustment it describes.
/// </summary>
public sealed record ValidationAuditEntry
{
    /// <summary>
    /// Input type that was discounted (e.g. "cvss", "epss", "reachability", "vex").
    /// </summary>
    public required string InputType { get; init; }
    /// <summary>
    /// Reason for discount (e.g. "stale", "unsigned", "invalid_signature").
    /// </summary>
    public required string Reason { get; init; }
    /// <summary>
    /// Original confidence before discount.
    /// </summary>
    public double OriginalConfidence { get; init; }
    /// <summary>
    /// Discount multiplier applied.
    /// </summary>
    public double DiscountApplied { get; init; }
    /// <summary>
    /// Final confidence after discount.
    /// </summary>
    public double FinalConfidence { get; init; }
    /// <summary>
    /// Validation timestamp. Captured via DateTimeOffset.UtcNow at construction
    /// unless explicitly overridden (non-deterministic by default).
    /// </summary>
    public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Overall validation status, ordered from best to worst outcome.
/// Derived from per-category confidences (see AdversarialInputValidator).
/// </summary>
public enum ValidationStatus
{
    /// <summary>All inputs validated successfully.</summary>
    Valid,
    /// <summary>Some inputs have reduced confidence.</summary>
    Degraded,
    /// <summary>Some inputs failed validation (below threshold).</summary>
    PartialFailure,
    /// <summary>Critical validation failure.</summary>
    Failed
}

View File

@@ -0,0 +1,70 @@
// -----------------------------------------------------------------------------
// ValidationServiceCollectionExtensions.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-006 - Implement Adversarial Input Validation
// Description: DI registration for adversarial validation services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.DeltaVerdict.Validation;
/// <summary>
/// DI helpers that wire up adversarial input validation.
/// </summary>
public static class ValidationServiceCollectionExtensions
{
    /// <summary>
    /// Registers adversarial input validation, optionally binding the policy
    /// from the "Scoring:AdversarialValidation" configuration section.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">Optional configuration to bind the policy from.</param>
    /// <returns>The same service collection, for chaining.</returns>
    public static IServiceCollection AddAdversarialInputValidation(
        this IServiceCollection services,
        IConfiguration? configuration = null)
    {
        if (configuration is not null)
        {
            var section = configuration.GetSection(AdversarialValidationPolicy.SectionName);
            services.Configure<AdversarialValidationPolicy>(section);
        }

        services.AddSingleton<IAdversarialInputValidator, AdversarialInputValidator>();
        return services;
    }

    /// <summary>
    /// Registers adversarial input validation with a policy shaped by a delegate.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configurePolicy">Delegate that mutates the bound policy.</param>
    /// <returns>The same service collection, for chaining.</returns>
    public static IServiceCollection AddAdversarialInputValidation(
        this IServiceCollection services,
        Action<AdversarialValidationPolicy> configurePolicy)
    {
        services.Configure(configurePolicy);
        services.AddSingleton<IAdversarialInputValidator, AdversarialInputValidator>();
        return services;
    }

    /// <summary>
    /// Registers adversarial input validation using a fixed, pre-built policy.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="policy">The exact policy instance to use.</param>
    /// <returns>The same service collection, for chaining.</returns>
    public static IServiceCollection AddAdversarialInputValidation(
        this IServiceCollection services,
        AdversarialValidationPolicy policy)
    {
        var wrapped = Microsoft.Extensions.Options.Options.Create(policy);
        services.AddSingleton(wrapped);
        services.AddSingleton<IAdversarialInputValidator, AdversarialInputValidator>();
        return services;
    }
}

View File

@@ -0,0 +1 @@
{"cvss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","source":"nvd.nist.gov","source_digest":"sha256:def789ghi012","ttl":"90.00:00:00","value":{"CVE-2024-1234":7.5,"CVE-2024-5678":4.3}},"epss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","source":"first.org/epss","source_digest":"sha256:abc123def456","ttl":"7.00:00:00","value":{"CVE-2024-1234":0.42,"CVE-2024-5678":0.15}}}

View File

@@ -0,0 +1 @@
placeholder_digest_will_be_computed_on_first_run

View File

@@ -0,0 +1 @@
{"cvss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","signature":{"format":"JWS","key_id":"sha256:nvd_key_2024","present":true,"signed_at":"2026-01-18T09:55:00+00:00","signer":"nvd@nist.gov","trust_level":"authoritative","valid":true},"source":"nvd.nist.gov","source_digest":"sha256:cvss_digest_001","ttl":"90.00:00:00","value":{"CVE-2024-1234":7.5}},"epss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","source":"first.org/epss","source_digest":"sha256:epss_digest_001","source_version":"2026-01-17","ttl":"7.00:00:00","value":{"CVE-2024-1234":0.42}},"kev_entries":{"retrieved_at":"2026-01-18T06:00:00+00:00","source":"cisa.gov/known-exploited-vulnerabilities","source_digest":"sha256:kev_digest_001","ttl":"1.00:00:00","value":["CVE-2024-9999"]},"reachability":{"retrieved_at":"2026-01-18T11:00:00+00:00","source":"stellaops/reachability","source_digest":"sha256:reach_digest_001","source_version":"2.1.0","ttl":"7.00:00:00","value":{"confidence":0.85,"reachable":true}},"sbom":{"retrieved_at":"2026-01-18T09:00:00+00:00","source":"scanner/trivy","source_digest":"sha256:sbom_digest_001","ttl":"30.00:00:00","value":{"components":42}},"trusted_vex_keys":[{"algorithm":"EC-P256","fingerprint":"sha256:vendor_a_key","issuer":"Vendor A Security Team","trust_level":"vendor","valid_from":"2025-01-01T00:00:00+00:00","valid_until":"2027-01-01T00:00:00+00:00"}],"vex_statements":[{"retrieved_at":"2026-01-18T08:00:00+00:00","signature":{"format":"DSSE","key_id":"sha256:vendor_a_key","present":true,"trust_level":"vendor","valid":true},"source":"vendor-a.com/vex","source_digest":"sha256:vex_digest_001","ttl":"30.00:00:00","value":{"justification":"vulnerable_code_not_present","status":"not_affected"}}]}

View File

@@ -0,0 +1 @@
{"bundle_id":"golden-bundle-001","created_at":"2026-01-18T12:00:00+00:00","inputs":{"cvss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","source":"nvd.nist.gov","source_digest":"sha256:def789ghi012","ttl":"90.00:00:00","value":{"CVE-2024-1234":7.5,"CVE-2024-5678":4.3}},"epss_scores":{"retrieved_at":"2026-01-18T10:00:00+00:00","source":"first.org/epss","source_digest":"sha256:abc123def456","ttl":"7.00:00:00","value":{"CVE-2024-1234":0.42,"CVE-2024-5678":0.15}}},"schema_version":"1.0.0","validation":{"combined_confidence":0.95,"stale_input_count":0,"status":"valid","unsigned_input_count":2,"vex_override_allowed":false}}

View File

@@ -0,0 +1,397 @@
// -----------------------------------------------------------------------------
// VerdictInputsGoldenSnapshotTests.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-008 - VerdictInputs Serialization with Provenance
// Description: Golden snapshot tests for VerdictInputsSerializer determinism
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using StellaOps.DeltaVerdict.Inputs;
using StellaOps.DeltaVerdict.Serialization;
using Xunit;

namespace StellaOps.DeltaVerdict.Tests.Serialization;

/// <summary>
/// Golden snapshot tests to ensure serialization determinism across versions.
/// These tests verify that the same inputs always produce the same JSON output.
/// Golden files are auto-generated on first run and must be reviewed/committed.
/// </summary>
public sealed class VerdictInputsGoldenSnapshotTests
{
    // Golden files are copied to the test output directory by the project file.
    private static readonly string GoldenSnapshotsDir = Path.Combine(
        AppContext.BaseDirectory,
        "Serialization",
        "GoldenSnapshots");

    [Fact]
    public void Serialize_MatchesGoldenSnapshot_BasicInputs()
    {
        var inputs = CreateGoldenInputs();
        var json = VerdictInputsSerializer.Serialize(inputs);
        var expectedJson = GetExpectedGoldenJson("basic_inputs.json");

        // Parse both to compare structure (ignoring whitespace).
        using var actualDoc = JsonDocument.Parse(json);
        using var expectedDoc = JsonDocument.Parse(expectedJson);

        CompareJsonDocuments(actualDoc, expectedDoc).Should().BeTrue(
            "Serialized JSON should match golden snapshot. " +
            "If this test fails after intentional changes, update the golden file.");
    }

    [Fact]
    public void Serialize_MatchesGoldenSnapshot_FullInputs()
    {
        var inputs = CreateFullGoldenInputs();
        var json = VerdictInputsSerializer.Serialize(inputs);
        var expectedJson = GetExpectedGoldenJson("full_inputs.json");

        using var actualDoc = JsonDocument.Parse(json);
        using var expectedDoc = JsonDocument.Parse(expectedJson);

        CompareJsonDocuments(actualDoc, expectedDoc).Should().BeTrue(
            "Serialized JSON should match golden snapshot for full inputs.");
    }

    [Fact]
    public void ComputeDigest_MatchesGoldenDigest()
    {
        var inputs = CreateGoldenInputs();
        var digest = VerdictInputsSerializer.ComputeDigest(inputs);

        // This digest is computed from the golden snapshot and should never change
        // for the same input structure.
        var expectedDigest = GetExpectedGoldenDigest("basic_inputs_digest.txt");

        digest.Should().Be(expectedDigest,
            "Digest should match golden value. " +
            "If this test fails, serialization format has changed which breaks replay.");
    }

    [Fact]
    public void Serialize_IsDeterministicAcrossMultipleCalls()
    {
        var inputs = CreateGoldenInputs();
        var results = new List<string>();

        // Serialize 100 times to catch any non-determinism (e.g. dictionary ordering).
        for (int i = 0; i < 100; i++)
        {
            results.Add(VerdictInputsSerializer.Serialize(inputs));
        }

        results.Distinct().Should().HaveCount(1,
            "Serialization must be deterministic - all calls should produce identical output");
    }

    [Fact]
    public void ComputeDigest_IsDeterministicAcrossMultipleCalls()
    {
        var inputs = CreateGoldenInputs();
        var results = new List<string>();

        for (int i = 0; i < 100; i++)
        {
            results.Add(VerdictInputsSerializer.ComputeDigest(inputs));
        }

        results.Distinct().Should().HaveCount(1,
            "Digest computation must be deterministic");
    }

    [Fact]
    public void VerdictBundle_MatchesGoldenSnapshot()
    {
        var bundle = CreateGoldenBundle();
        var json = VerdictBundleSerializer.Serialize(bundle);
        var expectedJson = GetExpectedGoldenJson("verdict_bundle.json");

        using var actualDoc = JsonDocument.Parse(json);
        using var expectedDoc = JsonDocument.Parse(expectedJson);

        CompareJsonDocuments(actualDoc, expectedDoc).Should().BeTrue(
            "VerdictBundle serialization should match golden snapshot.");
    }

    #region Golden File Helpers

    /// <summary>
    /// Reads the named golden JSON file, generating it from the canonical
    /// inputs on first run (initial setup) if it does not exist yet.
    /// </summary>
    private static string GetExpectedGoldenJson(string filename)
    {
        var path = Path.Combine(GoldenSnapshotsDir, filename);
        if (!File.Exists(path))
        {
            // Auto-generate golden file if it doesn't exist (for initial setup).
            var json = filename switch
            {
                "basic_inputs.json" => VerdictInputsSerializer.Serialize(CreateGoldenInputs()),
                "full_inputs.json" => VerdictInputsSerializer.Serialize(CreateFullGoldenInputs()),
                "verdict_bundle.json" => VerdictBundleSerializer.Serialize(CreateGoldenBundle()),
                _ => throw new ArgumentException($"Unknown golden file: {filename}", nameof(filename))
            };
            Directory.CreateDirectory(GoldenSnapshotsDir);
            File.WriteAllText(path, json + Environment.NewLine);
            return json;
        }
        return File.ReadAllText(path).Trim();
    }

    /// <summary>
    /// Reads the named golden digest file, generating it from the canonical
    /// inputs on first run if it does not exist yet.
    /// </summary>
    private static string GetExpectedGoldenDigest(string filename)
    {
        var path = Path.Combine(GoldenSnapshotsDir, filename);
        if (!File.Exists(path))
        {
            // Auto-generate golden digest if it doesn't exist.
            var digest = VerdictInputsSerializer.ComputeDigest(CreateGoldenInputs());
            Directory.CreateDirectory(GoldenSnapshotsDir);
            File.WriteAllText(path, digest + Environment.NewLine);
            return digest;
        }
        return File.ReadAllText(path).Trim();
    }

    private static bool CompareJsonDocuments(JsonDocument actual, JsonDocument expected)
    {
        return CompareJsonElements(actual.RootElement, expected.RootElement);
    }

    /// <summary>
    /// Structural comparison of two JSON elements: object keys are matched by
    /// name (order-insensitive), arrays positionally, numbers by raw text so
    /// formatting differences (e.g. 1 vs 1.0) are treated as differences.
    /// </summary>
    private static bool CompareJsonElements(JsonElement actual, JsonElement expected)
    {
        if (actual.ValueKind != expected.ValueKind)
        {
            return false;
        }
        return actual.ValueKind switch
        {
            JsonValueKind.Object => CompareJsonObjects(actual, expected),
            JsonValueKind.Array => CompareJsonArrays(actual, expected),
            JsonValueKind.String => actual.GetString() == expected.GetString(),
            JsonValueKind.Number => actual.GetRawText() == expected.GetRawText(),
            JsonValueKind.True or JsonValueKind.False => actual.GetBoolean() == expected.GetBoolean(),
            JsonValueKind.Null => true,
            _ => false
        };
    }

    private static bool CompareJsonObjects(JsonElement actual, JsonElement expected)
    {
        var actualProps = actual.EnumerateObject().ToDictionary(p => p.Name, p => p.Value);
        var expectedProps = expected.EnumerateObject().ToDictionary(p => p.Name, p => p.Value);
        if (actualProps.Count != expectedProps.Count)
        {
            return false;
        }
        foreach (var kvp in expectedProps)
        {
            if (!actualProps.TryGetValue(kvp.Key, out var actualValue))
            {
                return false;
            }
            if (!CompareJsonElements(actualValue, kvp.Value))
            {
                return false;
            }
        }
        return true;
    }

    private static bool CompareJsonArrays(JsonElement actual, JsonElement expected)
    {
        var actualItems = actual.EnumerateArray().ToList();
        var expectedItems = expected.EnumerateArray().ToList();
        if (actualItems.Count != expectedItems.Count)
        {
            return false;
        }
        for (int i = 0; i < actualItems.Count; i++)
        {
            if (!CompareJsonElements(actualItems[i], expectedItems[i]))
            {
                return false;
            }
        }
        return true;
    }

    #endregion

    #region Golden Test Data

    /// <summary>
    /// Creates a canonical set of test inputs for golden snapshot comparison.
    /// WARNING: Do not modify these values without updating the golden files!
    /// </summary>
    private static PinnedScoringInputs CreateGoldenInputs()
    {
        return new PinnedScoringInputs
        {
            EpssScores = new PinnedInput<IReadOnlyDictionary<string, double>>
            {
                Value = new Dictionary<string, double>
                {
                    ["CVE-2024-1234"] = 0.42,
                    ["CVE-2024-5678"] = 0.15
                },
                SourceDigest = "sha256:abc123def456",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
                Source = "first.org/epss",
                Ttl = TimeSpan.FromDays(7)
            },
            CvssScores = new PinnedInput<IReadOnlyDictionary<string, double>>
            {
                Value = new Dictionary<string, double>
                {
                    ["CVE-2024-1234"] = 7.5,
                    ["CVE-2024-5678"] = 4.3
                },
                SourceDigest = "sha256:def789ghi012",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
                Source = "nvd.nist.gov",
                Ttl = TimeSpan.FromDays(90)
            }
        };
    }

    /// <summary>
    /// Creates a full set of test inputs with all fields populated.
    /// WARNING: Do not modify these values without updating the golden files!
    /// </summary>
    private static PinnedScoringInputs CreateFullGoldenInputs()
    {
        return new PinnedScoringInputs
        {
            Sbom = new PinnedInput<object>
            {
                Value = new { components = 42 },
                SourceDigest = "sha256:sbom_digest_001",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T09:00:00Z"),
                Source = "scanner/trivy",
                Ttl = TimeSpan.FromDays(30)
            },
            EpssScores = new PinnedInput<IReadOnlyDictionary<string, double>>
            {
                Value = new Dictionary<string, double>
                {
                    ["CVE-2024-1234"] = 0.42
                },
                SourceDigest = "sha256:epss_digest_001",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
                Source = "first.org/epss",
                SourceVersion = "2026-01-17",
                Ttl = TimeSpan.FromDays(7)
            },
            CvssScores = new PinnedInput<IReadOnlyDictionary<string, double>>
            {
                Value = new Dictionary<string, double>
                {
                    ["CVE-2024-1234"] = 7.5
                },
                SourceDigest = "sha256:cvss_digest_001",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
                Source = "nvd.nist.gov",
                Ttl = TimeSpan.FromDays(90),
                Signature = new InputSignature
                {
                    Present = true,
                    Valid = true,
                    Format = "JWS",
                    KeyId = "sha256:nvd_key_2024",
                    Signer = "nvd@nist.gov",
                    SignedAt = DateTimeOffset.Parse("2026-01-18T09:55:00Z"),
                    TrustLevel = TrustLevel.Authoritative
                }
            },
            Reachability = new PinnedInput<object>
            {
                Value = new { reachable = true, confidence = 0.85 },
                SourceDigest = "sha256:reach_digest_001",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T11:00:00Z"),
                Source = "stellaops/reachability",
                SourceVersion = "2.1.0",
                Ttl = TimeSpan.FromDays(7)
            },
            VexStatements = new List<PinnedInput<object>>
            {
                new PinnedInput<object>
                {
                    Value = new { status = "not_affected", justification = "vulnerable_code_not_present" },
                    SourceDigest = "sha256:vex_digest_001",
                    RetrievedAt = DateTimeOffset.Parse("2026-01-18T08:00:00Z"),
                    Source = "vendor-a.com/vex",
                    Ttl = TimeSpan.FromDays(30),
                    Signature = new InputSignature
                    {
                        Present = true,
                        Valid = true,
                        Format = "DSSE",
                        KeyId = "sha256:vendor_a_key",
                        TrustLevel = TrustLevel.Vendor
                    }
                }
            },
            KevEntries = new PinnedInput<IReadOnlySet<string>>
            {
                Value = new HashSet<string> { "CVE-2024-9999" },
                SourceDigest = "sha256:kev_digest_001",
                RetrievedAt = DateTimeOffset.Parse("2026-01-18T06:00:00Z"),
                Source = "cisa.gov/known-exploited-vulnerabilities",
                Ttl = TimeSpan.FromDays(1)
            },
            TrustedVexKeys = new List<TrustedKeyEntry>
            {
                new TrustedKeyEntry
                {
                    Fingerprint = "sha256:vendor_a_key",
                    Issuer = "Vendor A Security Team",
                    Algorithm = "EC-P256",
                    ValidFrom = DateTimeOffset.Parse("2025-01-01T00:00:00Z"),
                    ValidUntil = DateTimeOffset.Parse("2027-01-01T00:00:00Z"),
                    TrustLevel = TrustLevel.Vendor
                }
            }
        };
    }

    /// <summary>
    /// Creates the canonical bundle used by the verdict-bundle golden test.
    /// </summary>
    private static VerdictBundle CreateGoldenBundle()
    {
        return new VerdictBundle
        {
            BundleId = "golden-bundle-001",
            CreatedAt = DateTimeOffset.Parse("2026-01-18T12:00:00Z"),
            Inputs = CreateGoldenInputs(),
            Validation = new ValidationSummary
            {
                Status = ValidationStatus.Valid,
                CombinedConfidence = 0.95,
                StaleInputCount = 0,
                UnsignedInputCount = 2,
                VexOverrideAllowed = false
            }
        };
    }

    #endregion
}

View File

@@ -0,0 +1,457 @@
// -----------------------------------------------------------------------------
// VerdictInputsSerializerTests.cs
// Sprint: SPRINT_20260118_031_LIB_input_pinning_trusted_vex_keys
// Task: TASK-031-008 - VerdictInputs Serialization with Provenance
// Description: Unit tests for VerdictInputsSerializer
// -----------------------------------------------------------------------------
using FluentAssertions;
using StellaOps.DeltaVerdict.Inputs;
using StellaOps.DeltaVerdict.Serialization;
using Xunit;

namespace StellaOps.DeltaVerdict.Tests.Serialization;

/// <summary>
/// Unit tests for <c>VerdictInputsSerializer</c> and <c>VerdictBundleSerializer</c>:
/// snake_case naming, round-tripping, digest determinism, and canonical output.
/// </summary>
public sealed class VerdictInputsSerializerTests
{
    #region Snake Case Naming Tests

    [Fact]
    public void Serialize_UsesSnakeCaseNaming()
    {
        var inputs = CreateTestInputs();
        var json = VerdictInputsSerializer.Serialize(inputs);

        // Verify snake_case property names.
        json.Should().Contain("\"source_digest\"");
        json.Should().Contain("\"retrieved_at\"");
        json.Should().Contain("\"epss_scores\"");
        json.Should().Contain("\"cvss_scores\"");
        json.Should().Contain("\"vex_statements\"");
        json.Should().Contain("\"kev_entries\"");
        json.Should().Contain("\"trusted_vex_keys\"");
        json.Should().Contain("\"manifest_digest\"");

        // Verify PascalCase NOT used.
        json.Should().NotContain("\"SourceDigest\"");
        json.Should().NotContain("\"RetrievedAt\"");
        json.Should().NotContain("\"EpssScores\"");
    }

    [Fact]
    public void Serialize_UsesSnakeCaseForNestedObjects()
    {
        var inputs = CreateTestInputsWithSignature();
        var json = VerdictInputsSerializer.Serialize(inputs);

        // Verify nested object property names.
        json.Should().Contain("\"key_id\"");
        json.Should().Contain("\"trust_level\"");
        json.Should().Contain("\"signed_at\"");
        json.Should().Contain("\"source_version\"");
    }

    [Fact]
    public void Serialize_UsesSnakeCaseForEnumValues()
    {
        var inputs = CreateTestInputsWithSignature();
        var json = VerdictInputsSerializer.Serialize(inputs);

        // Verify at least one enum value serialized as a snake_case string.
        // (FluentAssertions string assertions have no Or-chaining, so combine
        // the alternatives into a single boolean check.)
        var hasSnakeCaseEnum =
            json.Contains("\"authoritative\"", StringComparison.Ordinal) ||
            json.Contains("\"vendor\"", StringComparison.Ordinal) ||
            json.Contains("\"community\"", StringComparison.Ordinal);
        hasSnakeCaseEnum.Should().BeTrue(
            "enum values should be serialized as lower-case snake_case strings");
    }

    #endregion

    #region Round-Trip Tests

    [Fact]
    public void SerializeDeserialize_RoundTripsBasicInputs()
    {
        var inputs = CreateTestInputs();
        var json = VerdictInputsSerializer.Serialize(inputs);
        var deserialized = VerdictInputsSerializer.Deserialize(json);

        deserialized.Should().NotBeNull();
        deserialized.EpssScores.Should().NotBeNull();
        deserialized.EpssScores!.SourceDigest.Should().Be(inputs.EpssScores!.SourceDigest);
        deserialized.EpssScores.RetrievedAt.Should().Be(inputs.EpssScores.RetrievedAt);
        deserialized.EpssScores.Source.Should().Be(inputs.EpssScores.Source);
    }

    [Fact]
    public void SerializeDeserialize_RoundTripsWithSignature()
    {
        var inputs = CreateTestInputsWithSignature();
        var json = VerdictInputsSerializer.Serialize(inputs);
        var deserialized = VerdictInputsSerializer.Deserialize(json);

        deserialized.Should().NotBeNull();
        deserialized.CvssScores.Should().NotBeNull();
        deserialized.CvssScores!.Signature.Should().NotBeNull();
        deserialized.CvssScores.Signature!.KeyId.Should().Be(inputs.CvssScores!.Signature!.KeyId);
        deserialized.CvssScores.Signature.TrustLevel.Should().Be(inputs.CvssScores.Signature.TrustLevel);
        deserialized.CvssScores.Signature.Valid.Should().Be(inputs.CvssScores.Signature.Valid);
    }

    [Fact]
    public void SerializeDeserialize_RoundTripsWithTrustedKeys()
    {
        var inputs = CreateTestInputsWithTrustedKeys();
        var json = VerdictInputsSerializer.Serialize(inputs);
        var deserialized = VerdictInputsSerializer.Deserialize(json);

        deserialized.Should().NotBeNull();
        deserialized.TrustedVexKeys.Should().NotBeNull();
        deserialized.TrustedVexKeys!.Count.Should().Be(2);
        deserialized.TrustedVexKeys[0].Fingerprint.Should().Be("sha256:key1fingerprint");
        deserialized.TrustedVexKeys[1].Issuer.Should().Be("Vendor B");
    }

    [Fact]
    public void SerializeDeserialize_PreservesNullFields()
    {
        var inputs = new PinnedScoringInputs
        {
            EpssScores = CreateTestEpssInput()
            // All other fields are null.
        };

        var json = VerdictInputsSerializer.Serialize(inputs);
        var deserialized = VerdictInputsSerializer.Deserialize(json);

        deserialized.Sbom.Should().BeNull();
        deserialized.CvssScores.Should().BeNull();
        deserialized.Reachability.Should().BeNull();
        deserialized.VexStatements.Should().BeNull();
        deserialized.KevEntries.Should().BeNull();
        deserialized.TrustedVexKeys.Should().BeNull();
    }

    #endregion

    #region Digest Tests

    [Fact]
    public void ComputeDigest_IsDeterministic()
    {
        var inputs = CreateTestInputs();

        var digest1 = VerdictInputsSerializer.ComputeDigest(inputs);
        var digest2 = VerdictInputsSerializer.ComputeDigest(inputs);

        digest1.Should().Be(digest2);
        digest1.Should().HaveLength(64); // SHA-256 hex = 64 chars
        digest1.Should().MatchRegex("^[a-f0-9]{64}$");
    }

    [Fact]
    public void ComputePrefixedDigest_HasSha256Prefix()
    {
        var inputs = CreateTestInputs();

        var digest = VerdictInputsSerializer.ComputePrefixedDigest(inputs);

        digest.Should().StartWith("sha256:");
        digest.Should().HaveLength(71); // "sha256:" (7) + 64 hex chars
    }

    [Fact]
    public void ComputeDigest_ChangesWithContent()
    {
        var inputs1 = CreateTestInputs();
        var inputs2 = inputs1 with
        {
            EpssScores = inputs1.EpssScores! with
            {
                SourceDigest = "sha256:different_digest"
            }
        };

        var digest1 = VerdictInputsSerializer.ComputeDigest(inputs1);
        var digest2 = VerdictInputsSerializer.ComputeDigest(inputs2);

        digest1.Should().NotBe(digest2);
    }

    [Fact]
    public void ComputeDigest_IgnoresManifestDigestField()
    {
        var inputs1 = CreateTestInputs();
        var inputs2 = inputs1 with { ManifestDigest = "sha256:some_digest" };

        var digest1 = VerdictInputsSerializer.ComputeDigest(inputs1);
        var digest2 = VerdictInputsSerializer.ComputeDigest(inputs2);

        // ManifestDigest is mutable and should be excluded from digest computation.
        digest1.Should().Be(digest2);
    }

    [Fact]
    public void WithDigest_SetsManifestDigest()
    {
        var inputs = CreateTestInputs();
        inputs.ManifestDigest.Should().BeNull();

        var withDigest = VerdictInputsSerializer.WithDigest(inputs);

        withDigest.ManifestDigest.Should().NotBeNull();
        withDigest.ManifestDigest.Should().StartWith("sha256:");
    }

    [Fact]
    public void VerifyDigest_ReturnsTrueForValidDigest()
    {
        var inputs = VerdictInputsSerializer.WithDigest(CreateTestInputs());

        var isValid = VerdictInputsSerializer.VerifyDigest(inputs);

        isValid.Should().BeTrue();
    }

    [Fact]
    public void VerifyDigest_ReturnsFalseForTamperedContent()
    {
        var inputs = VerdictInputsSerializer.WithDigest(CreateTestInputs());
        var tampered = inputs with
        {
            EpssScores = inputs.EpssScores! with
            {
                SourceDigest = "sha256:tampered"
            }
        };

        var isValid = VerdictInputsSerializer.VerifyDigest(tampered);

        isValid.Should().BeFalse();
    }

    [Fact]
    public void VerifyDigest_ReturnsFalseForMissingDigest()
    {
        var inputs = CreateTestInputs();

        var isValid = VerdictInputsSerializer.VerifyDigest(inputs);

        isValid.Should().BeFalse();
    }

    #endregion

    #region Canonical JSON Tests

    [Fact]
    public void Serialize_ProducesCanonicalOutput()
    {
        var inputs = CreateTestInputs();

        var json1 = VerdictInputsSerializer.Serialize(inputs);
        var json2 = VerdictInputsSerializer.Serialize(inputs);

        // Canonical JSON should be byte-for-byte identical.
        json1.Should().Be(json2);
    }

    [Fact]
    public void Serialize_OutputIsCompact()
    {
        var inputs = CreateTestInputs();

        var json = VerdictInputsSerializer.Serialize(inputs);

        // Canonical JSON has no unnecessary whitespace (the test inputs contain
        // no string values with embedded spaces, so a blanket check is safe here).
        json.Should().NotContain("\n");
        json.Should().NotContain(" ");
    }

    [Fact]
    public void Serialize_SortsKeysAlphabetically()
    {
        var inputs = CreateTestInputs();

        var json = VerdictInputsSerializer.Serialize(inputs);

        // RFC 8785 requires lexicographic key ordering:
        // "cvss_scores" must come before "epss_scores".
        var cvssIndex = json.IndexOf("\"cvss_scores\"", StringComparison.Ordinal);
        var epssIndex = json.IndexOf("\"epss_scores\"", StringComparison.Ordinal);

        // IndexOf returns -1 when absent; compare against >= 0 (not > 0).
        if (cvssIndex >= 0 && epssIndex >= 0)
        {
            cvssIndex.Should().BeLessThan(epssIndex);
        }
    }

    #endregion

    #region VerdictBundle Tests

    [Fact]
    public void VerdictBundleSerializer_RoundTrips()
    {
        var bundle = CreateTestBundle();
        var json = VerdictBundleSerializer.Serialize(bundle);
        var deserialized = VerdictBundleSerializer.Deserialize(json);

        deserialized.BundleId.Should().Be(bundle.BundleId);
        deserialized.CreatedAt.Should().Be(bundle.CreatedAt);
        deserialized.SchemaVersion.Should().Be(bundle.SchemaVersion);
        deserialized.Inputs.Should().NotBeNull();
    }

    [Fact]
    public void VerdictBundleSerializer_DigestVerification()
    {
        var bundle = VerdictBundleSerializer.WithDigest(CreateTestBundle());

        var isValid = VerdictBundleSerializer.VerifyDigest(bundle);

        isValid.Should().BeTrue();
    }

    [Fact]
    public void VerdictBundleSerializer_IgnoresBundleDigestInComputation()
    {
        var bundle1 = CreateTestBundle();
        var bundle2 = bundle1 with { BundleDigest = "sha256:existing_digest" };

        var digest1 = VerdictBundleSerializer.ComputeDigest(bundle1);
        var digest2 = VerdictBundleSerializer.ComputeDigest(bundle2);

        digest1.Should().Be(digest2);
    }

    #endregion

    #region Test Helpers

    private static PinnedScoringInputs CreateTestInputs()
    {
        return new PinnedScoringInputs
        {
            EpssScores = CreateTestEpssInput(),
            CvssScores = CreateTestCvssInput()
        };
    }

    private static PinnedScoringInputs CreateTestInputsWithSignature()
    {
        return new PinnedScoringInputs
        {
            EpssScores = CreateTestEpssInput(),
            CvssScores = CreateTestCvssInputWithSignature()
        };
    }

    private static PinnedScoringInputs CreateTestInputsWithTrustedKeys()
    {
        return new PinnedScoringInputs
        {
            EpssScores = CreateTestEpssInput(),
            TrustedVexKeys = new List<TrustedKeyEntry>
            {
                new TrustedKeyEntry
                {
                    Fingerprint = "sha256:key1fingerprint",
                    Issuer = "Vendor A",
                    Algorithm = "RSA-4096",
                    ValidFrom = DateTimeOffset.Parse("2025-01-01T00:00:00Z"),
                    ValidUntil = DateTimeOffset.Parse("2027-01-01T00:00:00Z"),
                    TrustLevel = TrustLevel.Vendor
                },
                new TrustedKeyEntry
                {
                    Fingerprint = "sha256:key2fingerprint",
                    Issuer = "Vendor B",
                    Algorithm = "EC-P256",
                    ValidFrom = DateTimeOffset.Parse("2025-06-01T00:00:00Z"),
                    TrustLevel = TrustLevel.Authoritative
                }
            }
        };
    }

    private static PinnedInput<IReadOnlyDictionary<string, double>> CreateTestEpssInput()
    {
        return new PinnedInput<IReadOnlyDictionary<string, double>>
        {
            Value = new Dictionary<string, double>
            {
                ["CVE-2024-1234"] = 0.42,
                ["CVE-2024-5678"] = 0.15
            },
            SourceDigest = "sha256:epss_model_2026_01_17",
            RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
            Source = "first.org/epss",
            Ttl = TimeSpan.FromDays(7)
        };
    }

    private static PinnedInput<IReadOnlyDictionary<string, double>> CreateTestCvssInput()
    {
        return new PinnedInput<IReadOnlyDictionary<string, double>>
        {
            Value = new Dictionary<string, double>
            {
                ["CVE-2024-1234"] = 7.5,
                ["CVE-2024-5678"] = 4.3
            },
            SourceDigest = "sha256:nvd_response_abc123",
            RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
            Source = "nvd.nist.gov",
            Ttl = TimeSpan.FromDays(90)
        };
    }

    private static PinnedInput<IReadOnlyDictionary<string, double>> CreateTestCvssInputWithSignature()
    {
        return new PinnedInput<IReadOnlyDictionary<string, double>>
        {
            Value = new Dictionary<string, double>
            {
                ["CVE-2024-1234"] = 7.5
            },
            SourceDigest = "sha256:nvd_response_abc123",
            RetrievedAt = DateTimeOffset.Parse("2026-01-18T10:00:00Z"),
            Source = "nvd.nist.gov",
            Ttl = TimeSpan.FromDays(90),
            Signature = new InputSignature
            {
                Present = true,
                Valid = true,
                Format = "JWS",
                KeyId = "sha256:nvd_signing_key_2024",
                Signer = "nvd@nist.gov",
                SignedAt = DateTimeOffset.Parse("2026-01-18T09:55:00Z"),
                TrustLevel = TrustLevel.Authoritative
            }
        };
    }

    private static VerdictBundle CreateTestBundle()
    {
        return new VerdictBundle
        {
            BundleId = "test-bundle-001",
            CreatedAt = DateTimeOffset.Parse("2026-01-18T12:00:00Z"),
            Inputs = CreateTestInputs(),
            Validation = new ValidationSummary
            {
                Status = ValidationStatus.Valid,
                CombinedConfidence = 0.95,
                StaleInputCount = 0,
                UnsignedInputCount = 1,
                VexOverrideAllowed = false
            }
        };
    }

    #endregion
}

View File

@@ -0,0 +1,36 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net9.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <RootNamespace>StellaOps.DeltaVerdict.Tests</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <PackageReference Include="xunit.runner.visualstudio">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\StellaOps.DeltaVerdict.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- Copy ALL golden snapshot artifacts, not just *.json: the digest golden
         test reads basic_inputs_digest.txt, which the previous *.json glob
         silently excluded, causing the digest golden file to be regenerated
         on every run instead of being compared. -->
    <None Update="Serialization\GoldenSnapshots\*.*">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,374 @@
// -----------------------------------------------------------------------------
// CiSystemConnectivityCheck.cs
// Sprint: SPRINT_20260118_018_Doctor_integration_health_expansion
// Task: INTH-003 - Implement CiSystemConnectivityCheck
// Description: Verify connectivity to CI/CD systems (Jenkins, GitLab CI, GitHub Actions, etc.)
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Net.Http;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Verifies connectivity to configured CI/CD systems.
/// Checks authentication, API accessibility, and runner/agent availability.
/// </summary>
public sealed class CiSystemConnectivityCheck : IDoctorCheck
{
    // Identifiers used when building check results for this plugin/category.
    private const string PluginId = "stellaops.doctor.integration";
    private const string CategoryName = "Integration";

    /// <inheritdoc />
    public string CheckId => "check.integration.ci.system";

    /// <inheritdoc />
    public string Name => "CI System Connectivity";

    /// <inheritdoc />
    public string Description => "Verify connectivity to CI/CD systems (Jenkins, GitLab CI, GitHub Actions, etc.)";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["integration", "ci", "cd", "jenkins", "gitlab", "github"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(15);
/// <inheritdoc />
public bool CanRun(DoctorPluginContext context)
{
var ciConfig = context.Configuration.GetSection("CI");
return ciConfig.Exists() && ciConfig.GetChildren().Any();
}
/// <inheritdoc />
    /// <summary>
    /// Probes every configured CI system and aggregates the outcome into a
    /// single doctor result: Fail if any system is unreachable or rejects the
    /// credentials, Warn if any reachable system reports zero available
    /// runners, otherwise Pass. Skips when no CI systems are configured or
    /// when no <see cref="IHttpClientFactory"/> is registered.
    /// </summary>
    /// <param name="context">Plugin context providing configuration, services, and the result builder.</param>
    /// <param name="ct">Cancellation token flowed into each HTTP probe.</param>
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        var builder = context.CreateResult(CheckId, PluginId, CategoryName);
        var ciSystems = GetConfiguredCiSystems(context.Configuration);
        if (ciSystems.Count == 0)
        {
            return builder
                .Skip("No CI systems configured")
                .WithEvidence("CI Systems", eb => eb.Add("configured_systems", "0"))
                .Build();
        }
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder
                .Skip("IHttpClientFactory not available")
                .Build();
        }
        // Named client so operators can configure proxies/handlers centrally;
        // per-request budget is tighter than the check's 15s estimated duration.
        var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
        httpClient.Timeout = TimeSpan.FromSeconds(10);
        var results = new List<CiSystemResult>();
        var unhealthy = new List<string>();
        var noRunners = new List<string>();
        // Probe systems sequentially; each result is bucketed into at most one
        // of the two problem lists (auth/reachability trumps runner capacity).
        foreach (var ci in ciSystems)
        {
            var result = await CheckCiSystemAsync(httpClient, ci, ct);
            results.Add(result);
            if (!result.Reachable || !result.AuthSuccess)
            {
                unhealthy.Add(ci.Name);
            }
            else if (result.AvailableRunners == 0 && result.TotalRunners > 0)
            {
                // Only flag "no runners" when the system reported runners at
                // all; TotalRunners == 0 means runner info was unavailable.
                noRunners.Add(ci.Name);
            }
        }
        if (unhealthy.Count > 0)
        {
            // Hard failure: at least one system could not be reached or authenticated.
            return builder
                .Fail($"{unhealthy.Count} CI system(s) unreachable or auth failed")
                .WithEvidence("CI Systems", eb =>
                {
                    eb.Add("total_systems", ciSystems.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("healthy_systems", (ciSystems.Count - unhealthy.Count).ToString(CultureInfo.InvariantCulture));
                    eb.Add("unhealthy_systems", string.Join(", ", unhealthy));
                    AddCiEvidence(eb, results);
                })
                .WithCauses(
                    "CI system is down",
                    "Network connectivity issue",
                    "API credentials expired",
                    "Firewall blocking access")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "Test CI system connectivity",
                        $"stella ci ping {unhealthy[0]}",
                        CommandType.Shell);
                    rb.AddStep(2, "Refresh credentials",
                        $"stella ci auth refresh {unhealthy[0]}",
                        CommandType.Manual);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        if (noRunners.Count > 0)
        {
            // Soft failure: everything is reachable but capacity is exhausted.
            return builder
                .Warn($"{noRunners.Count} CI system(s) have no available runners")
                .WithEvidence("CI Systems", eb =>
                {
                    eb.Add("total_systems", ciSystems.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("healthy_systems", ciSystems.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("no_runners_systems", string.Join(", ", noRunners));
                    AddCiEvidence(eb, results);
                })
                .WithCauses(
                    "All runners are busy",
                    "Runners are offline",
                    "Runner scaling needed")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "Check runner status",
                        $"stella ci runners {noRunners[0]}",
                        CommandType.Shell);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        return builder
            .Pass($"{ciSystems.Count} CI system(s) healthy")
            .WithEvidence("CI Systems", eb =>
            {
                eb.Add("total_systems", ciSystems.Count.ToString(CultureInfo.InvariantCulture));
                eb.Add("healthy_systems", ciSystems.Count.ToString(CultureInfo.InvariantCulture));
                AddCiEvidence(eb, results);
            })
            .Build();
    }
/// <summary>
/// Probes one CI system's health endpoint and records reachability, auth status and latency.
/// Runner availability is filled in afterwards when the system is healthy.
/// </summary>
private static async Task<CiSystemResult> CheckCiSystemAsync(HttpClient client, CiSystemConfig ci, CancellationToken ct)
{
    var result = new CiSystemResult { Name = ci.Name, Type = ci.Type };
    try
    {
        var healthEndpoint = GetHealthEndpoint(ci);
        using var request = new HttpRequestMessage(HttpMethod.Get, healthEndpoint);
        if (!string.IsNullOrEmpty(ci.ApiToken))
        {
            // Set auth header based on CI type: GitLab uses its custom header,
            // GitHub and everything else accept a standard Bearer token.
            if (ci.Type.Equals("gitlab", StringComparison.OrdinalIgnoreCase))
            {
                request.Headers.Add("PRIVATE-TOKEN", ci.ApiToken);
            }
            else
            {
                request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", ci.ApiToken);
            }
        }
        var sw = System.Diagnostics.Stopwatch.StartNew();
        using var response = await client.SendAsync(request, ct);
        sw.Stop();
        result.LatencyMs = (int)sw.ElapsedMilliseconds;
        result.Reachable = response.IsSuccessStatusCode;
        result.AuthSuccess = response.StatusCode != System.Net.HttpStatusCode.Unauthorized &&
                             response.StatusCode != System.Net.HttpStatusCode.Forbidden;
        // Try to get runner info if available
        if (result.Reachable && result.AuthSuccess)
        {
            await TryGetRunnerInfoAsync(client, ci, result, ct);
        }
    }
    catch (HttpRequestException ex)
    {
        result.Reachable = false;
        result.ErrorMessage = ex.Message;
    }
    catch (OperationCanceledException) when (ct.IsCancellationRequested)
    {
        // Caller-initiated cancellation must propagate; it is not an endpoint timeout.
        throw;
    }
    catch (TaskCanceledException)
    {
        // HttpClient reports its own timeout as TaskCanceledException.
        result.Reachable = false;
        result.ErrorMessage = "Timeout";
    }
    return result;
}
/// <summary>
/// Best-effort fetch of runner counts for Jenkins/GitLab; other CI types are skipped.
/// Any failure (other than caller cancellation) is swallowed because runner info is optional.
/// </summary>
private static async Task TryGetRunnerInfoAsync(HttpClient client, CiSystemConfig ci, CiSystemResult result, CancellationToken ct)
{
    try
    {
        var runnersEndpoint = ci.Type.ToLowerInvariant() switch
        {
            "jenkins" => $"{ci.Url.TrimEnd('/')}/computer/api/json",
            "gitlab" => $"{ci.Url.TrimEnd('/')}/api/v4/runners?status=online",
            "github" => null, // GitHub runners are per-repo/org, complex to aggregate
            _ => null
        };
        if (runnersEndpoint == null)
        {
            return;
        }
        using var request = new HttpRequestMessage(HttpMethod.Get, runnersEndpoint);
        if (!string.IsNullOrEmpty(ci.ApiToken))
        {
            if (ci.Type.Equals("gitlab", StringComparison.OrdinalIgnoreCase))
            {
                request.Headers.Add("PRIVATE-TOKEN", ci.ApiToken);
            }
            else
            {
                request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", ci.ApiToken);
            }
        }
        using var response = await client.SendAsync(request, ct);
        if (response.IsSuccessStatusCode)
        {
            var json = await response.Content.ReadAsStringAsync(ct);
            ParseRunnerInfo(json, ci.Type, result);
        }
    }
    catch (OperationCanceledException) when (ct.IsCancellationRequested)
    {
        // Do not swallow caller cancellation.
        throw;
    }
    catch { /* Runner info is optional */ }
}
/// <summary>
/// Extracts runner totals from a CI API payload. Malformed JSON leaves the counts untouched.
/// </summary>
private static void ParseRunnerInfo(string json, string ciType, CiSystemResult result)
{
    try
    {
        using var doc = JsonDocument.Parse(json);
        var root = doc.RootElement;
        if (ciType.Equals("jenkins", StringComparison.OrdinalIgnoreCase))
        {
            // Jenkins /computer/api/json: {"computer":[{"offline":bool,...},...]}
            if (root.TryGetProperty("computer", out var nodes) &&
                nodes.ValueKind == JsonValueKind.Array)
            {
                var total = 0;
                var online = 0;
                foreach (var node in nodes.EnumerateArray())
                {
                    total++;
                    var isOffline = node.TryGetProperty("offline", out var off) && off.GetBoolean();
                    if (!isOffline)
                    {
                        online++;
                    }
                }
                result.TotalRunners = total;
                result.AvailableRunners = online;
            }
        }
        else if (ciType.Equals("gitlab", StringComparison.OrdinalIgnoreCase) &&
                 root.ValueKind == JsonValueKind.Array)
        {
            // GitLab endpoint already filters by status=online, so every entry counts as available.
            result.TotalRunners = root.GetArrayLength();
            result.AvailableRunners = result.TotalRunners;
        }
    }
    catch
    {
        // Best-effort parse; ignore malformed payloads.
    }
}
/// <summary>
/// Maps a CI system type to its health-probe URL. GitHub is a hosted service with a
/// fixed API host; every other type derives the endpoint from the configured base URL.
/// </summary>
private static string GetHealthEndpoint(CiSystemConfig ci)
{
    var baseUrl = ci.Url.TrimEnd('/');
    switch (ci.Type.ToLowerInvariant())
    {
        case "jenkins":
            return $"{baseUrl}/api/json";
        case "gitlab":
            return $"{baseUrl}/api/v4/version";
        case "github":
            return "https://api.github.com/rate_limit";
        case "azure":
            return $"{baseUrl}/_apis/connectionData";
        default:
            return $"{baseUrl}/health";
    }
}
/// <summary>
/// Emits one evidence group per probed CI system, keyed by a sanitized name prefix.
/// Runner counts are only included when the system reported at least one runner.
/// </summary>
private static void AddCiEvidence(EvidenceBuilder eb, List<CiSystemResult> results)
{
    foreach (var system in results)
    {
        var key = "ci_" + system.Name.ToLowerInvariant().Replace(" ", "_").Replace("-", "_");
        eb.Add($"{key}_type", system.Type);
        eb.Add($"{key}_reachable", system.Reachable.ToString().ToLowerInvariant());
        eb.Add($"{key}_auth_success", system.AuthSuccess.ToString().ToLowerInvariant());
        eb.Add($"{key}_latency_ms", system.LatencyMs.ToString(CultureInfo.InvariantCulture));
        if (system.TotalRunners <= 0)
        {
            continue;
        }
        eb.Add($"{key}_available_runners", system.AvailableRunners.ToString(CultureInfo.InvariantCulture));
        eb.Add($"{key}_total_runners", system.TotalRunners.ToString(CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Reads CI system definitions from configuration. The preferred shape is
/// CI:Systems:&lt;n&gt;:{Name,Url,Type,ApiToken}; the legacy single-system keys
/// (CI:Url, CI:Type, CI:ApiToken) are honored only when no list entries exist.
/// Entries without a Url are ignored.
/// </summary>
private static List<CiSystemConfig> GetConfiguredCiSystems(IConfiguration config)
{
    var systems = new List<CiSystemConfig>();
    var section = config.GetSection("CI:Systems");
    if (section.Exists())
    {
        foreach (var entry in section.GetChildren())
        {
            var url = entry.GetValue<string>("Url");
            if (string.IsNullOrEmpty(url))
            {
                continue;
            }
            systems.Add(new CiSystemConfig
            {
                Name = entry.GetValue<string>("Name") ?? entry.Key,
                Url = url,
                Type = entry.GetValue<string>("Type") ?? "generic",
                ApiToken = entry.GetValue<string>("ApiToken")
            });
        }
    }
    // Check legacy single-system config
    var legacyUrl = config.GetValue<string>("CI:Url");
    if (systems.Count == 0 && !string.IsNullOrEmpty(legacyUrl))
    {
        systems.Add(new CiSystemConfig
        {
            Name = "default",
            Url = legacyUrl,
            Type = config.GetValue<string>("CI:Type") ?? "generic",
            ApiToken = config.GetValue<string>("CI:ApiToken")
        });
    }
    return systems;
}
/// <summary>Configuration for a single CI system entry, bound from CI:Systems or the legacy CI:* keys.</summary>
private sealed class CiSystemConfig
{
    // Display name; used in evidence keys and suggested remediation commands.
    public required string Name { get; init; }
    // Base URL of the CI system; health and runner endpoints are derived from it.
    public required string Url { get; init; }
    // CI flavor: "jenkins", "gitlab", "github", "azure" or "generic".
    public required string Type { get; init; }
    // Optional API token; header placement depends on Type (Bearer vs PRIVATE-TOKEN).
    public string? ApiToken { get; init; }
}
/// <summary>Mutable probe outcome for one CI system; populated by CheckCiSystemAsync.</summary>
private sealed class CiSystemResult
{
    public required string Name { get; init; }
    public required string Type { get; init; }
    // True when the health endpoint returned a success status code.
    public bool Reachable { get; set; }
    // False only when the endpoint returned 401 or 403.
    public bool AuthSuccess { get; set; }
    // Round-trip time of the health request in milliseconds.
    public int LatencyMs { get; set; }
    // Runner counts are 0 unless runner info could be fetched (Jenkins/GitLab only).
    public int TotalRunners { get; set; }
    public int AvailableRunners { get; set; }
    // Populated on connection failure or timeout.
    public string? ErrorMessage { get; set; }
}
}

View File

@@ -0,0 +1,306 @@
// -----------------------------------------------------------------------------
// IntegrationWebhookHealthCheck.cs
// Sprint: SPRINT_20260118_018_Doctor_integration_health_expansion
// Task: INTH-007 - Implement IntegrationWebhookHealthCheck
// Description: Check health of all configured webhooks (inbound and outbound)
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Net.Http;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Checks health of all configured webhooks.
/// Monitors delivery success rates, endpoint availability, and recent failures.
/// </summary>
public sealed class IntegrationWebhookHealthCheck : IDoctorCheck
{
    private const string PluginId = "stellaops.doctor.integration";
    private const string CategoryName = "Integration";

    // Delivery failure-rate thresholds: >=5% produces a warning, >=20% fails the check.
    private const double FailureRateWarningThreshold = 0.05; // 5%
    private const double FailureRateFailThreshold = 0.20; // 20%

    /// <inheritdoc />
    public string CheckId => "check.integration.webhooks";
    /// <inheritdoc />
    public string Name => "Integration Webhook Health";
    /// <inheritdoc />
    public string Description => "Check health of all configured webhooks (inbound and outbound)";
    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["integration", "webhooks", "notifications", "events"];
    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(10);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        var webhooksConfig = context.Configuration.GetSection("Webhooks");
        return webhooksConfig.Exists() && webhooksConfig.GetChildren().Any();
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        var builder = context.CreateResult(CheckId, PluginId, CategoryName);
        var webhooks = GetConfiguredWebhooks(context.Configuration);
        if (webhooks.Count == 0)
        {
            return builder
                .Skip("No webhooks configured")
                .WithEvidence("Webhooks", eb => eb.Add("configured_webhooks", "0"))
                .Build();
        }
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder
                .Skip("IHttpClientFactory not available")
                .Build();
        }
        var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
        httpClient.Timeout = TimeSpan.FromSeconds(10);

        var results = new List<WebhookResult>();
        var unreachable = new List<string>();
        var highFailureRate = new List<(string Name, double Rate)>();
        foreach (var webhook in webhooks)
        {
            var result = await CheckWebhookAsync(httpClient, webhook, ct);
            results.Add(result);
            if (!result.Reachable)
            {
                unreachable.Add(webhook.Name);
            }
            else if (result.FailureRate >= FailureRateFailThreshold)
            {
                highFailureRate.Add((webhook.Name, result.FailureRate));
            }
        }

        // Unreachable endpoints are the most severe condition and short-circuit first.
        if (unreachable.Count > 0)
        {
            return builder
                .Fail($"{unreachable.Count} webhook endpoint(s) unreachable")
                .WithEvidence("Webhooks", eb =>
                {
                    eb.Add("total_webhooks", webhooks.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("healthy_webhooks", (webhooks.Count - unreachable.Count).ToString(CultureInfo.InvariantCulture));
                    eb.Add("unreachable_webhooks", string.Join(", ", unreachable));
                    AddWebhookEvidence(eb, results);
                })
                .WithCauses(
                    "Webhook endpoint is down",
                    "Network connectivity issue",
                    "DNS resolution failed",
                    "TLS certificate issue")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "Test webhook endpoint",
                        $"stella webhooks test {unreachable[0]}",
                        CommandType.Shell);
                    rb.AddStep(2, "View webhook delivery log",
                        $"stella webhooks logs {unreachable[0]}",
                        CommandType.Shell);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        if (highFailureRate.Count > 0)
        {
            return builder
                .Fail($"{highFailureRate.Count} webhook(s) have high failure rate (>20%)")
                .WithEvidence("Webhooks", eb =>
                {
                    eb.Add("total_webhooks", webhooks.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("high_failure_webhooks", string.Join(", ", highFailureRate.Select(h => $"{h.Name}:{h.Rate:P0}")));
                    AddWebhookEvidence(eb, results);
                })
                .WithCauses(
                    "Endpoint returning errors",
                    "Payload format changed",
                    "Authentication issue",
                    "Rate limiting")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "View recent failures",
                        $"stella webhooks logs {highFailureRate[0].Name} --status failed",
                        CommandType.Shell);
                    rb.AddStep(2, "Retry failed deliveries",
                        $"stella webhooks retry {highFailureRate[0].Name}",
                        CommandType.Manual);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }

        // Check for warning-level failure rates
        var warningFailureRate = results
            .Where(r => r.FailureRate >= FailureRateWarningThreshold && r.FailureRate < FailureRateFailThreshold)
            .ToList();
        if (warningFailureRate.Count > 0)
        {
            return builder
                .Warn($"{warningFailureRate.Count} webhook(s) have elevated failure rate (>5%)")
                .WithEvidence("Webhooks", eb =>
                {
                    eb.Add("total_webhooks", webhooks.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("elevated_failure_webhooks", string.Join(", ", warningFailureRate.Select(w => $"{w.Name}:{w.FailureRate:P0}")));
                    AddWebhookEvidence(eb, results);
                })
                .WithCauses("Intermittent endpoint issues", "Occasional timeouts")
                .WithRemediation(rb => rb
                    .AddStep(1, "Monitor webhook metrics",
                        "stella webhooks stats",
                        CommandType.Shell))
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        return builder
            .Pass($"{webhooks.Count} webhook(s) healthy")
            .WithEvidence("Webhooks", eb =>
            {
                eb.Add("total_webhooks", webhooks.Count.ToString(CultureInfo.InvariantCulture));
                eb.Add("healthy_webhooks", webhooks.Count.ToString(CultureInfo.InvariantCulture));
                AddWebhookEvidence(eb, results);
            })
            .Build();
    }

    /// <summary>
    /// Probes one webhook. Outbound endpoints are pinged with HEAD; inbound endpoints are
    /// local and always considered reachable. Delivery stats come from configuration.
    /// </summary>
    private static async Task<WebhookResult> CheckWebhookAsync(HttpClient client, WebhookConfig webhook, CancellationToken ct)
    {
        var result = new WebhookResult { Name = webhook.Name, Direction = webhook.Direction };
        // For outbound webhooks, we can ping the endpoint.
        // For inbound webhooks, we check if our endpoint is ready to receive.
        // Direction comparison is case-insensitive so config values like "Outbound" are honored.
        if (string.Equals(webhook.Direction, "outbound", StringComparison.OrdinalIgnoreCase))
        {
            try
            {
                // Use HEAD or GET to check reachability (don't actually send webhook)
                using var request = new HttpRequestMessage(HttpMethod.Head, webhook.Url);
                var sw = System.Diagnostics.Stopwatch.StartNew();
                using var response = await client.SendAsync(request, ct);
                sw.Stop();
                result.LatencyMs = (int)sw.ElapsedMilliseconds;
                // Most webhook endpoints return 405 for HEAD, 401 for GET - both indicate reachable
                result.Reachable = (int)response.StatusCode < 500;
            }
            catch (HttpRequestException ex)
            {
                result.Reachable = false;
                result.ErrorMessage = ex.Message;
            }
            catch (OperationCanceledException) when (ct.IsCancellationRequested)
            {
                // Propagate caller cancellation instead of misreporting it as a timeout.
                throw;
            }
            catch (TaskCanceledException)
            {
                result.Reachable = false;
                result.ErrorMessage = "Timeout";
            }
        }
        else
        {
            // For inbound webhooks, just mark as reachable (endpoint is local)
            result.Reachable = true;
        }
        // Populate delivery stats from webhook config/history
        result.TotalDeliveries = webhook.TotalDeliveries;
        result.SuccessfulDeliveries = webhook.SuccessfulDeliveries;
        result.FailedDeliveries = webhook.TotalDeliveries - webhook.SuccessfulDeliveries;
        result.FailureRate = webhook.TotalDeliveries > 0
            ? (double)result.FailedDeliveries / webhook.TotalDeliveries
            : 0;
        return result;
    }

    /// <summary>Adds per-webhook diagnostic fields under a sanitized name prefix.</summary>
    private static void AddWebhookEvidence(EvidenceBuilder eb, List<WebhookResult> results)
    {
        foreach (var r in results)
        {
            var prefix = $"webhook_{r.Name.ToLowerInvariant().Replace(" ", "_").Replace("-", "_")}";
            eb.Add($"{prefix}_direction", r.Direction);
            eb.Add($"{prefix}_reachable", r.Reachable.ToString().ToLowerInvariant());
            eb.Add($"{prefix}_latency_ms", r.LatencyMs.ToString(CultureInfo.InvariantCulture));
            eb.Add($"{prefix}_failure_rate", r.FailureRate.ToString("P1", CultureInfo.InvariantCulture));
            eb.Add($"{prefix}_total_deliveries", r.TotalDeliveries.ToString(CultureInfo.InvariantCulture));
        }
    }

    /// <summary>
    /// Reads webhook definitions from Webhooks:Endpoints. Entries without a Url are skipped;
    /// Direction defaults to "outbound".
    /// </summary>
    private static List<WebhookConfig> GetConfiguredWebhooks(IConfiguration config)
    {
        var webhooks = new List<WebhookConfig>();
        var webhooksSection = config.GetSection("Webhooks:Endpoints");
        if (webhooksSection.Exists())
        {
            foreach (var child in webhooksSection.GetChildren())
            {
                var name = child.GetValue<string>("Name") ?? child.Key;
                var url = child.GetValue<string>("Url");
                var direction = child.GetValue<string>("Direction") ?? "outbound";
                var total = child.GetValue<int>("TotalDeliveries");
                var successful = child.GetValue<int>("SuccessfulDeliveries");
                if (!string.IsNullOrEmpty(url))
                {
                    webhooks.Add(new WebhookConfig
                    {
                        Name = name,
                        Url = url,
                        Direction = direction,
                        TotalDeliveries = total,
                        SuccessfulDeliveries = successful
                    });
                }
            }
        }
        return webhooks;
    }

    /// <summary>Configured webhook endpoint with optional delivery history counters.</summary>
    private sealed class WebhookConfig
    {
        public required string Name { get; init; }
        public required string Url { get; init; }
        public required string Direction { get; init; }
        public int TotalDeliveries { get; init; }
        public int SuccessfulDeliveries { get; init; }
    }

    /// <summary>Mutable probe outcome for one webhook.</summary>
    private sealed class WebhookResult
    {
        public required string Name { get; init; }
        public required string Direction { get; init; }
        public bool Reachable { get; set; }
        public int LatencyMs { get; set; }
        public int TotalDeliveries { get; set; }
        public int SuccessfulDeliveries { get; set; }
        public int FailedDeliveries { get; set; }
        // Fraction of failed deliveries; 0 when there is no delivery history.
        public double FailureRate { get; set; }
        public string? ErrorMessage { get; set; }
    }
}

View File

@@ -0,0 +1,338 @@
// -----------------------------------------------------------------------------
// SecretsManagerConnectivityCheck.cs
// Sprint: SPRINT_20260118_018_Doctor_integration_health_expansion
// Task: INTH-004 - Implement SecretsManagerConnectivityCheck
// Description: Verify connectivity to secrets managers (Vault, AWS Secrets Manager, Azure Key Vault, etc.)
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Net.Http;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
namespace StellaOps.Doctor.Plugins.Integration.Checks;
/// <summary>
/// Verifies connectivity to configured secrets managers.
/// Checks authentication, seal status (Vault), and API accessibility.
/// </summary>
public sealed class SecretsManagerConnectivityCheck : IDoctorCheck
{
    private const string PluginId = "stellaops.doctor.integration";
    private const string CategoryName = "Integration";

    /// <inheritdoc />
    public string CheckId => "check.integration.secrets.manager";
    /// <inheritdoc />
    public string Name => "Secrets Manager Connectivity";
    /// <inheritdoc />
    public string Description => "Verify connectivity to secrets managers (Vault, AWS Secrets Manager, Azure Key Vault)";
    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Fail;
    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["integration", "secrets", "vault", "security", "keyvault"];
    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(10);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        var secretsConfig = context.Configuration.GetSection("Secrets");
        return secretsConfig.Exists() && secretsConfig.GetChildren().Any();
    }

    /// <inheritdoc />
    public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        var builder = context.CreateResult(CheckId, PluginId, CategoryName);
        var managers = GetConfiguredSecretsManagers(context.Configuration);
        if (managers.Count == 0)
        {
            return builder
                .Skip("No secrets managers configured")
                .WithEvidence("Secrets Managers", eb => eb.Add("configured_managers", "0"))
                .Build();
        }
        var httpClientFactory = context.Services.GetService<IHttpClientFactory>();
        if (httpClientFactory == null)
        {
            return builder
                .Skip("IHttpClientFactory not available")
                .Build();
        }
        var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
        httpClient.Timeout = TimeSpan.FromSeconds(10);

        var results = new List<SecretsManagerResult>();
        var unhealthy = new List<string>();
        var sealed_ = new List<string>(); // 'sealed' is a keyword
        foreach (var mgr in managers)
        {
            var result = await CheckSecretsManagerAsync(httpClient, mgr, ct);
            results.Add(result);
            if (!result.Reachable || !result.AuthSuccess)
            {
                unhealthy.Add(mgr.Name);
            }
            else if (result.IsSealed)
            {
                sealed_.Add(mgr.Name);
            }
        }

        // Secrets manager issues are critical - blocks deployments
        if (unhealthy.Count > 0)
        {
            return builder
                .Fail($"{unhealthy.Count} secrets manager(s) unreachable")
                .WithEvidence("Secrets Managers", eb =>
                {
                    eb.Add("total_managers", managers.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("healthy_managers", (managers.Count - unhealthy.Count).ToString(CultureInfo.InvariantCulture));
                    eb.Add("unhealthy_managers", string.Join(", ", unhealthy));
                    AddSecretsEvidence(eb, results);
                })
                .WithCauses(
                    "Secrets manager is down",
                    "Network connectivity issue",
                    "Authentication token expired",
                    "TLS certificate issue")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "Test secrets manager connectivity",
                        $"stella secrets ping {unhealthy[0]}",
                        CommandType.Shell);
                    rb.AddStep(2, "Refresh authentication",
                        $"stella secrets auth refresh {unhealthy[0]}",
                        CommandType.Manual);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        if (sealed_.Count > 0)
        {
            return builder
                .Fail($"{sealed_.Count} Vault instance(s) are sealed")
                .WithEvidence("Secrets Managers", eb =>
                {
                    eb.Add("total_managers", managers.Count.ToString(CultureInfo.InvariantCulture));
                    eb.Add("healthy_managers", (managers.Count - sealed_.Count).ToString(CultureInfo.InvariantCulture));
                    eb.Add("sealed_vaults", string.Join(", ", sealed_));
                    AddSecretsEvidence(eb, results);
                })
                .WithCauses(
                    "Vault was restarted and needs unseal",
                    "Vault auto-seal triggered",
                    "HSM connectivity lost")
                .WithRemediation(rb =>
                {
                    rb.AddStep(1, "Unseal Vault",
                        $"vault operator unseal",
                        CommandType.Manual);
                    rb.AddStep(2, "Check seal status",
                        $"stella secrets status {sealed_[0]}",
                        CommandType.Shell);
                })
                .WithVerification($"stella doctor --check {CheckId}")
                .Build();
        }
        return builder
            .Pass($"{managers.Count} secrets manager(s) healthy")
            .WithEvidence("Secrets Managers", eb =>
            {
                eb.Add("total_managers", managers.Count.ToString(CultureInfo.InvariantCulture));
                eb.Add("healthy_managers", managers.Count.ToString(CultureInfo.InvariantCulture));
                AddSecretsEvidence(eb, results);
            })
            .Build();
    }

    /// <summary>
    /// Probes one secrets manager's health endpoint. Reachability here means the request
    /// completed with any HTTP status; auth is considered failed only on 401/403.
    /// </summary>
    private static async Task<SecretsManagerResult> CheckSecretsManagerAsync(
        HttpClient client, SecretsManagerConfig mgr, CancellationToken ct)
    {
        var result = new SecretsManagerResult { Name = mgr.Name, Type = mgr.Type };
        try
        {
            var healthEndpoint = GetHealthEndpoint(mgr);
            using var request = new HttpRequestMessage(HttpMethod.Get, healthEndpoint);
            // Add auth headers based on type
            if (!string.IsNullOrEmpty(mgr.Token))
            {
                if (mgr.Type.Equals("vault", StringComparison.OrdinalIgnoreCase))
                {
                    request.Headers.Add("X-Vault-Token", mgr.Token);
                }
                else
                {
                    request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", mgr.Token);
                }
            }
            var sw = System.Diagnostics.Stopwatch.StartNew();
            using var response = await client.SendAsync(request, ct);
            sw.Stop();
            result.LatencyMs = (int)sw.ElapsedMilliseconds;
            result.Reachable = true;
            result.AuthSuccess = response.StatusCode != System.Net.HttpStatusCode.Unauthorized &&
                                 response.StatusCode != System.Net.HttpStatusCode.Forbidden;
            // Parse response for type-specific info
            if (response.IsSuccessStatusCode)
            {
                var json = await response.Content.ReadAsStringAsync(ct);
                ParseSecretsManagerResponse(json, mgr.Type, result);
            }
        }
        catch (HttpRequestException ex)
        {
            result.Reachable = false;
            result.ErrorMessage = ex.Message;
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            // Propagate caller cancellation instead of misreporting it as a timeout.
            throw;
        }
        catch (TaskCanceledException)
        {
            result.Reachable = false;
            result.ErrorMessage = "Timeout";
        }
        return result;
    }

    /// <summary>
    /// Best-effort extraction of type-specific fields; currently only Vault's
    /// health payload (sealed/initialized/version) is understood.
    /// </summary>
    private static void ParseSecretsManagerResponse(string json, string type, SecretsManagerResult result)
    {
        try
        {
            using var doc = JsonDocument.Parse(json);
            if (type.Equals("vault", StringComparison.OrdinalIgnoreCase))
            {
                // Vault health endpoint returns sealed status
                if (doc.RootElement.TryGetProperty("sealed", out var sealedEl))
                {
                    result.IsSealed = sealedEl.GetBoolean();
                }
                if (doc.RootElement.TryGetProperty("initialized", out var initEl))
                {
                    result.IsInitialized = initEl.GetBoolean();
                }
                if (doc.RootElement.TryGetProperty("version", out var verEl))
                {
                    result.Version = verEl.GetString();
                }
            }
        }
        catch
        {
            // Ignore malformed payloads; extra detail is optional.
        }
    }

    /// <summary>
    /// Maps a secrets manager type to its health URL. Vault's query string makes the health
    /// endpoint return 200 even when sealed/uninitialized so those states can be parsed.
    /// AWS/GCP normally use SDK auth; the URL is kept only as a config reference.
    /// </summary>
    private static string GetHealthEndpoint(SecretsManagerConfig mgr)
    {
        return mgr.Type.ToLowerInvariant() switch
        {
            "vault" => $"{mgr.Url.TrimEnd('/')}/v1/sys/health?standbyok=true&sealedcode=200&uninitcode=200",
            "aws" => mgr.Url, // AWS uses SDK, URL is just for config reference
            "azure" => $"{mgr.Url.TrimEnd('/')}/healthstatus",
            "gcp" => mgr.Url, // GCP uses SDK
            _ => $"{mgr.Url.TrimEnd('/')}/health"
        };
    }

    /// <summary>Adds per-manager diagnostic fields; seal state is Vault-specific.</summary>
    private static void AddSecretsEvidence(EvidenceBuilder eb, List<SecretsManagerResult> results)
    {
        foreach (var r in results)
        {
            var prefix = $"secrets_{r.Name.ToLowerInvariant().Replace(" ", "_").Replace("-", "_")}";
            eb.Add($"{prefix}_type", r.Type);
            eb.Add($"{prefix}_reachable", r.Reachable.ToString().ToLowerInvariant());
            eb.Add($"{prefix}_auth_success", r.AuthSuccess.ToString().ToLowerInvariant());
            eb.Add($"{prefix}_latency_ms", r.LatencyMs.ToString(CultureInfo.InvariantCulture));
            if (r.Type.Equals("vault", StringComparison.OrdinalIgnoreCase))
            {
                eb.Add($"{prefix}_sealed", r.IsSealed.ToString().ToLowerInvariant());
                eb.Add($"{prefix}_initialized", r.IsInitialized.ToString().ToLowerInvariant());
            }
        }
    }

    /// <summary>
    /// Reads manager definitions from Secrets:Managers; falls back to the legacy
    /// single-Vault keys (Secrets:Vault:* or Vault:*) when the list is empty.
    /// </summary>
    private static List<SecretsManagerConfig> GetConfiguredSecretsManagers(IConfiguration config)
    {
        var managers = new List<SecretsManagerConfig>();
        var secretsSection = config.GetSection("Secrets:Managers");
        if (secretsSection.Exists())
        {
            foreach (var child in secretsSection.GetChildren())
            {
                var name = child.GetValue<string>("Name") ?? child.Key;
                var url = child.GetValue<string>("Url");
                var type = child.GetValue<string>("Type") ?? "vault";
                var token = child.GetValue<string>("Token");
                if (!string.IsNullOrEmpty(url))
                {
                    managers.Add(new SecretsManagerConfig
                    {
                        Name = name,
                        Url = url,
                        Type = type,
                        Token = token
                    });
                }
            }
        }
        // Check legacy single-manager config
        var legacyUrl = config.GetValue<string>("Secrets:Vault:Url")
            ?? config.GetValue<string>("Vault:Url");
        if (!string.IsNullOrEmpty(legacyUrl) && managers.Count == 0)
        {
            managers.Add(new SecretsManagerConfig
            {
                Name = "vault",
                Url = legacyUrl,
                Type = "vault",
                Token = config.GetValue<string>("Secrets:Vault:Token") ?? config.GetValue<string>("Vault:Token")
            });
        }
        return managers;
    }

    /// <summary>Configured secrets manager entry.</summary>
    private sealed class SecretsManagerConfig
    {
        public required string Name { get; init; }
        public required string Url { get; init; }
        // Manager flavor: "vault", "aws", "azure", "gcp" or custom.
        public required string Type { get; init; }
        public string? Token { get; init; }
    }

    /// <summary>Mutable probe outcome for one secrets manager.</summary>
    private sealed class SecretsManagerResult
    {
        public required string Name { get; init; }
        public required string Type { get; init; }
        public bool Reachable { get; set; }
        public bool AuthSuccess { get; set; }
        public int LatencyMs { get; set; }
        // Vault-specific; false for all other types.
        public bool IsSealed { get; set; }
        // Defaults to true so non-Vault managers never report "uninitialized".
        public bool IsInitialized { get; set; } = true;
        public string? Version { get; set; }
        public string? ErrorMessage { get; set; }
    }
}

View File

@@ -37,7 +37,10 @@ public sealed class IntegrationPlugin : IDoctorPlugin
new TeamsWebhookCheck(),
new GitProviderCheck(),
new LdapConnectivityCheck(),
new OidcProviderCheck()
new OidcProviderCheck(),
new CiSystemConnectivityCheck(),
new SecretsManagerConnectivityCheck(),
new IntegrationWebhookHealthCheck()
];
/// <inheritdoc />

View File

@@ -33,6 +33,20 @@ public sealed record RemediationStep
/// Key is the placeholder name (e.g., "HOSTNAME"), value is the description.
/// </summary>
public IReadOnlyDictionary<string, string>? Placeholders { get; init; }
/// <summary>
/// Indicates if this step performs destructive operations (delete, truncate, drop, reset, etc.).
/// Destructive steps should not be auto-executed by AdvisoryAI.
/// Added as part of SPRINT_20260118_015_Doctor_check_quality_improvements (DQUAL-005).
/// </summary>
public bool IsDestructive { get; init; }
/// <summary>
/// A safe dry-run variant of the command that previews what would happen without making changes.
/// For example, "rm -rf /path" might have a dry-run variant of "find /path -type f | head -20".
/// Added as part of SPRINT_20260118_015_Doctor_check_quality_improvements (DQUAL-005).
/// </summary>
public string? DryRunVariant { get; init; }
}
/// <summary>

View File

@@ -16,4 +16,8 @@
<ProjectReference Include="..\..\StellaOps.TestKit\StellaOps.TestKit.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -11,4 +11,8 @@
<ProjectReference Include="../../StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../StellaOps.TestKit/StellaOps.TestKit.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -0,0 +1,360 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-005 - Unit tests for GateEvaluator
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.DeltaVerdict.Bundles;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Tests.Bundles;
public class GateEvaluatorTests
{
private readonly GateEvaluator _evaluator = new();

// Fixed evaluation timestamp for deterministic tests. InvariantCulture makes the
// parse independent of the test host's locale (CA1305).
private readonly DateTimeOffset _evaluatedAt =
    DateTimeOffset.Parse("2026-01-18T12:00:00Z", System.Globalization.CultureInfo.InvariantCulture);
[Theory]
[InlineData(0.70, GateAction.Block)] // Above block threshold
[InlineData(0.65, GateAction.Block)] // At block threshold
[InlineData(0.64, GateAction.Warn)] // Below block, above warn
[InlineData(0.50, GateAction.Warn)] // In warn range
[InlineData(0.40, GateAction.Warn)] // At warn threshold
[InlineData(0.39, GateAction.Pass)] // Below warn threshold
[InlineData(0.10, GateAction.Pass)] // Well below all thresholds
public void Evaluate_WithDefaultConfig_ReturnsExpectedAction(double score, GateAction expectedAction)
{
    // Default configuration: block >= 0.65, warn >= 0.40, otherwise pass.
    var decision = _evaluator.Evaluate(score, CreateTestInput(), GateConfiguration.Default, _evaluatedAt);

    decision.Action.Should().Be(expectedAction);
}

[Fact]
public void Evaluate_BlockDecision_ContainsCorrectThreshold()
{
    var gateInput = CreateTestInput();

    var decision = _evaluator.Evaluate(0.70, gateInput, GateConfiguration.Default, _evaluatedAt);

    // A blocking decision must report the threshold that tripped and the matching rule id.
    decision.Threshold.Should().Be(0.65);
    decision.MatchedRules.Should().Contain("block_threshold");
}
[Fact]
public void Evaluate_WithTrustedVexNotAffected_ReturnsPass()
{
    // A trusted "not_affected" VEX statement overrides an otherwise blocking score.
    var gateInput = CreateTestInput(vexStatus: "not_affected", vexSource: ".vex/document.json");
    var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = true };

    var decision = _evaluator.Evaluate(0.80, gateInput, gateConfig, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Pass);
    decision.MatchedRules.Should().Contain("auto_pass_trusted_vex");
    decision.Reason.Should().Contain("not_affected");
}

[Fact]
public void Evaluate_WithTrustedVexFixed_ReturnsPass()
{
    // "fixed" from a trusted vendor source also auto-passes.
    var gateInput = CreateTestInput(vexStatus: "fixed", vexSource: "vendor:acme");
    var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = true };

    var decision = _evaluator.Evaluate(0.80, gateInput, gateConfig, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Pass);
}

[Fact]
public void Evaluate_WithUntrustedVexSource_DoesNotAutoPass()
{
    // An unrecognized VEX source is not authoritative, so the score still blocks.
    var gateInput = CreateTestInput(vexStatus: "not_affected", vexSource: "unknown-source");
    var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = true };

    var decision = _evaluator.Evaluate(0.80, gateInput, gateConfig, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Block);
}

[Fact]
public void Evaluate_WithVexAffected_DoesNotAutoPass()
{
    // An "affected" status never grants an auto-pass, regardless of source trust.
    var gateInput = CreateTestInput(vexStatus: "affected", vexSource: "vendor:acme");
    var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = true };

    var decision = _evaluator.Evaluate(0.80, gateInput, gateConfig, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Block);
}

[Fact]
public void Evaluate_WithAutoPassDisabled_IgnoresVex()
{
    // With AutoPassOnTrustedVex off, even a trusted not_affected statement is ignored.
    var gateInput = CreateTestInput(vexStatus: "not_affected", vexSource: ".vex/document.json");
    var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = false };

    var decision = _evaluator.Evaluate(0.80, gateInput, gateConfig, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Block);
}
[Fact]
public void Evaluate_WarnWithHighPatchProof_ReturnsPass()
{
    // A warn-range score is downgraded to pass when patch-proof confidence
    // exceeds the bypass threshold (0.70).
    var gateInput = CreateTestInput(patchProofConfidence: 0.75);

    var decision = _evaluator.Evaluate(0.50, gateInput, GateConfiguration.Default, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Pass);
    decision.MatchedRules.Should().Contain("warn_threshold");
    decision.MatchedRules.Should().Contain("patch_proof_bypass");
}

[Fact]
public void Evaluate_WarnWithLowPatchProof_ReturnsWarn()
{
    // Below the bypass threshold the warn stands.
    var gateInput = CreateTestInput(patchProofConfidence: 0.50);

    var decision = _evaluator.Evaluate(0.50, gateInput, GateConfiguration.Default, _evaluatedAt);

    decision.Action.Should().Be(GateAction.Warn);
}

[Fact]
public void Evaluate_StrictConfig_HasLowerThresholds()
{
    // Strict profile: block >= 0.50, warn >= 0.30.
    var gateInput = CreateTestInput();

    var atPointFiveFive = _evaluator.Evaluate(0.55, gateInput, GateConfiguration.Strict, _evaluatedAt);
    var atPointThreeFive = _evaluator.Evaluate(0.35, gateInput, GateConfiguration.Strict, _evaluatedAt);

    atPointFiveFive.Action.Should().Be(GateAction.Block);
    atPointThreeFive.Action.Should().Be(GateAction.Warn);
}
[Fact]
public void Evaluate_BlockDecision_IncludesSuggestions()
{
    // A Block decision must carry actionable remediation suggestions.
    var verdict = _evaluator.Evaluate(0.75, CreateTestInput(), GateConfiguration.Default, _evaluatedAt);

    verdict.Suggestions.Should().NotBeEmpty();
    verdict.Suggestions.Should().Contain(s => s.Contains("urgently") || s.Contains("VEX"));
}
[Fact]
public void Evaluate_WarnDecision_IncludesSuggestions()
{
    // Warn decisions must carry suggestions as well.
    var verdict = _evaluator.Evaluate(0.50, CreateTestInput(), GateConfiguration.Default, _evaluatedAt);

    verdict.Suggestions.Should().NotBeEmpty();
}
[Fact]
public void Evaluate_WithCustomBlockRule_OverridesDefaultThreshold()
{
    // A high-priority custom rule forces Block even when the score alone
    // would pass under the default thresholds.
    var cfg = GateConfiguration.Default with
    {
        CustomRules = ImmutableArray.Create(new GateRule
        {
            Id = "critical-cvss",
            Name = "Block Critical CVSS",
            Condition = "cvss >= 9.0",
            Action = GateAction.Block,
            Priority = 0 // High priority
        })
    };

    var verdict = _evaluator.Evaluate(0.30, CreateTestInput(cvssBase: 9.0), cfg, _evaluatedAt);

    verdict.Action.Should().Be(GateAction.Block);
    verdict.MatchedRules.Should().Contain("critical-cvss");
}
[Fact]
public void Evaluate_WithCustomEpssRule_Matches()
{
    // Custom rules can match on EPSS probability, not just CVSS.
    var cfg = GateConfiguration.Default with
    {
        CustomRules = ImmutableArray.Create(new GateRule
        {
            Id = "high-epss",
            Name = "Block High EPSS",
            Condition = "epss >= 0.5",
            Action = GateAction.Block,
            Priority = 0
        })
    };

    var verdict = _evaluator.Evaluate(0.30, CreateTestInput(epssScore: 0.6), cfg, _evaluatedAt);

    verdict.Action.Should().Be(GateAction.Block);
    verdict.MatchedRules.Should().Contain("high-epss");
}
[Fact]
public void Evaluate_CustomRulesRespectPriority()
{
    // When several custom rules match, the one with the lower Priority value
    // (higher precedence) decides the action.
    var lowPriorityWarn = new GateRule
    {
        Id = "warn-cvss",
        Name = "Warn High CVSS",
        Condition = "cvss >= 7.0",
        Action = GateAction.Warn,
        Priority = 10
    };
    var highPriorityBlock = new GateRule
    {
        Id = "block-epss",
        Name = "Block High EPSS",
        Condition = "epss >= 0.5",
        Action = GateAction.Block,
        Priority = 1
    };
    var cfg = GateConfiguration.Default with
    {
        CustomRules = ImmutableArray.Create(lowPriorityWarn, highPriorityBlock)
    };

    var verdict = _evaluator.Evaluate(0.30, CreateTestInput(cvssBase: 8.0, epssScore: 0.6), cfg, _evaluatedAt);

    verdict.Action.Should().Be(GateAction.Block);
    verdict.MatchedRules.Should().Contain("block-epss");
}
[Fact]
public void EvaluateBatch_EvaluatesAllFindings()
{
    // Three scores spanning the block / warn / pass ranges; the batch API must
    // return one decision per finding, in input order.
    var findings = new List<(double FinalScore, EvidenceWeightedScoreInput Input)>
    {
        (0.75, CreateTestInput()),
        (0.50, CreateTestInput()),
        (0.20, CreateTestInput())
    };

    var decisions = _evaluator.EvaluateBatch(findings, GateConfiguration.Default, _evaluatedAt);

    decisions.Should().HaveCount(3);
    decisions[0].Action.Should().Be(GateAction.Block);
    decisions[1].Action.Should().Be(GateAction.Warn);
    decisions[2].Action.Should().Be(GateAction.Pass);
}
[Fact]
public void Evaluate_InProjectVex_IsAlwaysAuthoritative()
{
    // A not_affected statement from an in-project VEX source auto-passes
    // even at a very high score.
    var cfg = GateConfiguration.Default with { AutoPassOnTrustedVex = true };
    var finding = CreateTestInput(vexStatus: "not_affected", vexSource: "in-project:local");

    var verdict = _evaluator.Evaluate(0.90, finding, cfg, _evaluatedAt);

    verdict.Action.Should().Be(GateAction.Pass);
}
[Fact]
public void Evaluate_VendorPrefixVex_IsAuthoritative()
{
    // "vendor:"-prefixed VEX sources are treated as authoritative; a "fixed"
    // status auto-passes even at a very high score.
    var cfg = GateConfiguration.Default with { AutoPassOnTrustedVex = true };
    var finding = CreateTestInput(vexStatus: "fixed", vexSource: "vendor:microsoft");

    var verdict = _evaluator.Evaluate(0.90, finding, cfg, _evaluatedAt);

    verdict.Action.Should().Be(GateAction.Pass);
}
/// <summary>
/// Builds a scoring input for a fixed finding id with overridable evidence
/// values; the remaining dimensions use stable mid-range defaults.
/// </summary>
private static EvidenceWeightedScoreInput CreateTestInput(
    double cvssBase = 7.5,
    double epssScore = 0.42,
    double patchProofConfidence = 0.3,
    string? vexStatus = null,
    string? vexSource = null) => new()
{
    FindingId = "CVE-2024-1234@pkg:npm/lodash@4.17.20",
    CvssBase = cvssBase,
    EpssScore = epssScore,
    PatchProofConfidence = patchProofConfidence,
    Rch = 0.7,
    Rts = 0.3,
    Bkp = 0.5,
    Xpl = 0.6,
    Src = 0.8,
    Mit = 0.2,
    VexStatus = vexStatus,
    VexSource = vexSource
};
}

View File

@@ -0,0 +1,366 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-002 - Unit tests for VerdictBundleBuilder
using FluentAssertions;
using StellaOps.DeltaVerdict.Bundles;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.DeltaVerdict.Tests.Bundles;
/// <summary>
/// Unit tests for <c>VerdictBundleBuilder</c>: bundle construction, input
/// extraction, normalization trace, gate decisions, VEX overrides, and
/// content-addressed determinism. A fixed clock keeps timestamps and digests
/// reproducible.
/// </summary>
public class VerdictBundleBuilderTests
{
    // Pinned clock so ComputedAt and the bundle digest are deterministic.
    private readonly TimeProvider _fixedTimeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2026-01-18T12:00:00Z"));
    private readonly VerdictBundleBuilder _builder;

    public VerdictBundleBuilderTests()
    {
        var gateEvaluator = new GateEvaluator();
        _builder = new VerdictBundleBuilder(gateEvaluator, _fixedTimeProvider);
    }

    [Fact]
    public void Build_WithValidInput_ReturnsVerdictBundle()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult();
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert: bundle is populated and content-addressed (digest == id).
        bundle.Should().NotBeNull();
        bundle.FindingId.Should().Be("CVE-2024-1234@pkg:npm/lodash@4.17.20");
        bundle.SchemaVersion.Should().Be(VerdictBundle.CurrentSchemaVersion);
        bundle.BundleId.Should().StartWith("sha256:");
        bundle.BundleDigest.Should().Be(bundle.BundleId);
    }

    [Fact]
    public void Build_ExtractsInputs_WithCorrectValues()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult();
        var input = CreateTestEwsInput(cvssBase: 7.5, epssScore: 0.42, reachability: 0.8);
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert: raw evidence values survive into the bundle's input records.
        bundle.Inputs.Cvss.BaseScore.Should().Be(7.5);
        bundle.Inputs.Epss.Probability.Should().Be(0.42);
        bundle.Inputs.Reachability.Value.Should().Be(0.8);
    }

    [Fact]
    public void Build_CreatesNormalizationTrace_FromBreakdown()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult();
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert: per-dimension normalization entries mirror the EWS breakdown.
        bundle.Normalization.Should().NotBeNull();
        bundle.Normalization.Dimensions.Should().NotBeEmpty();
        bundle.Normalization.Dimensions
            .Should().Contain(d => d.Symbol == "CVS" || d.Symbol == "RCH");
    }

    [Fact]
    public void Build_ComputesScores_Correctly()
    {
        // Arrange: EWS scores are 0-100 integers; the bundle stores 0-1 doubles.
        var ewsResult = CreateTestEwsResult(score: 65);
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert
        bundle.FinalScore.Should().Be(0.65);
    }

    [Fact]
    public void Build_WithBlockingScore_ReturnsBlockDecision()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 70); // Above default block threshold (0.65)
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Default;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert
        bundle.Gate.Action.Should().Be(GateAction.Block);
        bundle.Gate.Threshold.Should().Be(0.65);
        bundle.Gate.MatchedRules.Should().Contain("block_threshold");
    }

    [Fact]
    public void Build_WithWarnScore_ReturnsWarnDecision()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 50); // Between warn (0.40) and block (0.65)
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Default;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert
        bundle.Gate.Action.Should().Be(GateAction.Warn);
        bundle.Gate.Threshold.Should().Be(0.40);
    }

    [Fact]
    public void Build_WithPassScore_ReturnsPassDecision()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 30); // Below warn threshold
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Default;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert
        bundle.Gate.Action.Should().Be(GateAction.Pass);
    }

    [Fact]
    public void Build_WithTrustedVexNotAffected_ReturnsPassDecision()
    {
        // Arrange: blocking score, but a trusted not_affected VEX is present.
        var ewsResult = CreateTestEwsResult(score: 70, flags: ["vex-override", "vendor-na"]);
        var input = CreateTestEwsInput(vexStatus: "not_affected", vexSource: ".vex/document.json");
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Default with { AutoPassOnTrustedVex = true };

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert: the trusted-VEX auto-pass rule wins over the threshold rule.
        bundle.Gate.Action.Should().Be(GateAction.Pass);
        bundle.Gate.MatchedRules.Should().Contain("auto_pass_trusted_vex");
    }

    [Fact]
    public void Build_WithVexOverride_SetsOverrideField()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 0, flags: ["vex-override", "vendor-na"]);
        var input = CreateTestEwsInput(vexStatus: "not_affected", vexSource: "vendor:acme");
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert: the override metadata records the applied VEX suppression.
        bundle.Override.Should().NotBeNull();
        bundle.Override!.Applied.Should().BeTrue();
        bundle.Override.Type.Should().Be("vex_not_affected");
    }

    [Fact]
    public void Build_GeneratesContentAddressedBundleId()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult();
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle1 = _builder.Build(ewsResult, input, policy);
        var bundle2 = _builder.Build(ewsResult, input, policy);

        // Assert - Same inputs should produce same digest
        bundle1.BundleId.Should().Be(bundle2.BundleId);
    }

    [Fact]
    public void Build_DifferentInputs_ProduceDifferentBundleIds()
    {
        // Arrange
        var ewsResult1 = CreateTestEwsResult(score: 50);
        var ewsResult2 = CreateTestEwsResult(score: 60);
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle1 = _builder.Build(ewsResult1, input, policy);
        var bundle2 = _builder.Build(ewsResult2, input, policy);

        // Assert: content addressing distinguishes different payloads.
        bundle1.BundleId.Should().NotBe(bundle2.BundleId);
    }

    [Fact]
    public void Build_SetsComputedAtTimestamp()
    {
        // Arrange: must match the fixed clock injected in the constructor.
        var expectedTime = DateTimeOffset.Parse("2026-01-18T12:00:00Z");
        var ewsResult = CreateTestEwsResult();
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy);

        // Assert
        bundle.ComputedAt.Should().Be(expectedTime);
    }

    [Fact]
    public void Build_WithStrictGateConfiguration_LowersThresholds()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 55); // Above strict block (0.50), below default block (0.65)
        var input = CreateTestEwsInput();
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Strict;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert
        bundle.Gate.Action.Should().Be(GateAction.Block);
        bundle.Gate.Threshold.Should().Be(0.50);
    }

    [Fact]
    public void Build_WithPatchProofBypass_ConvertsWarnToPass()
    {
        // Arrange
        var ewsResult = CreateTestEwsResult(score: 50); // In warn range
        var input = CreateTestEwsInput(patchProofConfidence: 0.75); // Above bypass threshold (0.70)
        var policy = EvidenceWeightPolicy.AdvisoryProduction;
        var gateConfig = GateConfiguration.Default;

        // Act
        var bundle = _builder.Build(ewsResult, input, policy, gateConfig);

        // Assert
        bundle.Gate.Action.Should().Be(GateAction.Pass);
        bundle.Gate.MatchedRules.Should().Contain("patch_proof_bypass");
    }

    /// <summary>
    /// Builds a canned EWS result with a fixed breakdown; <paramref name="score"/>
    /// and <paramref name="flags"/> are overridable.
    /// </summary>
    private static EvidenceWeightedScoreResult CreateTestEwsResult(
        int score = 50,
        string[]? flags = null) // FIX: was `string[] flags = null!` — honest nullable annotation instead of null-forgiving default
    {
        flags ??= ["high-epss"];
        return new EvidenceWeightedScoreResult
        {
            FindingId = "CVE-2024-1234@pkg:npm/lodash@4.17.20",
            Score = score,
            Bucket = score >= 90 ? ScoreBucket.ActNow :
                     score >= 70 ? ScoreBucket.ScheduleNext :
                     score >= 40 ? ScoreBucket.Investigate :
                     ScoreBucket.Watchlist,
            Inputs = new EvidenceInputValues(0.7, 0.3, 0.5, 0.6, 0.8, 0.2),
            Weights = EvidenceWeights.Advisory,
            Breakdown =
            [
                new DimensionContribution
                {
                    Dimension = "CVSS Base",
                    Symbol = "CVS",
                    InputValue = 0.75,
                    Weight = 0.25,
                    Contribution = 0.1875
                },
                new DimensionContribution
                {
                    Dimension = "EPSS",
                    Symbol = "EPS",
                    InputValue = 0.42,
                    Weight = 0.30,
                    Contribution = 0.126
                },
                new DimensionContribution
                {
                    Dimension = "Reachability",
                    Symbol = "RCH",
                    InputValue = 0.7,
                    Weight = 0.20,
                    Contribution = 0.14
                },
                new DimensionContribution
                {
                    Dimension = "Exploit Maturity",
                    Symbol = "XPL",
                    InputValue = 0.6,
                    Weight = 0.10,
                    Contribution = 0.06
                },
                new DimensionContribution
                {
                    Dimension = "Patch Proof",
                    Symbol = "PPF",
                    InputValue = 0.3,
                    Weight = 0.15,
                    Contribution = -0.045,
                    IsSubtractive = true
                }
            ],
            Flags = flags,
            Explanations = ["CVSS: high (75%)", "EPSS: medium (42%)"],
            Caps = AppliedGuardrails.None(score),
            PolicyDigest = "abc123",
            CalculatedAt = DateTimeOffset.Parse("2026-01-18T12:00:00Z")
        };
    }

    /// <summary>
    /// Builds a canned EWS input for a fixed finding id with overridable
    /// evidence values.
    /// </summary>
    private static EvidenceWeightedScoreInput CreateTestEwsInput(
        double cvssBase = 7.5,
        double epssScore = 0.42,
        double reachability = 0.7,
        double patchProofConfidence = 0.3,
        string? vexStatus = null,
        string? vexSource = null)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = "CVE-2024-1234@pkg:npm/lodash@4.17.20",
            CvssBase = cvssBase,
            CvssVersion = "3.1",
            EpssScore = epssScore,
            ExploitMaturity = ExploitMaturityLevel.Functional,
            PatchProofConfidence = patchProofConfidence,
            Rch = reachability,
            Rts = 0.3,
            Bkp = 0.5,
            Xpl = 0.6,
            Src = 0.8,
            Mit = 0.2,
            VexStatus = vexStatus,
            VexSource = vexSource
        };
    }

    /// <summary>Minimal fixed-clock <see cref="TimeProvider"/> for deterministic tests.</summary>
    private sealed class FakeTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _utcNow;

        public FakeTimeProvider(DateTimeOffset utcNow)
        {
            _utcNow = utcNow;
        }

        public override DateTimeOffset GetUtcNow() => _utcNow;
    }
}

View File

@@ -0,0 +1,440 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-004 - Unit tests for VerdictRekorAnchorService
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.DeltaVerdict.Bundles;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Tests.Bundles;
/// <summary>
/// Unit tests for <c>VerdictRekorAnchorService</c>: anchoring signed verdict
/// bundles in a stubbed Rekor transparency log and verifying those anchors
/// (timestamps, inclusion proofs, UUID/log-index validity). A fixed clock
/// keeps IntegratedTime and age checks deterministic.
/// </summary>
public class VerdictRekorAnchorServiceTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly StubVerdictRekorClient _stubClient;
    private readonly VerdictRekorAnchorService _anchorService;
    private readonly VerdictSigningService _signingService;
    private readonly string _testSecret;

    public VerdictRekorAnchorServiceTests()
    {
        // Clock pinned to 2026-01-18T12:00:00Z; shared by stub client and service.
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero));
        _stubClient = new StubVerdictRekorClient(_timeProvider);
        _anchorService = new VerdictRekorAnchorService(_stubClient, _timeProvider);
        _signingService = new VerdictSigningService();
        // Deterministic 32-byte HMAC secret (0x01..0x20), base64-encoded.
        _testSecret = Convert.ToBase64String(new byte[32] {
            0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
            0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10,
            0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
            0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20
        });
    }

    // Happy path: a signed bundle anchors successfully and returns linkage data.
    [Fact]
    public async Task AnchorAsync_WithSignedBundle_ReturnsAnchoredBundle()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result = await _anchorService.AnchorAsync(bundle, options);
        // Assert
        result.IsSuccess.Should().BeTrue();
        result.AnchoredBundle.Should().NotBeNull();
        result.AnchoredBundle!.RekorAnchor.Should().NotBeNull();
        result.Linkage.Should().NotBeNull();
    }

    // Anchoring fills in UUID, log index and integrated time on the anchor.
    [Fact]
    public async Task AnchorAsync_WithSignedBundle_SetsRekorFields()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result = await _anchorService.AnchorAsync(bundle, options);
        // Assert
        var anchor = result.AnchoredBundle!.RekorAnchor!;
        anchor.Uuid.Should().NotBeNullOrEmpty();
        anchor.LogIndex.Should().BeGreaterThan(0);
        anchor.IntegratedTime.Should().BeGreaterThan(0);
    }

    // Anchoring attaches a populated Merkle inclusion proof.
    [Fact]
    public async Task AnchorAsync_WithSignedBundle_IncludesInclusionProof()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result = await _anchorService.AnchorAsync(bundle, options);
        // Assert
        var proof = result.AnchoredBundle!.RekorAnchor!.InclusionProof;
        proof.Should().NotBeNull();
        proof!.TreeSize.Should().BeGreaterThan(0);
        proof.RootHash.Should().NotBeNullOrEmpty();
        proof.LogId.Should().NotBeNullOrEmpty();
    }

    // Unsigned bundles are rejected before any log submission is attempted.
    [Fact]
    public async Task AnchorAsync_WithUnsignedBundle_ReturnsFail()
    {
        // Arrange
        var bundle = CreateTestBundle();
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result = await _anchorService.AnchorAsync(bundle, options);
        // Assert
        result.IsSuccess.Should().BeFalse();
        result.Error.Should().Contain("must be signed");
    }

    // Each submission must receive its own log index.
    [Fact]
    public async Task AnchorAsync_MultipleSubmissions_GetUniqueLogIndexes()
    {
        // Arrange
        var bundle1 = await CreateSignedBundle();
        var bundle2 = await CreateSignedBundle("CVE-2024-5678@pkg:npm/express@4.0.0");
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result1 = await _anchorService.AnchorAsync(bundle1, options);
        var result2 = await _anchorService.AnchorAsync(bundle2, options);
        // Assert
        result1.Linkage!.LogIndex.Should().NotBe(result2.Linkage!.LogIndex);
    }

    // Round-trip: a freshly anchored bundle verifies under default options.
    [Fact]
    public async Task VerifyAnchorAsync_WithValidAnchor_ReturnsSuccess()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchorOptions = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        var anchoredResult = await _anchorService.AnchorAsync(bundle, anchorOptions);
        var verifyOptions = VerdictAnchorVerificationOptions.Default;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredResult.AnchoredBundle!, verifyOptions);
        // Assert
        result.IsValid.Should().BeTrue();
        result.VerifiedUuid.Should().NotBeNullOrEmpty();
        result.VerifiedLogIndex.Should().BeGreaterThan(0);
    }

    // A bundle that was never anchored cannot be verified.
    [Fact]
    public async Task VerifyAnchorAsync_WithNoAnchor_ReturnsFail()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var verifyOptions = VerdictAnchorVerificationOptions.Default;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(bundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("no Rekor anchor");
    }

    // An IntegratedTime ahead of the (fixed) clock is rejected.
    [Fact]
    public async Task VerifyAnchorAsync_WithFutureTimestamp_ReturnsFail()
    {
        // Arrange: hand-craft an anchor whose timestamp is 2 hours in the future.
        var bundle = await CreateSignedBundle();
        var futureTime = _timeProvider.GetUtcNow().AddHours(2).ToUnixTimeSeconds();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "test-uuid",
                LogIndex = 1,
                IntegratedTime = futureTime,
                InclusionProof = new InclusionProof
                {
                    TreeSize = 1,
                    RootHash = "abc123",
                    Hashes = ImmutableArray<string>.Empty,
                    LogId = "test-log"
                }
            }
        };
        var verifyOptions = VerdictAnchorVerificationOptions.Default;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("future");
    }

    // An anchor older than MaxAgeHours is rejected (48h old vs. 24h limit).
    [Fact]
    public async Task VerifyAnchorAsync_WithOldTimestamp_ReturnsFail_WhenMaxAgeExceeded()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var oldTime = _timeProvider.GetUtcNow().AddHours(-48).ToUnixTimeSeconds();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "test-uuid",
                LogIndex = 1,
                IntegratedTime = oldTime,
                InclusionProof = new InclusionProof
                {
                    TreeSize = 1,
                    RootHash = "abc123",
                    Hashes = ImmutableArray<string>.Empty,
                    LogId = "test-log"
                }
            }
        };
        var verifyOptions = new VerdictAnchorVerificationOptions { MaxAgeHours = 24 };
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("older than");
    }

    // Missing inclusion proof fails when RequireInclusionProof is set.
    [Fact]
    public async Task VerifyAnchorAsync_WithNoInclusionProof_ReturnsFail_WhenRequired()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "test-uuid",
                LogIndex = 1,
                IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds(),
                InclusionProof = null
            }
        };
        var verifyOptions = new VerdictAnchorVerificationOptions { RequireInclusionProof = true };
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("no inclusion proof");
    }

    // Missing inclusion proof is tolerated under the Relaxed profile.
    [Fact]
    public async Task VerifyAnchorAsync_WithNoInclusionProof_ReturnsSuccess_WhenNotRequired()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "test-uuid",
                LogIndex = 1,
                IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds(),
                InclusionProof = null
            }
        };
        var verifyOptions = VerdictAnchorVerificationOptions.Relaxed;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeTrue();
    }

    // An empty UUID invalidates the anchor even under Relaxed verification.
    [Fact]
    public async Task VerifyAnchorAsync_WithInvalidUuid_ReturnsFail()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "",
                LogIndex = 1,
                IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds()
            }
        };
        var verifyOptions = VerdictAnchorVerificationOptions.Relaxed;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("Invalid Rekor UUID");
    }

    // A negative log index invalidates the anchor even under Relaxed verification.
    [Fact]
    public async Task VerifyAnchorAsync_WithNegativeLogIndex_ReturnsFail()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchoredBundle = bundle with
        {
            RekorAnchor = new RekorLinkage
            {
                Uuid = "valid-uuid",
                LogIndex = -1,
                IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds()
            }
        };
        var verifyOptions = VerdictAnchorVerificationOptions.Relaxed;
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredBundle, verifyOptions);
        // Assert
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("Invalid log index");
    }

    // With VerifyInclusionProof enabled, a real anchor still verifies and the
    // integrated time is reported back.
    [Fact]
    public async Task VerifyAnchorAsync_VerifiesInclusionProof_WhenEnabled()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var anchorOptions = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        var anchoredResult = await _anchorService.AnchorAsync(bundle, anchorOptions);
        var verifyOptions = new VerdictAnchorVerificationOptions
        {
            RequireInclusionProof = true,
            VerifyInclusionProof = true
        };
        // Act
        var result = await _anchorService.VerifyAnchorAsync(anchoredResult.AnchoredBundle!, verifyOptions);
        // Assert
        result.IsValid.Should().BeTrue();
        result.VerifiedIntegratedTime.Should().NotBeNull();
    }

    // Anchoring must not mutate any field of the original bundle.
    [Fact]
    public async Task AnchorAsync_PreservesOriginalBundleFields()
    {
        // Arrange
        var bundle = await CreateSignedBundle();
        var options = new VerdictAnchorOptions { RekorUrl = "https://rekor.example.com" };
        // Act
        var result = await _anchorService.AnchorAsync(bundle, options);
        // Assert
        result.AnchoredBundle!.BundleId.Should().Be(bundle.BundleId);
        result.AnchoredBundle.FindingId.Should().Be(bundle.FindingId);
        result.AnchoredBundle.FinalScore.Should().Be(bundle.FinalScore);
        result.AnchoredBundle.DsseSignature.Should().Be(bundle.DsseSignature);
    }

    /// <summary>
    /// Builds a test bundle and signs it with the fixture's HMAC key so it is
    /// eligible for anchoring.
    /// </summary>
    private async Task<VerdictBundle> CreateSignedBundle(string? findingId = null)
    {
        var bundle = CreateTestBundle(findingId);
        var options = new VerdictSigningOptions
        {
            KeyId = "test-key-1",
            Algorithm = VerdictSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        return await _signingService.SignAsync(bundle, options);
    }

    /// <summary>
    /// Builds a fully-populated, unsigned verdict bundle with fixed fixture
    /// values (score 0.65 / Block decision) for a configurable finding id.
    /// </summary>
    private static VerdictBundle CreateTestBundle(string? findingId = null)
    {
        var computedAt = DateTimeOffset.Parse("2026-01-18T12:00:00Z");
        return new VerdictBundle
        {
            BundleId = "sha256:abc123",
            FindingId = findingId ?? "CVE-2024-1234@pkg:npm/lodash@4.17.20",
            ManifestRef = new ScoringManifestRef
            {
                ScoringVersion = "v2026-01-18-1",
                ManifestDigest = "sha256:manifest123"
            },
            Inputs = new VerdictInputs
            {
                Cvss = new CvssInput
                {
                    BaseScore = 7.5,
                    Version = "3.1",
                    Source = "nvd",
                    CapturedAt = computedAt
                },
                Epss = new EpssInput
                {
                    Probability = 0.42,
                    Source = "first.org",
                    CapturedAt = computedAt
                },
                Reachability = new ReachabilityInputRecord
                {
                    Level = "function",
                    Value = 0.7,
                    Source = "stella-scanner",
                    CapturedAt = computedAt
                },
                ExploitMaturity = new ExploitMaturityInput
                {
                    Level = "poc",
                    Value = 0.5,
                    Source = "nvd",
                    CapturedAt = computedAt
                },
                PatchProof = new PatchProofInput
                {
                    Confidence = 0.3,
                    Source = "stella-verifier",
                    CapturedAt = computedAt
                }
            },
            Normalization = new NormalizationTrace
            {
                Dimensions = ImmutableArray.Create(
                    new DimensionNormalization
                    {
                        Dimension = "cvss_base",
                        Symbol = "CVS",
                        RawValue = 7.5,
                        NormalizedValue = 0.75,
                        Method = "linear",
                        Weight = 0.25,
                        Contribution = 0.1875
                    })
            },
            RawScore = 0.65,
            FinalScore = 0.65,
            Gate = new GateDecision
            {
                Action = GateAction.Block,
                Reason = "Score 0.65 exceeds block threshold 0.65",
                Threshold = 0.65,
                MatchedRules = ["block_threshold"],
                Suggestions = ["Review finding urgently"]
            },
            ComputedAt = computedAt,
            BundleDigest = "sha256:abc123"
        };
    }
}

View File

@@ -0,0 +1,473 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_030_LIB_verdict_rekor_gate_api
// Task: TASK-030-003 - Unit tests for VerdictSigningService
using System.Collections.Immutable;
using System.Text.Json;
using FluentAssertions;
using StellaOps.DeltaVerdict.Bundles;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Tests.Bundles;
public class VerdictSigningServiceTests
{
// Service under test; constructed once per test (xUnit creates a fresh
// test-class instance for each [Fact]).
private readonly VerdictSigningService _signingService = new();
// Deterministic 32-byte HMAC secret (0x01..0x20), base64-encoded, so
// signatures are reproducible across runs.
private readonly string _testSecret = Convert.ToBase64String(new byte[32] {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10,
    0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
    0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20
});
[Fact]
public async Task SignAsync_WithValidBundle_ReturnsSignedBundle()
{
    // Signing attaches a DSSE envelope without disturbing the bundle payload.
    var original = CreateTestBundle();

    var signed = await _signingService.SignAsync(original, new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    signed.DsseSignature.Should().NotBeNullOrEmpty();
    signed.BundleId.Should().Be(original.BundleId);
    signed.FindingId.Should().Be(original.FindingId);
    signed.FinalScore.Should().Be(original.FinalScore);
}
[Fact]
public async Task SignAsync_ProducesValidDsseEnvelope()
{
    // The signature field must deserialize into a one-signature DSSE envelope
    // carrying the expected payload type and key id.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    var envelope = JsonSerializer.Deserialize<VerdictDsseEnvelope>(signed.DsseSignature!);

    envelope.Should().NotBeNull();
    envelope!.PayloadType.Should().Be(VerdictSigningService.PayloadType);
    envelope.Payload.Should().NotBeNullOrEmpty();
    envelope.Signatures.Should().HaveCount(1);
    envelope.Signatures[0].KeyId.Should().Be("test-key-1");
    envelope.Signatures[0].Sig.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task VerifyAsync_WithValidSignature_ReturnsSuccess()
{
    // Round-trip: sign with a key, verify with the same key id and secret.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    var outcome = await _signingService.VerifyAsync(signed, new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    outcome.IsValid.Should().BeTrue();
    outcome.VerifiedKeyId.Should().Be("test-key-1");
    outcome.Error.Should().BeNull();
}
[Fact]
public async Task VerifyAsync_WithWrongSecret_ReturnsFail()
{
    // A bundle signed with one secret must not verify under a different one.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });
    var allZeroSecret = Convert.ToBase64String(new byte[32]);

    var outcome = await _signingService.VerifyAsync(signed, new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = allZeroSecret
    });

    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("Signature verification failed");
}
[Fact]
public async Task VerifyAsync_WithWrongKeyId_ReturnsFail()
{
    // Even with the correct secret, a mismatched key id must fail verification.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    var outcome = await _signingService.VerifyAsync(signed, new VerdictVerificationOptions
    {
        KeyId = "different-key",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("Signature verification failed");
}
[Fact]
public async Task VerifyAsync_WithUnsignedBundle_ReturnsFail()
{
    // Verifying a bundle that was never signed must fail with a clear error.
    var unsigned = CreateTestBundle();

    var outcome = await _signingService.VerifyAsync(unsigned, new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("Bundle is not signed");
}
[Fact]
public async Task VerifyAsync_WithInvalidEnvelope_ReturnsFail()
{
    // A signature field that is not a parseable DSSE envelope must be rejected.
    var corrupted = CreateTestBundle() with { DsseSignature = "invalid json" };

    var outcome = await _signingService.VerifyAsync(corrupted, new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("Invalid signature envelope");
}
[Fact]
public async Task VerifyAsync_DetectsTamperedContent()
{
    // Mutating any signed-over field after signing must invalidate the signature.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });
    var tampered = signed with { FinalScore = 0.99 };

    var outcome = await _signingService.VerifyAsync(tampered, new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("modified");
}
[Fact]
public async Task SignAsync_WithSha256Algorithm_ProducesSignature()
{
    // The Sha256 algorithm (no shared secret configured) still yields an envelope.
    var signed = await _signingService.SignAsync(CreateTestBundle(), new VerdictSigningOptions
    {
        KeyId = "test-key-sha256",
        Algorithm = VerdictSigningAlgorithm.Sha256
    });

    signed.DsseSignature.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task SignAsync_SigningTwiceWithSameOptions_ProducesSameSignature()
{
    // Signing is deterministic: identical bundle + options => identical envelope.
    var bundle = CreateTestBundle();
    var keyOptions = new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };

    var first = await _signingService.SignAsync(bundle, keyOptions);
    var second = await _signingService.SignAsync(bundle, keyOptions);

    first.DsseSignature.Should().Be(second.DsseSignature);
}
[Fact]
public void GetCanonicalJson_IsDeterministic()
{
    // Arrange
    var bundle = CreateTestBundle();

    // Act: canonicalize the same bundle twice.
    var first = _signingService.GetCanonicalJson(bundle);
    var second = _signingService.GetCanonicalJson(bundle);

    // Assert: canonical JSON must be byte-for-byte stable.
    first.Should().Be(second);
}
[Fact]
public void GetCanonicalJson_ExcludesSignatureAndRekorFields()
{
    // Arrange: populate the fields that must NOT be part of the signed payload.
    var bundle = CreateTestBundle() with
    {
        DsseSignature = "some-signature",
        RekorAnchor = new RekorLinkage
        {
            Uuid = "test-uuid",
            LogIndex = 123,
            IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds()
        }
    };

    // Act
    var canonical = _signingService.GetCanonicalJson(bundle);

    // Assert: signature/anchor are excluded while core fields remain.
    canonical.Should().NotContain("dsse_signature");
    canonical.Should().NotContain("rekor_anchor");
    canonical.Should().Contain("finding_id");
    canonical.Should().Contain("final_score");
}
[Fact]
public async Task SignAsync_WithExistingSignature_ReplacesSignature()
{
    // Arrange: two signing configurations with distinct key ids.
    var bundle = CreateTestBundle();
    var firstKey = new VerdictSigningOptions
    {
        KeyId = "key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };
    var secondKey = new VerdictSigningOptions
    {
        KeyId = "key-2",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };

    // Act: re-sign an already-signed bundle with a different key.
    var onceSigned = await _signingService.SignAsync(bundle, firstKey);
    var resigned = await _signingService.SignAsync(onceSigned, secondKey);

    // Assert: the old signature is replaced, not appended.
    var envelope = JsonSerializer.Deserialize<VerdictDsseEnvelope>(resigned.DsseSignature!);
    envelope!.Signatures.Should().HaveCount(1);
    envelope.Signatures[0].KeyId.Should().Be("key-2");
}
[Fact]
public async Task VerifyAsync_WithWrongPayloadType_ReturnsFail()
{
    // Arrange: sign, then rewrite the envelope with a bogus payload type
    // while keeping the payload and signatures intact.
    var signOpts = new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };
    var signed = await _signingService.SignAsync(CreateTestBundle(), signOpts);

    var envelope = JsonSerializer.Deserialize<VerdictDsseEnvelope>(signed.DsseSignature!);
    var forged = signed with
    {
        DsseSignature = JsonSerializer.Serialize(new VerdictDsseEnvelope(
            "application/wrong-type",
            envelope!.Payload,
            envelope.Signatures))
    };

    var verifyOpts = new VerdictVerificationOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };

    // Act
    var result = await _signingService.VerifyAsync(forged, verifyOpts);

    // Assert
    result.IsValid.Should().BeFalse();
    result.Error.Should().Contain("Invalid payload type");
}
[Fact]
public async Task SignAsync_ThrowsOnMissingHmacSecret()
{
    // Arrange: HMAC mode with no secret supplied.
    var options = new VerdictSigningOptions
    {
        KeyId = "test-key-1",
        Algorithm = VerdictSigningAlgorithm.HmacSha256,
        SecretBase64 = null
    };

    // Act
    Func<Task> act = () => _signingService.SignAsync(CreateTestBundle(), options);

    // Assert: signing must refuse to proceed without key material.
    await Assert.ThrowsAsync<InvalidOperationException>(act);
}
[Fact]
public void PayloadType_IsCorrect()
{
    // The DSSE payload type string is part of the public signing contract.
    const string expected = "application/vnd.stella.scoring.v1+json";
    VerdictSigningService.PayloadType.Should().Be(expected);
}
// Builds a fully-populated, deterministic verdict-bundle fixture.
// All timestamps derive from a single fixed instant so repeated signing of
// this bundle yields identical canonical JSON (required by the determinism
// assertions in this class).
private static VerdictBundle CreateTestBundle()
{
    // Single fixed capture/computation instant shared by every input record.
    var computedAt = DateTimeOffset.Parse("2026-01-18T12:00:00Z");
    return new VerdictBundle
    {
        BundleId = "sha256:abc123",
        FindingId = "CVE-2024-1234@pkg:npm/lodash@4.17.20",
        ManifestRef = new ScoringManifestRef
        {
            ScoringVersion = "v2026-01-18-1",
            ManifestDigest = "sha256:manifest123"
        },
        // One record per scoring dimension, all captured at the same instant.
        Inputs = new VerdictInputs
        {
            Cvss = new CvssInput
            {
                BaseScore = 7.5,
                Version = "3.1",
                Source = "nvd",
                CapturedAt = computedAt
            },
            Epss = new EpssInput
            {
                Probability = 0.42,
                Source = "first.org",
                CapturedAt = computedAt
            },
            Reachability = new ReachabilityInputRecord
            {
                Level = "function",
                Value = 0.7,
                Source = "stella-scanner",
                CapturedAt = computedAt
            },
            ExploitMaturity = new ExploitMaturityInput
            {
                Level = "poc",
                Value = 0.5,
                Source = "nvd",
                CapturedAt = computedAt
            },
            PatchProof = new PatchProofInput
            {
                Confidence = 0.3,
                Source = "stella-verifier",
                CapturedAt = computedAt
            }
        },
        // Trace of how the raw CVSS value was normalized and weighted
        // (7.5 on a 0-10 scale -> 0.75, weighted 0.25 -> 0.1875 contribution).
        Normalization = new NormalizationTrace
        {
            Dimensions = ImmutableArray.Create(
                new DimensionNormalization
                {
                    Dimension = "cvss_base",
                    Symbol = "CVS",
                    RawValue = 7.5,
                    NormalizedValue = 0.75,
                    Method = "linear",
                    Weight = 0.25,
                    Contribution = 0.1875
                })
        },
        RawScore = 0.65,
        FinalScore = 0.65,
        // Score sits exactly on the block threshold, so the gate blocks.
        Gate = new GateDecision
        {
            Action = GateAction.Block,
            Reason = "Score 0.65 exceeds block threshold 0.65",
            Threshold = 0.65,
            MatchedRules = ["block_threshold"],
            Suggestions = ["Review finding urgently"]
        },
        ComputedAt = computedAt,
        BundleDigest = "sha256:abc123"
    };
}
}

View File

@@ -0,0 +1,221 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-001 - Unit tests for ScoringManifest model
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.DeltaVerdict.Manifest;
namespace StellaOps.DeltaVerdict.Tests.Manifest;
/// <summary>
/// Unit tests for the <see cref="ScoringManifest"/> model and its supporting
/// value types (weights, normalizer ranges, Rekor linkage records).
/// </summary>
public class ScoringManifestTests
{
    [Fact]
    public void ScoringManifest_Instantiation_WithAllRequiredFields()
    {
        // Arrange & Act
        var manifest = CreateTestManifest();

        // Assert: every required field round-trips through the initializer.
        manifest.Should().NotBeNull();
        manifest.SchemaVersion.Should().Be(ScoringManifest.CurrentSchemaVersion);
        manifest.ScoringVersion.Should().Be("v2026-01-18-1");
        manifest.Weights.Should().NotBeNull();
        manifest.Normalizers.Should().NotBeNull();
        manifest.TrustedVexKeys.Should().HaveCount(2);
        manifest.CodeHash.Should().StartWith("sha256:");
    }

    [Fact]
    public void ScoringWeights_Default_SumsToOne()
    {
        // Arrange & Act
        var weights = ScoringWeights.Default;

        // Assert: default weights form a convex combination (sum ~ 1.0).
        weights.Sum.Should().BeApproximately(1.0, 0.01);
    }

    [Fact]
    public void ScoringWeights_Validate_ReturnsNoErrors_WhenValid()
    {
        // Arrange
        var weights = ScoringWeights.Default;

        // Act
        var errors = weights.Validate();

        // Assert
        errors.Should().BeEmpty();
    }

    [Fact]
    public void ScoringWeights_Validate_ReturnsErrors_WhenWeightOutOfRange()
    {
        // Arrange
        var weights = new ScoringWeights
        {
            CvssBase = 1.5, // Invalid: > 1.0
            Epss = 0.2,
            Reachability = 0.25,
            ExploitMaturity = 0.15,
            PatchProofConfidence = 0.15
        };

        // Act
        var errors = weights.Validate();

        // Assert: the offending field and the reason are both named.
        errors.Should().Contain(e => e.Contains("cvss_base") && e.Contains("range"));
    }

    [Fact]
    public void ScoringWeights_Validate_ReturnsErrors_WhenNaN()
    {
        // Arrange
        var weights = new ScoringWeights
        {
            CvssBase = double.NaN,
            Epss = 0.2,
            Reachability = 0.25,
            ExploitMaturity = 0.15,
            PatchProofConfidence = 0.15
        };

        // Act
        var errors = weights.Validate();

        // Assert
        errors.Should().Contain(e => e.Contains("cvss_base") && e.Contains("valid number"));
    }

    [Fact]
    public void ScoringNormalizers_Default_HasExpectedRanges()
    {
        // Arrange & Act
        var normalizers = ScoringNormalizers.Default;

        // Assert: CVSS spans 0-10; EPSS is already a probability (0-1).
        normalizers.CvssRange.Min.Should().Be(0.0);
        normalizers.CvssRange.Max.Should().Be(10.0);
        normalizers.EpssRange.Min.Should().Be(0.0);
        normalizers.EpssRange.Max.Should().Be(1.0);
    }

    [Fact]
    public void NormalizerRange_Normalize_ReturnsExpectedValues()
    {
        // Arrange
        var range = new NormalizerRange { Min = 0.0, Max = 10.0 };

        // Act & Assert: linear mapping onto [0, 1].
        range.Normalize(0.0).Should().Be(0.0);
        range.Normalize(5.0).Should().Be(0.5);
        range.Normalize(10.0).Should().Be(1.0);
    }

    [Fact]
    public void NormalizerRange_Normalize_ClampsValues()
    {
        // Arrange
        var range = new NormalizerRange { Min = 0.0, Max = 10.0 };

        // Act & Assert: out-of-range inputs clamp rather than extrapolate.
        range.Normalize(-5.0).Should().Be(0.0); // Clamped to min
        range.Normalize(15.0).Should().Be(1.0); // Clamped to max
    }

    [Fact]
    public void NormalizerRange_Normalize_HandlesZeroRange()
    {
        // Arrange: degenerate range where Min == Max.
        var range = new NormalizerRange { Min = 5.0, Max = 5.0 };

        // Act
        var result = range.Normalize(5.0);

        // Assert
        result.Should().Be(0.0); // Zero-width range returns 0 (no divide-by-zero)
    }

    [Fact]
    public void RekorLinkage_Instantiation_WithRequiredFields()
    {
        // Arrange & Act
        var linkage = new RekorLinkage
        {
            Uuid = "abc123",
            LogIndex = 12345,
            IntegratedTime = 1705593600
        };

        // Assert: the optional inclusion proof defaults to null.
        linkage.Uuid.Should().Be("abc123");
        linkage.LogIndex.Should().Be(12345);
        linkage.IntegratedTime.Should().Be(1705593600);
        linkage.InclusionProof.Should().BeNull();
    }

    [Fact]
    public void InclusionProof_Instantiation_WithRequiredFields()
    {
        // Arrange & Act
        var proof = new InclusionProof
        {
            TreeSize = 1000000,
            RootHash = "sha256:abc123",
            Hashes = ImmutableArray.Create("hash1", "hash2", "hash3"),
            LogId = "sigstore-log-1"
        };

        // Assert
        proof.TreeSize.Should().Be(1000000);
        proof.RootHash.Should().Be("sha256:abc123");
        proof.Hashes.Should().HaveCount(3);
        proof.LogId.Should().Be("sigstore-log-1");
    }

    [Fact]
    public void ScoringManifest_IsImmutable_ViaRecordSemantics()
    {
        // Arrange
        var original = CreateTestManifest();

        // Act - a 'with' expression creates a new instance
        var modified = original with { ScoringVersion = "v2026-01-18-2" };

        // Assert: the original instance is untouched.
        original.ScoringVersion.Should().Be("v2026-01-18-1");
        modified.ScoringVersion.Should().Be("v2026-01-18-2");
        original.Should().NotBeSameAs(modified);
    }

    [Fact]
    public void ScoringManifest_TrustedVexKeys_IsImmutableArray()
    {
        // Arrange
        var manifest = CreateTestManifest();

        // Act
        var keys = manifest.TrustedVexKeys;

        // Assert: the array is materialized (not the default uninitialized struct).
        keys.Should().BeOfType<ImmutableArray<string>>();
        keys.IsDefault.Should().BeFalse();
    }

    /// <summary>
    /// Builds a fully-populated manifest fixture. All values — including the
    /// timestamp — are fixed so runs are deterministic and repeatable.
    /// </summary>
    private static ScoringManifest CreateTestManifest()
    {
        return new ScoringManifest
        {
            SchemaVersion = ScoringManifest.CurrentSchemaVersion,
            ScoringVersion = "v2026-01-18-1",
            Weights = ScoringWeights.Default,
            Normalizers = ScoringNormalizers.Default,
            TrustedVexKeys = ImmutableArray.Create("key-fingerprint-1", "key-fingerprint-2"),
            CodeHash = "sha256:abc123def456",
            // Fixed instant instead of DateTimeOffset.UtcNow: keeps the fixture
            // deterministic and consistent with the sibling test files, which
            // all pin their CreatedAt values.
            CreatedAt = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero)
        };
    }
}

View File

@@ -0,0 +1,520 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-006 - Manifest Version Bump Workflow
using System.Collections.Immutable;
using System.Security.Cryptography;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.DeltaVerdict.Signing;
using Xunit;
namespace StellaOps.DeltaVerdict.Tests.Manifest;
/// <summary>
/// Tests for <see cref="ScoringManifestVersioner"/>: manifest comparison,
/// date-sequence version generation, the bump workflow, and the combined
/// bump-and-sign path. Time is controlled via <see cref="FakeTimeProvider"/>
/// pinned to 2026-01-18T12:00Z so generated versions are deterministic.
/// </summary>
public class ScoringManifestVersionerTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly ScoringManifestVersioner _versioner;

    public ScoringManifestVersionerTests()
    {
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero));
        _versioner = new ScoringManifestVersioner(
            new ScoringManifestSigningService(),
            null, // no Rekor anchor service by default; supplied per-test where needed
            _timeProvider);
    }

    /// <summary>
    /// Deterministic baseline manifest, dated one day before the fake "now"
    /// so same-day vs. new-day version generation can both be exercised.
    /// </summary>
    private static ScoringManifest CreateTestManifest(string version = "v2026-01-17-1")
    {
        return new ScoringManifest
        {
            SchemaVersion = ScoringManifest.CurrentSchemaVersion,
            ScoringVersion = version,
            Weights = ScoringWeights.Default,
            Normalizers = ScoringNormalizers.Default,
            TrustedVexKeys = ImmutableArray.Create("key1", "key2"),
            CodeHash = "sha256:abc123def456",
            CreatedAt = new DateTimeOffset(2026, 1, 17, 12, 0, 0, TimeSpan.Zero)
        };
    }

    #region Compare Tests

    [Fact]
    public void Compare_IdenticalManifests_RequiresNoBump()
    {
        var manifest = CreateTestManifest();

        var result = _versioner.Compare(manifest, manifest);

        result.RequiresBump.Should().BeFalse();
        result.Changes.Should().BeEmpty();
    }

    [Fact]
    public void Compare_DifferentWeights_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            Weights = ScoringWeights.Default with { CvssBase = 0.30 }
        };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().ContainSingle();
        result.Changes[0].Field.Should().Be("weights.cvss_base");
        result.Changes[0].ChangeType.Should().Be(ManifestChangeType.WeightChange);
    }

    [Fact]
    public void Compare_MultipleWeightChanges_ReportsAll()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            Weights = new ScoringWeights
            {
                CvssBase = 0.30,
                Epss = 0.25,
                Reachability = ScoringWeights.Default.Reachability,
                ExploitMaturity = ScoringWeights.Default.ExploitMaturity,
                PatchProofConfidence = ScoringWeights.Default.PatchProofConfidence
            }
        };

        var result = _versioner.Compare(current, proposed);

        // Every changed weight is reported individually.
        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().HaveCount(2);
        result.Changes.Should().Contain(c => c.Field == "weights.cvss_base");
        result.Changes.Should().Contain(c => c.Field == "weights.epss");
    }

    [Fact]
    public void Compare_DifferentTrustedKeys_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            TrustedVexKeys = ImmutableArray.Create("key1", "key2", "key3")
        };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        // Bind the ChangeType assertion to the matched change instead of
        // Changes[0], so the test does not depend on change ordering.
        var added = result.Changes.Should()
            .ContainSingle(c => c.Field == "trusted_vex_keys.added").Which;
        added.ChangeType.Should().Be(ManifestChangeType.TrustChange);
    }

    [Fact]
    public void Compare_RemovedTrustedKey_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            TrustedVexKeys = ImmutableArray.Create("key1")
        };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().Contain(c => c.Field == "trusted_vex_keys.removed");
    }

    [Fact]
    public void Compare_DifferentCodeHash_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().ContainSingle();
        result.Changes[0].Field.Should().Be("code_hash");
        result.Changes[0].ChangeType.Should().Be(ManifestChangeType.CodeChange);
    }

    [Fact]
    public void Compare_DifferentSchemaVersion_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with { SchemaVersion = "stella-scoring/2.0.0" };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().ContainSingle();
        result.Changes[0].Field.Should().Be("schema_version");
        result.Changes[0].ChangeType.Should().Be(ManifestChangeType.SchemaChange);
    }

    [Fact]
    public void Compare_DifferentNormalizerRange_RequiresBump()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            Normalizers = ScoringNormalizers.Default with
            {
                CvssRange = new NormalizerRange { Min = 0, Max = 15 }
            }
        };

        var result = _versioner.Compare(current, proposed);

        result.RequiresBump.Should().BeTrue();
        result.Changes.Should().ContainSingle();
        result.Changes[0].Field.Should().Be("normalizers.cvss_range");
        result.Changes[0].ChangeType.Should().Be(ManifestChangeType.NormalizerChange);
    }

    [Fact]
    public void Compare_ReturnsDigests()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Compare(current, proposed);

        // Both sides are digested so callers can audit what was compared.
        result.CurrentDigest.Should().NotBeNullOrEmpty();
        result.ProposedDigest.Should().NotBeNullOrEmpty();
        result.CurrentDigest.Should().NotBe(result.ProposedDigest);
    }

    #endregion

    #region GenerateNextVersion Tests

    [Fact]
    public void GenerateNextVersion_NullVersion_StartsAtOne()
    {
        var version = _versioner.GenerateNextVersion(null);

        version.Should().Be("v2026-01-18-1");
    }

    [Fact]
    public void GenerateNextVersion_EmptyVersion_StartsAtOne()
    {
        var version = _versioner.GenerateNextVersion("");

        version.Should().Be("v2026-01-18-1");
    }

    [Fact]
    public void GenerateNextVersion_SameDay_IncrementsSequence()
    {
        var version = _versioner.GenerateNextVersion("v2026-01-18-1");

        version.Should().Be("v2026-01-18-2");
    }

    [Fact]
    public void GenerateNextVersion_SameDay_MultipleIncrements()
    {
        var version1 = _versioner.GenerateNextVersion("v2026-01-18-5");
        var version2 = _versioner.GenerateNextVersion("v2026-01-18-99");

        // Sequence numbers are plain integers, not zero-padded.
        version1.Should().Be("v2026-01-18-6");
        version2.Should().Be("v2026-01-18-100");
    }

    [Fact]
    public void GenerateNextVersion_DifferentDay_ResetsSequence()
    {
        var version = _versioner.GenerateNextVersion("v2026-01-17-5");

        version.Should().Be("v2026-01-18-1");
    }

    [Fact]
    public void GenerateNextVersion_InvalidFormat_StartsAtOne()
    {
        var version = _versioner.GenerateNextVersion("invalid");

        version.Should().Be("v2026-01-18-1");
    }

    [Fact]
    public void GenerateNextVersion_UsesTimeProvider()
    {
        // Move the injected clock; the generated date must follow it.
        _timeProvider.SetUtcNow(new DateTimeOffset(2026, 6, 15, 0, 0, 0, TimeSpan.Zero));

        var version = _versioner.GenerateNextVersion(null);

        version.Should().Be("v2026-06-15-1");
    }

    #endregion

    #region Bump Tests

    [Fact]
    public void Bump_NoChanges_ReturnsNoBumpRequired()
    {
        var manifest = CreateTestManifest();

        var result = _versioner.Bump(manifest, manifest, "No reason");

        // No diff means the original manifest is passed through unchanged.
        result.IsSuccess.Should().BeTrue();
        result.BumpRequired.Should().BeFalse();
        result.BumpedManifest.Should().Be(manifest);
    }

    [Fact]
    public void Bump_WithChanges_CreatesNewVersion()
    {
        var current = CreateTestManifest("v2026-01-17-1");
        var proposed = current with
        {
            Weights = ScoringWeights.Default with { CvssBase = 0.30 }
        };

        var result = _versioner.Bump(current, proposed, "Updated CVSS weight");

        result.IsSuccess.Should().BeTrue();
        result.BumpRequired.Should().BeTrue();
        result.BumpedManifest.Should().NotBeNull();
        result.BumpedManifest!.ScoringVersion.Should().Be("v2026-01-18-1");
    }

    [Fact]
    public void Bump_PreservesProposedChanges()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            Weights = new ScoringWeights
            {
                CvssBase = 0.30,
                Epss = 0.25,
                Reachability = 0.20,
                ExploitMaturity = 0.15,
                PatchProofConfidence = 0.10
            }
        };

        var result = _versioner.Bump(current, proposed, "Updated all weights");

        // The bumped manifest carries the proposed content, not the old one.
        result.BumpedManifest!.Weights.CvssBase.Should().Be(0.30);
        result.BumpedManifest.Weights.Epss.Should().Be(0.25);
    }

    [Fact]
    public void Bump_UpdatesCreatedAt()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Bump(current, proposed, "Updated code");

        // CreatedAt is stamped from the injected clock, not the wall clock.
        result.BumpedManifest!.CreatedAt.Should().Be(_timeProvider.GetUtcNow());
    }

    [Fact]
    public void Bump_ClearsSignatureAndAnchor()
    {
        // A previously signed/anchored manifest must lose its digest,
        // signature, and Rekor anchor on bump: the new content invalidates them.
        var current = CreateTestManifest() with
        {
            ManifestDigest = "sha256:old",
            DsseSignature = "old-sig",
            RekorAnchor = new RekorLinkage
            {
                Uuid = "old-uuid",
                LogIndex = 1,
                IntegratedTime = 1234567890
            }
        };
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Bump(current, proposed, "Updated code");

        result.BumpedManifest!.ManifestDigest.Should().BeNull();
        result.BumpedManifest.DsseSignature.Should().BeNull();
        result.BumpedManifest.RekorAnchor.Should().BeNull();
    }

    [Fact]
    public void Bump_CreatesHistoryEntry()
    {
        var current = CreateTestManifest("v2026-01-17-2");
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Bump(current, proposed, "Algorithm update");

        // The history entry records the full before/after version, the
        // operator-supplied reason, the (fake) time, and the change list.
        result.HistoryEntry.Should().NotBeNull();
        result.HistoryEntry!.PreviousVersion.Should().Be("v2026-01-17-2");
        result.HistoryEntry.NewVersion.Should().Be("v2026-01-18-1");
        result.HistoryEntry.Reason.Should().Be("Algorithm update");
        result.HistoryEntry.BumpedAt.Should().Be(_timeProvider.GetUtcNow());
        result.HistoryEntry.Changes.Should().NotBeEmpty();
    }

    [Fact]
    public void Bump_IncludesComparison()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        var result = _versioner.Bump(current, proposed, "Algorithm update");

        result.Comparison.Should().NotBeNull();
        result.Comparison!.RequiresBump.Should().BeTrue();
    }

    [Fact]
    public void Bump_EmptyReason_Throws()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };

        // A bump must always be accompanied by a human-readable reason.
        var act = () => _versioner.Bump(current, proposed, "");

        act.Should().Throw<ArgumentException>();
    }

    #endregion

    #region BumpAndSignAsync Tests

    [Fact]
    public async Task BumpAndSignAsync_SignsManifest()
    {
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };
        var testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = "test-key",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = testSecret
        };

        var result = await _versioner.BumpAndSignAsync(current, proposed, "Updated", signingOptions);

        // The bumped manifest comes back with a fresh digest and signature.
        result.IsSuccess.Should().BeTrue();
        result.BumpedManifest.Should().NotBeNull();
        result.BumpedManifest!.ManifestDigest.Should().NotBeNull();
        result.BumpedManifest.DsseSignature.Should().NotBeNull();
    }

    [Fact]
    public async Task BumpAndSignAsync_NoChanges_ReturnsOriginal()
    {
        var manifest = CreateTestManifest();
        var testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = "test-key",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = testSecret
        };

        var result = await _versioner.BumpAndSignAsync(manifest, manifest, "No changes", signingOptions);

        result.IsSuccess.Should().BeTrue();
        result.BumpRequired.Should().BeFalse();
    }

    [Fact]
    public async Task BumpAndSignAsync_WithAnchorService_AnchorsManifest()
    {
        // Wire a versioner with a stubbed Rekor client so anchoring is exercised.
        var signingService = new ScoringManifestSigningService();
        var stubRekorClient = new StubRekorSubmissionClient(_timeProvider);
        var anchorService = new ScoringManifestRekorAnchorService(stubRekorClient, _timeProvider);
        var versioner = new ScoringManifestVersioner(signingService, anchorService, _timeProvider);
        var current = CreateTestManifest();
        var proposed = current with { CodeHash = "sha256:newcodehash" };
        var testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = "test-key",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = testSecret
        };
        var anchorOptions = new ManifestAnchorOptions
        {
            RekorUrl = "https://rekor.sigstore.dev"
        };

        var result = await versioner.BumpAndSignAsync(current, proposed, "Updated", signingOptions, anchorOptions);

        result.IsSuccess.Should().BeTrue();
        result.BumpedManifest.Should().NotBeNull();
        result.BumpedManifest!.RekorAnchor.Should().NotBeNull();
    }

    #endregion

    #region Integration Tests

    [Fact]
    public async Task FullWorkflow_Bump_Sign_Verify()
    {
        var signingService = new ScoringManifestSigningService();
        var versioner = new ScoringManifestVersioner(signingService, null, _timeProvider);
        var current = CreateTestManifest("v2026-01-17-3");
        var proposed = current with
        {
            Weights = ScoringWeights.Default with { Epss = 0.25 }
        };
        var testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = "test-key",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = testSecret
        };

        // Bump and sign
        var bumpResult = await versioner.BumpAndSignAsync(current, proposed, "Updated EPSS weight", signingOptions);
        bumpResult.IsSuccess.Should().BeTrue();

        // Verify signature
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = "test-key",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = testSecret
        };
        var verifyResult = await signingService.VerifyAsync(bumpResult.BumpedManifest!, verifyOptions);
        verifyResult.IsValid.Should().BeTrue();
    }

    [Fact]
    public void HistoryEntry_TracksAllChanges()
    {
        var current = CreateTestManifest();
        var proposed = current with
        {
            Weights = new ScoringWeights
            {
                CvssBase = 0.30,
                Epss = 0.25,
                Reachability = 0.20,
                ExploitMaturity = 0.15,
                PatchProofConfidence = 0.10
            },
            CodeHash = "sha256:newcodehash"
        };

        var result = _versioner.Bump(current, proposed, "Major update");

        // Weight changes and the code-hash change all land in one history entry.
        result.HistoryEntry!.Changes.Should().HaveCountGreaterThan(1);
        result.HistoryEntry.Changes.Should().Contain(c => c.Field == "code_hash");
        result.HistoryEntry.Changes.Should().Contain(c => c.Field.StartsWith("weights."));
    }

    #endregion
}

View File

@@ -0,0 +1,459 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-005 - Scoring Manifest Rekor Anchoring
using System.Collections.Immutable;
using System.Security.Cryptography;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.DeltaVerdict.Signing;
using Xunit;
namespace StellaOps.DeltaVerdict.Tests.Signing;
public class ScoringManifestRekorAnchorServiceTests
{
private readonly ScoringManifestSigningService _signingService = new();
private readonly string _testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
private const string TestKeyId = "test-key-001";
private const string TestRekorUrl = "https://rekor.sigstore.dev";
// Builds a minimal valid (unsigned) manifest fixture; all values are fixed
// so test runs are deterministic.
private static ScoringManifest CreateTestManifest()
{
    return new ScoringManifest
    {
        SchemaVersion = ScoringManifest.CurrentSchemaVersion,
        ScoringVersion = "v2026-01-18-1",
        Weights = ScoringWeights.Default,
        Normalizers = ScoringNormalizers.Default,
        TrustedVexKeys = ImmutableArray.Create("key1", "key2"),
        CodeHash = "sha256:abc123def456",
        CreatedAt = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero)
    };
}

// Signs the fixture with the shared HMAC test secret so anchoring tests
// start from a manifest that already carries a valid DSSE signature.
private async Task<ScoringManifest> CreateSignedManifest()
{
    var manifest = CreateTestManifest();
    var signingOptions = new ManifestSigningOptions
    {
        KeyId = TestKeyId,
        Algorithm = ManifestSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    };
    return await _signingService.SignAsync(manifest, signingOptions);
}
#region Anchor Tests
[Fact]
public async Task AnchorAsync_SignedManifest_Succeeds()
{
    // Arrange
    var manifest = await CreateSignedManifest();
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };

    // Act
    var result = await service.AnchorAsync(manifest, options);

    // Assert: a properly signed manifest anchors cleanly with no error.
    result.IsSuccess.Should().BeTrue();
    result.AnchoredManifest.Should().NotBeNull();
    result.Linkage.Should().NotBeNull();
    result.Error.Should().BeNull();
}
[Fact]
public async Task AnchorAsync_PopulatesRekorLinkage()
{
    // Arrange
    var manifest = await CreateSignedManifest();
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };

    // Act
    var result = await service.AnchorAsync(manifest, options);

    // Assert: every linkage field is filled in by the anchoring step.
    var anchor = result.AnchoredManifest!.RekorAnchor;
    anchor.Should().NotBeNull();
    anchor!.Uuid.Should().NotBeNullOrEmpty();
    anchor.LogIndex.Should().BeGreaterThan(0);
    anchor.IntegratedTime.Should().BeGreaterThan(0);
}
[Fact]
public async Task AnchorAsync_PopulatesInclusionProof()
{
    // Arrange
    var manifest = await CreateSignedManifest();
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };

    // Act
    var result = await service.AnchorAsync(manifest, options);

    // Assert: an inclusion proof with a populated tree head accompanies the anchor.
    var proof = result.AnchoredManifest!.RekorAnchor!.InclusionProof;
    proof.Should().NotBeNull();
    proof!.TreeSize.Should().BeGreaterThan(0);
    proof.RootHash.Should().NotBeNullOrEmpty();
    proof.LogId.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task AnchorAsync_UnsignedManifest_Fails()
{
    // Arrange: an unsigned manifest must be rejected before submission.
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };

    // Act
    var result = await service.AnchorAsync(CreateTestManifest(), options);

    // Assert
    result.IsSuccess.Should().BeFalse();
    result.Error.Should().Contain("signed");
}
[Fact]
public async Task AnchorAsync_InvalidDsseSignature_Fails()
{
    // Arrange: malformed DSSE JSON in the signature slot.
    var broken = CreateTestManifest() with { DsseSignature = "invalid json {" };
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };

    // Act
    var result = await service.AnchorAsync(broken, options);

    // Assert
    result.IsSuccess.Should().BeFalse();
    result.Error.Should().Contain("Invalid DSSE signature");
}
[Fact]
public async Task AnchorAsync_SameManifest_ProducesSameUuid()
{
    // Arrange: the Rekor UUID is derived from the entry content, so the same
    // manifest must map to the same UUID even across fresh service instances.
    var manifest = await CreateSignedManifest();
    var options = new ManifestAnchorOptions { RekorUrl = TestRekorUrl };
    var firstService = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var secondService = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());

    // Act
    var first = await firstService.AnchorAsync(manifest, options);
    var second = await secondService.AnchorAsync(manifest, options);

    // Assert
    first.AnchoredManifest!.RekorAnchor!.Uuid.Should().Be(
        second.AnchoredManifest!.RekorAnchor!.Uuid);
}
#endregion
#region Verify Anchor Tests
[Fact]
public async Task VerifyAnchorAsync_ValidAnchor_Succeeds()
{
    // Arrange: sign and anchor first so verification sees a genuine anchor.
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var anchorResult = await service.AnchorAsync(
        await CreateSignedManifest(),
        new ManifestAnchorOptions { RekorUrl = TestRekorUrl });
    var verifyOptions = new ManifestAnchorVerificationOptions
    {
        RequireInclusionProof = true,
        VerifyInclusionProof = true
    };

    // Act
    var result = await service.VerifyAnchorAsync(anchorResult.AnchoredManifest!, verifyOptions);

    // Assert: verification echoes back the anchored entry's coordinates.
    result.IsValid.Should().BeTrue();
    result.VerifiedUuid.Should().NotBeNullOrEmpty();
    result.VerifiedLogIndex.Should().BeGreaterThan(0);
    result.VerifiedIntegratedTime.Should().NotBeNull();
}
[Fact]
public async Task VerifyAnchorAsync_NoAnchor_Fails()
{
    // Arrange: signed, but never anchored.
    var manifest = await CreateSignedManifest();
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());

    // Act
    var result = await service.VerifyAnchorAsync(manifest, new ManifestAnchorVerificationOptions());

    // Assert
    result.IsValid.Should().BeFalse();
    result.Error.Should().Contain("no Rekor anchor");
}
[Fact]
public async Task VerifyAnchorAsync_NoInclusionProof_FailsWhenRequired()
{
    // Arrange: hand-build an anchor that deliberately lacks an inclusion proof.
    var withAnchor = (await CreateSignedManifest()) with
    {
        RekorAnchor = new RekorLinkage
        {
            Uuid = "test-uuid",
            LogIndex = 123,
            IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
            InclusionProof = null
        }
    };
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var verifyOptions = new ManifestAnchorVerificationOptions
    {
        RequireInclusionProof = true
    };

    // Act
    var result = await service.VerifyAnchorAsync(withAnchor, verifyOptions);

    // Assert: strict mode rejects anchors without proofs.
    result.IsValid.Should().BeFalse();
    result.Error.Should().Contain("no inclusion proof");
}
[Fact]
public async Task VerifyAnchorAsync_NoInclusionProof_SucceedsWhenNotRequired()
{
    // Arrange: same proof-less anchor, but verification is run in lenient mode.
    var withAnchor = (await CreateSignedManifest()) with
    {
        RekorAnchor = new RekorLinkage
        {
            Uuid = "test-uuid",
            LogIndex = 123,
            IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
            InclusionProof = null
        }
    };
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var verifyOptions = new ManifestAnchorVerificationOptions
    {
        RequireInclusionProof = false
    };

    // Act
    var result = await service.VerifyAnchorAsync(withAnchor, verifyOptions);

    // Assert
    result.IsValid.Should().BeTrue();
}
[Fact]
public async Task VerifyAnchorAsync_FutureTimestamp_Fails()
{
    // Arrange: an integrated time one hour ahead of now is impossible for a real log.
    var futureUnixTime = DateTimeOffset.UtcNow.AddHours(1).ToUnixTimeSeconds();
    var withAnchor = (await CreateSignedManifest()) with
    {
        RekorAnchor = new RekorLinkage
        {
            Uuid = "test-uuid",
            LogIndex = 123,
            IntegratedTime = futureUnixTime,
            InclusionProof = new InclusionProof
            {
                TreeSize = 100,
                RootHash = "abc123",
                Hashes = ImmutableArray<string>.Empty,
                LogId = "test-log"
            }
        }
    };
    var service = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());

    // Act
    var result = await service.VerifyAnchorAsync(withAnchor, new ManifestAnchorVerificationOptions());

    // Assert
    result.IsValid.Should().BeFalse();
    result.Error.Should().Contain("future");
}
[Fact]
public async Task VerifyAnchorAsync_OldAnchor_FailsWhenMaxAgeExceeded()
{
    // Arrange: anchor integrated 25 hours ago, one hour past the configured 24h limit.
    var twentyFiveHoursAgo = DateTimeOffset.UtcNow.AddHours(-25).ToUnixTimeSeconds();
    var signed = await CreateSignedManifest();
    var anchored = signed with
    {
        RekorAnchor = new RekorLinkage
        {
            Uuid = "test-uuid",
            LogIndex = 123,
            IntegratedTime = twentyFiveHoursAgo,
            InclusionProof = new InclusionProof
            {
                TreeSize = 100,
                RootHash = "abc123",
                Hashes = ImmutableArray<string>.Empty,
                LogId = "test-log"
            }
        }
    };
    var sut = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());

    // Act: cap the acceptable anchor age at 24 hours.
    var outcome = await sut.VerifyAnchorAsync(anchored, new ManifestAnchorVerificationOptions
    {
        MaxAgeHours = 24
    });

    // Assert: stale anchors are rejected with an age-specific message.
    outcome.IsValid.Should().BeFalse();
    outcome.Error.Should().Contain("older than 24 hours");
}
#endregion
#region Integration Tests
[Fact]
public async Task FullWorkflow_Sign_Anchor_Verify_Succeeds()
{
    // End-to-end happy path: sign -> anchor in Rekor (stubbed) -> verify signature and anchor.
    var manifest = CreateTestManifest();

    // 1. Sign with the shared HMAC test key.
    var signed = await _signingService.SignAsync(manifest, new ManifestSigningOptions
    {
        KeyId = TestKeyId,
        Algorithm = ManifestSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    // 2. Anchor the signed manifest through the stub Rekor client.
    var anchorService = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var anchored = await anchorService.AnchorAsync(signed, new ManifestAnchorOptions { RekorUrl = TestRekorUrl });
    anchored.IsSuccess.Should().BeTrue();

    // 3. The DSSE signature must still verify after anchoring mutated the manifest.
    var signatureCheck = await _signingService.VerifyAsync(anchored.AnchoredManifest!, new ManifestVerificationOptions
    {
        KeyId = TestKeyId,
        Algorithm = ManifestSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });
    signatureCheck.IsValid.Should().BeTrue();

    // 4. The Rekor anchor itself must verify, including its inclusion proof.
    var anchorCheck = await anchorService.VerifyAnchorAsync(anchored.AnchoredManifest!, new ManifestAnchorVerificationOptions
    {
        RequireInclusionProof = true,
        VerifyInclusionProof = true
    });
    anchorCheck.IsValid.Should().BeTrue();
}
[Fact]
public async Task FullWorkflow_PreservesManifestData()
{
    // Signing + anchoring must only ADD metadata, never mutate the manifest's payload fields.
    var original = CreateTestManifest();

    var signed = await _signingService.SignAsync(original, new ManifestSigningOptions
    {
        KeyId = TestKeyId,
        Algorithm = ManifestSigningAlgorithm.HmacSha256,
        SecretBase64 = _testSecret
    });

    var anchorService = new ScoringManifestRekorAnchorService(new StubRekorSubmissionClient());
    var anchored = await anchorService.AnchorAsync(signed, new ManifestAnchorOptions { RekorUrl = TestRekorUrl });
    var final = anchored.AnchoredManifest!;

    // Every original payload field survives the pipeline untouched.
    final.SchemaVersion.Should().Be(original.SchemaVersion);
    final.ScoringVersion.Should().Be(original.ScoringVersion);
    final.Weights.Should().Be(original.Weights);
    final.Normalizers.Should().Be(original.Normalizers);
    final.TrustedVexKeys.Should().BeEquivalentTo(original.TrustedVexKeys);
    final.CodeHash.Should().Be(original.CodeHash);
    final.CreatedAt.Should().Be(original.CreatedAt);

    // ...and the pipeline contributed digest, signature, and anchor metadata.
    final.ManifestDigest.Should().NotBeNull();
    final.DsseSignature.Should().NotBeNull();
    final.RekorAnchor.Should().NotBeNull();
}
#endregion
#region Stub Client Tests
[Fact]
public async Task StubClient_IncrementsLogIndex()
{
    // The stub hands out monotonically increasing log indexes per instance, starting at 1.
    var stub = new StubRekorSubmissionClient();
    var request = new ManifestRekorSubmissionRequest
    {
        PayloadType = "test",
        PayloadBase64 = "dGVzdA==",
        Signatures = [],
        BundleSha256 = "abc123",
        ArtifactKind = "test",
        ArtifactSha256 = "def456"
    };

    var first = await stub.SubmitAsync(request, TestRekorUrl);
    var second = await stub.SubmitAsync(request, TestRekorUrl);

    first.LogIndex.Should().Be(1);
    second.LogIndex.Should().Be(2);
}
[Fact]
public async Task StubClient_ProducesConsistentUuidForSameContent()
{
    // Identical submissions must map to the same UUID even across distinct stub instances,
    // i.e. the UUID is derived from the request content rather than instance state.
    var request = new ManifestRekorSubmissionRequest
    {
        PayloadType = "test",
        PayloadBase64 = "dGVzdA==",
        Signatures = [],
        BundleSha256 = "abc123",
        ArtifactKind = "test",
        ArtifactSha256 = "def456"
    };

    var fromFirstStub = await new StubRekorSubmissionClient().SubmitAsync(request, TestRekorUrl);
    var fromSecondStub = await new StubRekorSubmissionClient().SubmitAsync(request, TestRekorUrl);

    fromFirstStub.Uuid.Should().Be(fromSecondStub.Uuid);
}
[Fact]
public async Task StubClient_UsesFakeTimeProvider()
{
    // The stub must stamp IntegratedTime from the injected TimeProvider, not the system clock.
    var frozenClock = new FakeTimeProvider(new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero));
    var stub = new StubRekorSubmissionClient(frozenClock);
    var request = new ManifestRekorSubmissionRequest
    {
        PayloadType = "test",
        PayloadBase64 = "dGVzdA==",
        Signatures = [],
        BundleSha256 = "abc123",
        ArtifactKind = "test",
        ArtifactSha256 = "def456"
    };

    var response = await stub.SubmitAsync(request, TestRekorUrl);

    response.IntegratedTime.Should().Be(frozenClock.GetUtcNow().ToUnixTimeSeconds());
}
#endregion
}

View File

@@ -0,0 +1,449 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2026 StellaOps
// Sprint: SPRINT_20260118_028_LIB_scoring_manifest_jcs_integration
// Task: TASK-028-004 - Scoring Manifest DSSE Signing
using System.Collections.Immutable;
using System.Security.Cryptography;
using FluentAssertions;
using StellaOps.DeltaVerdict.Manifest;
using StellaOps.DeltaVerdict.Signing;
using Xunit;
namespace StellaOps.DeltaVerdict.Tests.Signing;
/// <summary>
/// Unit tests for <c>ScoringManifestSigningService</c>: DSSE sign/verify round-trips,
/// tamper detection, wrong-key rejection, digest computation, and canonical JSON output.
/// Sealed to match the test-class convention used elsewhere in this repository.
/// </summary>
public sealed class ScoringManifestSigningServiceTests
{
    private readonly ScoringManifestSigningService _service = new();

    // Fresh 256-bit HMAC secret per test-class instance (xUnit constructs one instance per test),
    // encoded as Base64 because the signing options carry the secret in Base64 form.
    private readonly string _testSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));

    private const string TestKeyId = "test-key-001";

    /// <summary>
    /// Builds a fully populated manifest with a fixed <c>CreatedAt</c> so digests and
    /// signatures are deterministic across runs.
    /// </summary>
    private static ScoringManifest CreateTestManifest()
    {
        return new ScoringManifest
        {
            SchemaVersion = ScoringManifest.CurrentSchemaVersion,
            ScoringVersion = "v2026-01-18-1",
            Weights = ScoringWeights.Default,
            Normalizers = ScoringNormalizers.Default,
            TrustedVexKeys = ImmutableArray.Create("key1", "key2"),
            CodeHash = "sha256:abc123def456",
            CreatedAt = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero)
        };
    }

    #region Sign/Verify Round-Trip Tests

    [Fact]
    public async Task SignAsync_ProducesValidSignature()
    {
        var manifest = CreateTestManifest();
        var options = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, options);
        // Digest is a prefixed SHA-256; the DSSE envelope must contain the standard fields.
        signedManifest.ManifestDigest.Should().NotBeNullOrEmpty();
        signedManifest.ManifestDigest.Should().StartWith("sha256:");
        signedManifest.DsseSignature.Should().NotBeNullOrEmpty();
        signedManifest.DsseSignature.Should().Contain("payloadType");
        signedManifest.DsseSignature.Should().Contain("payload");
        signedManifest.DsseSignature.Should().Contain("signatures");
    }

    [Fact]
    public async Task SignAsync_VerifyAsync_RoundTrip_Succeeds()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(signedManifest, verifyOptions);
        result.IsValid.Should().BeTrue();
        result.VerifiedKeyId.Should().Be(TestKeyId);
        result.Error.Should().BeNull();
    }

    [Fact]
    public async Task SignAsync_ProducesCorrectPayloadType()
    {
        var manifest = CreateTestManifest();
        var options = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, options);
        signedManifest.DsseSignature.Should().Contain(ScoringManifestSigningService.PayloadType);
    }

    [Fact]
    public async Task SignAsync_IsDeterministic()
    {
        // Signing the same manifest twice with the same key must yield identical output
        // (HMAC over canonical JSON has no per-signature randomness).
        var manifest = CreateTestManifest();
        var options = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signed1 = await _service.SignAsync(manifest, options);
        var signed2 = await _service.SignAsync(manifest, options);
        signed1.ManifestDigest.Should().Be(signed2.ManifestDigest);
        signed1.DsseSignature.Should().Be(signed2.DsseSignature);
    }

    #endregion

    #region Tamper Detection Tests

    [Fact]
    public async Task VerifyAsync_DetectsTamperedWeight()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        // Tamper with the manifest by changing a weight after signing.
        var tamperedManifest = signedManifest with
        {
            Weights = ScoringWeights.Default with { CvssBase = 0.99 }
        };
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(tamperedManifest, verifyOptions);
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("modified");
    }

    [Fact]
    public async Task VerifyAsync_DetectsTamperedScoringVersion()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        // Tamper with the scoring version after signing.
        var tamperedManifest = signedManifest with
        {
            ScoringVersion = "v2026-01-19-1"
        };
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(tamperedManifest, verifyOptions);
        result.IsValid.Should().BeFalse();
    }

    [Fact]
    public async Task VerifyAsync_DetectsTamperedTrustedVexKeys()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        // Tamper by injecting an extra (malicious) trusted key after signing.
        var tamperedManifest = signedManifest with
        {
            TrustedVexKeys = ImmutableArray.Create("key1", "key2", "malicious-key")
        };
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(tamperedManifest, verifyOptions);
        result.IsValid.Should().BeFalse();
    }

    #endregion

    #region Wrong Key Tests

    [Fact]
    public async Task VerifyAsync_FailsWithWrongSecret()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        // Verify with a freshly generated (and therefore wrong) secret.
        var wrongSecret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = wrongSecret
        };
        var result = await _service.VerifyAsync(signedManifest, verifyOptions);
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("verification failed");
    }

    [Fact]
    public async Task VerifyAsync_FailsWithWrongKeyId()
    {
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = "wrong-key-id",
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(signedManifest, verifyOptions);
        result.IsValid.Should().BeFalse();
    }

    #endregion

    #region Unsigned Manifest Tests

    [Fact]
    public async Task VerifyAsync_FailsForUnsignedManifest()
    {
        // A manifest that was never signed must be rejected outright.
        var manifest = CreateTestManifest();
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(manifest, verifyOptions);
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("not signed");
    }

    [Fact]
    public async Task VerifyAsync_FailsForInvalidSignatureJson()
    {
        // Malformed DSSE envelope JSON must produce a clear parse error, not a crash.
        var manifest = CreateTestManifest() with
        {
            DsseSignature = "not valid json {"
        };
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = _testSecret
        };
        var result = await _service.VerifyAsync(manifest, verifyOptions);
        result.IsValid.Should().BeFalse();
        result.Error.Should().Contain("Invalid signature envelope");
    }

    #endregion

    #region Digest Tests

    [Fact]
    public void ComputeDigest_ProducesSha256Digest()
    {
        var manifest = CreateTestManifest();
        var digest = _service.ComputeDigest(manifest);
        digest.Should().StartWith("sha256:");
        digest.Should().HaveLength(71); // "sha256:" (7) + 64 hex chars
        digest.Substring(7).Should().MatchRegex("^[0-9a-f]{64}$");
    }

    [Fact]
    public void ComputeDigest_IsDeterministic()
    {
        var manifest = CreateTestManifest();
        var digest1 = _service.ComputeDigest(manifest);
        var digest2 = _service.ComputeDigest(manifest);
        digest1.Should().Be(digest2);
    }

    [Fact]
    public void ComputeDigest_DiffersForDifferentManifests()
    {
        var manifest1 = CreateTestManifest();
        var manifest2 = CreateTestManifest() with { ScoringVersion = "v2026-01-19-1" };
        var digest1 = _service.ComputeDigest(manifest1);
        var digest2 = _service.ComputeDigest(manifest2);
        digest1.Should().NotBe(digest2);
    }

    #endregion

    #region Canonical JSON Tests

    [Fact]
    public void GetCanonicalJson_ProducesValidJson()
    {
        // Canonical form uses snake_case field names for all signed payload fields.
        var manifest = CreateTestManifest();
        var json = _service.GetCanonicalJson(manifest);
        json.Should().NotBeNullOrEmpty();
        json.Should().Contain("\"schema_version\"");
        json.Should().Contain("\"scoring_version\"");
        json.Should().Contain("\"weights\"");
        json.Should().Contain("\"normalizers\"");
        json.Should().Contain("\"trusted_vex_keys\"");
        json.Should().Contain("\"code_hash\"");
    }

    [Fact]
    public void GetCanonicalJson_ExcludesSignatureFields()
    {
        // Signature/anchor metadata must never be part of the signed payload,
        // otherwise signing would invalidate its own output.
        var manifest = CreateTestManifest() with
        {
            ManifestDigest = "sha256:test",
            DsseSignature = "test-signature"
        };
        var json = _service.GetCanonicalJson(manifest);
        json.Should().NotContain("manifest_digest");
        json.Should().NotContain("dsse_signature");
        json.Should().NotContain("rekor_anchor");
    }

    [Fact]
    public void GetCanonicalJson_IsDeterministic_100Iterations()
    {
        var manifest = CreateTestManifest();
        var results = Enumerable.Range(0, 100)
            .Select(_ => _service.GetCanonicalJson(manifest))
            .Distinct()
            .ToList();
        results.Should().ContainSingle("All 100 serializations should produce identical output");
    }

    #endregion

    #region Algorithm Tests

    [Fact]
    public async Task SignAsync_WithSha256Algorithm_Works()
    {
        // Plain SHA-256 mode needs no shared secret.
        var manifest = CreateTestManifest();
        var signingOptions = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.Sha256
        };
        var signedManifest = await _service.SignAsync(manifest, signingOptions);
        signedManifest.DsseSignature.Should().NotBeNullOrEmpty();
        var verifyOptions = new ManifestVerificationOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.Sha256
        };
        var result = await _service.VerifyAsync(signedManifest, verifyOptions);
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public async Task SignAsync_WithoutSecret_ThrowsForHmac()
    {
        // HMAC without a secret is a configuration error and must fail fast.
        var manifest = CreateTestManifest();
        var options = new ManifestSigningOptions
        {
            KeyId = TestKeyId,
            Algorithm = ManifestSigningAlgorithm.HmacSha256,
            SecretBase64 = null
        };
        var act = async () => await _service.SignAsync(manifest, options);
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*secret*");
    }

    #endregion
}

View File

@@ -12,5 +12,10 @@
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.DeltaVerdict\StellaOps.DeltaVerdict.csproj" />
<ProjectReference Include="../../StellaOps.TestKit/StellaOps.TestKit.csproj" />
<ProjectReference Include="../../../Signals/StellaOps.Signals/StellaOps.Signals.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -24,4 +24,8 @@
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -17,4 +17,8 @@
<ProjectReference Include="..\..\StellaOps.TestKit\StellaOps.TestKit.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -19,4 +19,8 @@
<ProjectReference Include="../../StellaOps.HybridLogicalClock/StellaOps.HybridLogicalClock.csproj" />
<ProjectReference Include="../../StellaOps.TestKit/StellaOps.TestKit.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -26,4 +26,8 @@
<ProjectReference Include="../../StellaOps.Provcache.Api/StellaOps.Provcache.Api.csproj" />
<ProjectReference Include="../../StellaOps.TestKit/StellaOps.TestKit.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}

View File

@@ -21,4 +21,8 @@
<ProjectReference Include="..\..\StellaOps.Reachability.Core\StellaOps.Reachability.Core.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,5 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"parallelizeTestCollections": false,
"maxParallelThreads": 1
}