save checkpoint

This commit is contained in:
master
2026-02-11 01:32:14 +02:00
parent 5593212b41
commit cf5b72974f
2316 changed files with 68799 additions and 3808 deletions

View File

@@ -221,6 +221,107 @@ public sealed record SbomUploadRecordDto
public DateTimeOffset CreatedAtUtc { get; init; }
}
/// <summary>
/// Latest-by-payload hot lookup response row.
/// </summary>
public sealed record SbomHotLookupLatestResponseDto
{
// Build identifier the projection row was written under (ingestion falls back to the scan id when no CI build id is supplied).
[JsonPropertyName("buildId")]
public string BuildId { get; init; } = string.Empty;
// SHA-256 ("sha256:<hex>") of the canonical component projection serialized at ingest time.
[JsonPropertyName("canonicalBomSha256")]
public string CanonicalBomSha256 { get; init; } = string.Empty;
// Normalized payload digest key used for the hot lookup ("sha256:<hex>" or "scan:<id>" fallback).
[JsonPropertyName("payloadDigest")]
public string PayloadDigest { get; init; } = string.Empty;
// UTC timestamp of when the projection row was inserted.
[JsonPropertyName("insertedAtUtc")]
public DateTimeOffset InsertedAtUtc { get; init; }
// 0-100 score derived from purl coverage plus merged-VEX presence.
[JsonPropertyName("evidenceScore")]
public int EvidenceScore { get; init; }
// Optional Rekor transparency-log tile id extracted from the SBOM, when present.
[JsonPropertyName("rekorTileId")]
public string? RekorTileId { get; init; }
}
/// <summary>
/// Shared hot lookup row for component search.
/// </summary>
public sealed record SbomHotLookupComponentItemDto
{
// Build identifier of the projection row matching the component query.
[JsonPropertyName("buildId")]
public string BuildId { get; init; } = string.Empty;
// SHA-256 of the canonical component projection ("sha256:<hex>").
[JsonPropertyName("canonicalBomSha256")]
public string CanonicalBomSha256 { get; init; } = string.Empty;
// Normalized payload digest key of the originating SBOM upload.
[JsonPropertyName("payloadDigest")]
public string PayloadDigest { get; init; } = string.Empty;
// UTC insertion timestamp of the projection row.
[JsonPropertyName("insertedAtUtc")]
public DateTimeOffset InsertedAtUtc { get; init; }
// 0-100 evidence score computed at ingest time.
[JsonPropertyName("evidenceScore")]
public int EvidenceScore { get; init; }
}
/// <summary>
/// Component search response with bounded pagination.
/// </summary>
public sealed record SbomHotLookupComponentSearchResponseDto
{
// Effective page size after normalization (1..200; requested values &lt;= 0 are replaced by the service default).
[JsonPropertyName("limit")]
public int Limit { get; init; }
// Effective page offset after normalization (negative requests are clamped to 0).
[JsonPropertyName("offset")]
public int Offset { get; init; }
// Matching projection rows for the requested page; never null, empty when no rows match.
[JsonPropertyName("items")]
public IReadOnlyList<SbomHotLookupComponentItemDto> Items { get; init; }
= Array.Empty<SbomHotLookupComponentItemDto>();
}
/// <summary>
/// Pending triage row from merged VEX projection.
/// </summary>
public sealed record SbomHotLookupPendingItemDto
{
// Build identifier of the projection row with pending triage data.
[JsonPropertyName("buildId")]
public string BuildId { get; init; } = string.Empty;
// SHA-256 of the canonical component projection ("sha256:<hex>").
[JsonPropertyName("canonicalBomSha256")]
public string CanonicalBomSha256 { get; init; } = string.Empty;
// Normalized payload digest key of the originating SBOM upload.
[JsonPropertyName("payloadDigest")]
public string PayloadDigest { get; init; } = string.Empty;
// UTC insertion timestamp of the projection row.
[JsonPropertyName("insertedAtUtc")]
public DateTimeOffset InsertedAtUtc { get; init; }
// 0-100 evidence score computed at ingest time.
[JsonPropertyName("evidenceScore")]
public int EvidenceScore { get; init; }
// Merged-VEX pending payload parsed from the stored JSON; an empty JSON array when the row has none.
[JsonPropertyName("pending")]
public JsonElement Pending { get; init; }
}
/// <summary>
/// Pending triage search response with bounded pagination.
/// </summary>
public sealed record SbomHotLookupPendingSearchResponseDto
{
// Effective page size after normalization (1..200; requested values &lt;= 0 are replaced by the service default).
[JsonPropertyName("limit")]
public int Limit { get; init; }
// Effective page offset after normalization (negative requests are clamped to 0).
[JsonPropertyName("offset")]
public int Offset { get; init; }
// Pending-triage rows for the requested page; never null, empty when no rows match.
[JsonPropertyName("items")]
public IReadOnlyList<SbomHotLookupPendingItemDto> Items { get; init; }
= Array.Empty<SbomHotLookupPendingItemDto>();
}
/// <summary>
/// SBOM format types.
/// </summary>

View File

@@ -133,6 +133,8 @@ internal static class SbomEndpoints
sbomDocument,
format,
contentDigest,
snapshot.Target.Digest,
parsed.Value,
cancellationToken).ConfigureAwait(false);
sbomDocument.Dispose();

View File

@@ -0,0 +1,174 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Endpoints;
internal static class SbomHotLookupEndpoints
{
    /// <summary>
    /// Maps the SBOM hot-lookup read endpoints under "/hot-lookup".
    /// All routes require <see cref="ScannerPolicies.ScansRead"/> and report
    /// validation failures as RFC 7807 problem responses.
    /// </summary>
    public static void MapSbomHotLookupEndpoints(this RouteGroupBuilder sbomGroup)
    {
        ArgumentNullException.ThrowIfNull(sbomGroup);
        var hotLookup = sbomGroup.MapGroup("/hot-lookup");
        hotLookup.MapGet("/payload/{payloadDigest}/latest", HandleGetLatestByPayloadDigestAsync)
            .WithName("scanner.sbom.hotlookup.latest-by-payload")
            .WithTags("SBOM")
            .Produces<SbomHotLookupLatestResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces(StatusCodes.Status400BadRequest)
            .RequireAuthorization(ScannerPolicies.ScansRead);
        hotLookup.MapGet("/components", HandleSearchComponentsAsync)
            .WithName("scanner.sbom.hotlookup.components")
            .WithTags("SBOM")
            .Produces<SbomHotLookupComponentSearchResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .RequireAuthorization(ScannerPolicies.ScansRead);
        hotLookup.MapGet("/pending-triage", HandleSearchPendingTriageAsync)
            .WithName("scanner.sbom.hotlookup.pending-triage")
            .WithTags("SBOM")
            .Produces<SbomHotLookupPendingSearchResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .RequireAuthorization(ScannerPolicies.ScansRead);
    }

    /// <summary>
    /// Validates bounded pagination inputs shared by the search endpoints.
    /// Returns a 400 problem result when invalid, or null when the values are acceptable.
    /// </summary>
    private static IResult? ValidatePagination(HttpContext context, int limit, int offset)
    {
        if (!SbomHotLookupService.IsLimitValid(limit))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid limit",
                StatusCodes.Status400BadRequest,
                detail: "limit must be between 1 and 200.");
        }
        if (!SbomHotLookupService.IsOffsetValid(offset))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offset",
                StatusCodes.Status400BadRequest,
                detail: "offset must be greater than or equal to 0.");
        }
        return null;
    }

    /// <summary>
    /// GET /hot-lookup/payload/{payloadDigest}/latest — resolves the most recent
    /// projection row for a payload digest, or 404 when none exists.
    /// </summary>
    private static async Task<IResult> HandleGetLatestByPayloadDigestAsync(
        string payloadDigest,
        ISbomHotLookupService hotLookupService,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(hotLookupService);
        if (string.IsNullOrWhiteSpace(payloadDigest))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid payload digest",
                StatusCodes.Status400BadRequest,
                detail: "payloadDigest is required.");
        }
        var latest = await hotLookupService
            .GetLatestByPayloadDigestAsync(payloadDigest, cancellationToken)
            .ConfigureAwait(false);
        if (latest is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "No SBOM projection found",
                StatusCodes.Status404NotFound,
                detail: "No artifact_boms projection row exists for the provided payload digest.");
        }
        return Results.Ok(latest);
    }

    /// <summary>
    /// GET /hot-lookup/components — searches projection rows by exactly one of
    /// 'purl' or 'name' (optionally narrowed by 'minVersion'), with bounded pagination.
    /// </summary>
    private static async Task<IResult> HandleSearchComponentsAsync(
        string? purl,
        string? name,
        string? minVersion,
        int limit,
        int offset,
        ISbomHotLookupService hotLookupService,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(hotLookupService);
        var hasPurl = !string.IsNullOrWhiteSpace(purl);
        var hasName = !string.IsNullOrWhiteSpace(name);
        if (!hasPurl && !hasName)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid component query",
                StatusCodes.Status400BadRequest,
                detail: "Provide either 'purl' or 'name' query parameter.");
        }
        if (hasPurl && hasName)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Ambiguous component query",
                StatusCodes.Status400BadRequest,
                detail: "Use either 'purl' or 'name', not both.");
        }
        if (ValidatePagination(context, limit, offset) is { } paginationProblem)
        {
            return paginationProblem;
        }
        var result = await hotLookupService
            .SearchComponentsAsync(purl, name, minVersion, limit, offset, cancellationToken)
            .ConfigureAwait(false);
        return Results.Ok(result);
    }

    /// <summary>
    /// GET /hot-lookup/pending-triage — lists rows with pending merged-VEX triage,
    /// with bounded pagination.
    /// </summary>
    private static async Task<IResult> HandleSearchPendingTriageAsync(
        int limit,
        int offset,
        ISbomHotLookupService hotLookupService,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(hotLookupService);
        if (ValidatePagination(context, limit, offset) is { } paginationProblem)
        {
            return paginationProblem;
        }
        var result = await hotLookupService
            .SearchPendingTriageAsync(limit, offset, cancellationToken)
            .ConfigureAwait(false);
        return Results.Ok(result);
    }
}

View File

@@ -30,6 +30,8 @@ internal static class SbomUploadEndpoints
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
sbomGroup.MapSbomHotLookupEndpoints();
}
private static async Task<IResult> HandleUploadAsync(

View File

@@ -158,6 +158,7 @@ builder.Services.AddSingleton<IAttestationChainVerifier, AttestationChainVerifie
builder.Services.AddSingleton<IHumanApprovalAttestationService, HumanApprovalAttestationService>();
builder.Services.AddScoped<ICallGraphIngestionService, CallGraphIngestionService>();
builder.Services.AddScoped<ISbomIngestionService, SbomIngestionService>();
builder.Services.AddScoped<ISbomHotLookupService, SbomHotLookupService>();
builder.Services.AddScoped<ILayerSbomService, LayerSbomService>();
builder.Services.AddSingleton<ISbomUploadStore, InMemorySbomUploadStore>();
builder.Services.AddScoped<ISbomByosUploadService, SbomByosUploadService>();

View File

@@ -27,6 +27,8 @@ public interface ISbomIngestionService
JsonDocument sbomDocument,
string format,
string? contentDigest,
string? payloadDigest,
string? buildId,
CancellationToken cancellationToken = default);
/// <summary>

View File

@@ -121,7 +121,14 @@ internal sealed class SbomByosUploadService : ISbomByosUploadService
var scanId = ScanIdGenerator.Create(target, force: false, clientRequestId: null, metadata);
var ingestion = await _ingestionService
.IngestAsync(scanId, document, format, digest, cancellationToken)
.IngestAsync(
scanId,
document,
format,
digest,
target.Digest,
request.Source?.CiContext?.BuildId,
cancellationToken)
.ConfigureAwait(false);
var submission = new ScanSubmission(target, false, null, metadata);

View File

@@ -0,0 +1,184 @@
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Contracts;
using System.Text.Json;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Read-side service over the artifact_boms hot-lookup projection.
/// </summary>
internal interface ISbomHotLookupService
{
/// <summary>Returns the most recent projection row for the payload digest, or null when none exists.</summary>
Task<SbomHotLookupLatestResponseDto?> GetLatestByPayloadDigestAsync(
string payloadDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Searches projection rows by component purl or (lower-cased) name with bounded pagination.
/// Callers supply exactly one of <paramref name="purl"/> or <paramref name="name"/>.
/// </summary>
Task<SbomHotLookupComponentSearchResponseDto> SearchComponentsAsync(
string? purl,
string? name,
string? minVersion,
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>Lists rows carrying pending merged-VEX triage data with bounded pagination.</summary>
Task<SbomHotLookupPendingSearchResponseDto> SearchPendingTriageAsync(
int limit,
int offset,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Default <see cref="ISbomHotLookupService"/> backed by <see cref="IArtifactBomRepository"/>.
/// Normalizes pagination (limit clamped to 1..200, negative offsets to 0) and maps
/// repository rows onto the hot-lookup response DTOs.
/// </summary>
internal sealed class SbomHotLookupService : ISbomHotLookupService
{
    private const int DefaultLimit = 50;
    private const int DefaultPendingLimit = 100;
    private const int MaxLimit = 200;
    private readonly IArtifactBomRepository _repository;

    public SbomHotLookupService(IArtifactBomRepository repository)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
    }

    /// <inheritdoc />
    public async Task<SbomHotLookupLatestResponseDto?> GetLatestByPayloadDigestAsync(
        string payloadDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(payloadDigest);

        // The ingestion write path stores payload digests trimmed and lower-cased,
        // so lower-case here as well to keep mixed-case lookups hitting the row.
        // NOTE(review): the write path also prefixes bare hex with "sha256:"; callers
        // are expected to pass the prefixed form — confirm against the endpoint contract.
        var normalizedDigest = payloadDigest.Trim().ToLowerInvariant();

        var row = await _repository
            .TryGetLatestByPayloadDigestAsync(normalizedDigest, cancellationToken)
            .ConfigureAwait(false);
        return row is null ? null : MapLatest(row);
    }

    /// <inheritdoc />
    public async Task<SbomHotLookupComponentSearchResponseDto> SearchComponentsAsync(
        string? purl,
        string? name,
        string? minVersion,
        int limit,
        int offset,
        CancellationToken cancellationToken = default)
    {
        var hasPurl = !string.IsNullOrWhiteSpace(purl);
        if (!hasPurl && string.IsNullOrWhiteSpace(name))
        {
            // Fix: the previous implementation fell through to name!.Trim() and threw
            // NullReferenceException when neither value was supplied; fail explicitly instead.
            throw new ArgumentException("Either 'purl' or 'name' must be provided.", nameof(name));
        }

        var normalizedLimit = NormalizeLimit(limit, DefaultLimit);
        var normalizedOffset = NormalizeOffset(offset);
        IReadOnlyList<ArtifactBomRow> rows;
        if (hasPurl)
        {
            rows = await _repository
                .FindByComponentPurlAsync(
                    purl!.Trim(),
                    normalizedLimit,
                    normalizedOffset,
                    cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            // Component names are stored lower-cased by the ingestion projection.
            rows = await _repository
                .FindByComponentNameAsync(
                    name!.Trim().ToLowerInvariant(),
                    minVersion,
                    normalizedLimit,
                    normalizedOffset,
                    cancellationToken)
                .ConfigureAwait(false);
        }
        return new SbomHotLookupComponentSearchResponseDto
        {
            Limit = normalizedLimit,
            Offset = normalizedOffset,
            Items = rows.Select(MapComponentItem).ToArray()
        };
    }

    /// <inheritdoc />
    public async Task<SbomHotLookupPendingSearchResponseDto> SearchPendingTriageAsync(
        int limit,
        int offset,
        CancellationToken cancellationToken = default)
    {
        var normalizedLimit = NormalizeLimit(limit, DefaultPendingLimit);
        var normalizedOffset = NormalizeOffset(offset);
        var rows = await _repository
            .FindPendingTriageAsync(normalizedLimit, normalizedOffset, cancellationToken)
            .ConfigureAwait(false);
        return new SbomHotLookupPendingSearchResponseDto
        {
            Limit = normalizedLimit,
            Offset = normalizedOffset,
            Items = rows.Select(MapPendingItem).ToArray()
        };
    }

    /// <summary>True when the requested limit is acceptable; 0 means "use the default".</summary>
    public static bool IsLimitValid(int limit)
        => limit == 0 || (limit >= 1 && limit <= MaxLimit);

    /// <summary>True when the requested offset is non-negative.</summary>
    public static bool IsOffsetValid(int offset)
        => offset >= 0;

    // Non-positive requests fall back to the endpoint-specific default; larger requests are capped.
    private static int NormalizeLimit(int requestedLimit, int fallback)
    {
        if (requestedLimit <= 0)
        {
            return fallback;
        }
        return Math.Min(requestedLimit, MaxLimit);
    }

    private static int NormalizeOffset(int requestedOffset)
        => requestedOffset < 0 ? 0 : requestedOffset;

    private static SbomHotLookupLatestResponseDto MapLatest(ArtifactBomRow row)
    {
        return new SbomHotLookupLatestResponseDto
        {
            BuildId = row.BuildId,
            CanonicalBomSha256 = row.CanonicalBomSha256,
            PayloadDigest = row.PayloadDigest,
            InsertedAtUtc = row.InsertedAt.ToUniversalTime(),
            EvidenceScore = row.EvidenceScore,
            RekorTileId = row.RekorTileId
        };
    }

    private static SbomHotLookupComponentItemDto MapComponentItem(ArtifactBomRow row)
    {
        return new SbomHotLookupComponentItemDto
        {
            BuildId = row.BuildId,
            CanonicalBomSha256 = row.CanonicalBomSha256,
            PayloadDigest = row.PayloadDigest,
            InsertedAtUtc = row.InsertedAt.ToUniversalTime(),
            EvidenceScore = row.EvidenceScore
        };
    }

    private static SbomHotLookupPendingItemDto MapPendingItem(ArtifactBomRow row)
    {
        return new SbomHotLookupPendingItemDto
        {
            BuildId = row.BuildId,
            CanonicalBomSha256 = row.CanonicalBomSha256,
            PayloadDigest = row.PayloadDigest,
            InsertedAtUtc = row.InsertedAt.ToUniversalTime(),
            EvidenceScore = row.EvidenceScore,
            Pending = ParsePendingJson(row.PendingMergedVexJson)
        };
    }

    // Rows without pending JSON surface an empty array so the DTO's Pending element is always valid JSON.
    private static JsonElement ParsePendingJson(string? pendingJson)
    {
        if (string.IsNullOrWhiteSpace(pendingJson))
        {
            using var empty = JsonDocument.Parse("[]");
            return empty.RootElement.Clone();
        }
        using var document = JsonDocument.Parse(pendingJson);
        return document.RootElement.Clone();
    }
}

View File

@@ -1,9 +1,12 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.WebService.Services;
@@ -16,11 +19,19 @@ internal sealed class SbomIngestionService : ISbomIngestionService
};
private readonly ArtifactStorageService _artifactStorage;
private readonly IArtifactBomRepository _artifactBomRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SbomIngestionService> _logger;
public SbomIngestionService(ArtifactStorageService artifactStorage, ILogger<SbomIngestionService> logger)
public SbomIngestionService(
ArtifactStorageService artifactStorage,
IArtifactBomRepository artifactBomRepository,
TimeProvider timeProvider,
ILogger<SbomIngestionService> logger)
{
_artifactStorage = artifactStorage ?? throw new ArgumentNullException(nameof(artifactStorage));
_artifactBomRepository = artifactBomRepository ?? throw new ArgumentNullException(nameof(artifactBomRepository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -96,6 +107,8 @@ internal sealed class SbomIngestionService : ISbomIngestionService
JsonDocument sbomDocument,
string format,
string? contentDigest,
string? payloadDigest,
string? buildId,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(sbomDocument);
@@ -126,20 +139,49 @@ internal sealed class SbomIngestionService : ISbomIngestionService
stored.BytesSha256);
}
var componentCount = CountComponents(sbomDocument, format);
var canonical = BuildCanonicalProjection(sbomDocument.RootElement, format);
var canonicalBomJson = SerializeCanonicalProjection(canonical);
var canonicalBomSha256 = ComputeSha256Digest(canonicalBomJson);
var mergedVexJson = ExtractMergedVexProjection(sbomDocument.RootElement, format);
var attestationsJson = ExtractAttestationsProjection(sbomDocument.RootElement);
var rekorTileId = ExtractRekorTileId(sbomDocument.RootElement);
var projectionInsertedAt = _timeProvider.GetUtcNow();
var projectionRow = new ArtifactBomRow
{
BuildId = NormalizeBuildId(buildId, scanId),
CanonicalBomSha256 = canonicalBomSha256,
PayloadDigest = NormalizePayloadDigest(payloadDigest, scanId),
InsertedAt = projectionInsertedAt,
RawBomRef = stored.Id,
CanonicalBomRef = stored.Id,
DsseEnvelopeRef = null,
MergedVexRef = string.IsNullOrWhiteSpace(mergedVexJson) ? null : stored.Id,
CanonicalBomJson = canonicalBomJson,
MergedVexJson = mergedVexJson,
AttestationsJson = attestationsJson,
EvidenceScore = ComputeEvidenceScore(
canonical.Components.Count,
canonical.ComponentsWithPurl,
!string.IsNullOrWhiteSpace(mergedVexJson)),
RekorTileId = rekorTileId
};
await _artifactBomRepository.UpsertMonthlyAsync(projectionRow, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Ingested sbom scan={ScanId} format={Format} components={Components} digest={Digest} id={SbomId}",
"Ingested sbom scan={ScanId} format={Format} components={Components} digest={Digest} id={SbomId} payloadDigest={PayloadDigest}",
scanId.Value,
format,
componentCount,
canonical.Components.Count,
stored.BytesSha256,
stored.Id);
stored.Id,
projectionRow.PayloadDigest);
return new SbomIngestionResult(
SbomId: stored.Id,
Format: format,
ComponentCount: componentCount,
ComponentCount: canonical.Components.Count,
Digest: stored.BytesSha256);
}
@@ -158,36 +200,460 @@ internal sealed class SbomIngestionService : ISbomIngestionService
return (ArtifactDocumentFormat.CycloneDxJson, "application/json");
}
private static int CountComponents(JsonDocument document, string format)
private static CanonicalProjection BuildCanonicalProjection(JsonElement root, string format)
{
if (document.RootElement.ValueKind != JsonValueKind.Object)
{
return 0;
}
var components = string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase)
? ExtractSpdxComponents(root)
: ExtractCycloneDxComponents(root);
var root = document.RootElement;
var ordered = components
.OrderBy(component => component.Purl ?? string.Empty, StringComparer.Ordinal)
.ThenBy(component => component.Name, StringComparer.Ordinal)
.ThenBy(component => component.Version ?? string.Empty, StringComparer.Ordinal)
.ToList();
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
if (root.TryGetProperty("components", out var components) && components.ValueKind == JsonValueKind.Array)
{
return components.GetArrayLength();
}
return 0;
}
if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
{
if (root.TryGetProperty("packages", out var packages) && packages.ValueKind == JsonValueKind.Array)
{
return packages.GetArrayLength();
}
return 0;
}
return 0;
return new CanonicalProjection(
Format: format.Trim().ToLowerInvariant(),
Components: ordered,
ComponentsWithPurl: ordered.Count(component => !string.IsNullOrWhiteSpace(component.Purl)));
}
}
private static IReadOnlyList<CanonicalComponent> ExtractCycloneDxComponents(JsonElement root)
{
    // CycloneDX keeps its component list under a top-level "components" array.
    if (!TryGetPropertyCaseInsensitive(root, "components", out var componentArray)
        || componentArray.ValueKind != JsonValueKind.Array)
    {
        return Array.Empty<CanonicalComponent>();
    }

    var extracted = new List<CanonicalComponent>();
    foreach (var entry in componentArray.EnumerateArray())
    {
        if (entry.ValueKind != JsonValueKind.Object)
        {
            continue;
        }

        // purl and name are lower-cased for stable matching; the version keeps its original casing.
        var normalizedPurl = NormalizeOptionalString(GetString(entry, "purl"), toLower: true);
        var normalizedName = NormalizeOptionalString(GetString(entry, "name"), toLower: true);
        var rawVersion = NormalizeOptionalString(GetString(entry, "version"), toLower: false);

        // Entries with neither a purl nor a name carry no identity and are skipped.
        if (string.IsNullOrWhiteSpace(normalizedPurl) && string.IsNullOrWhiteSpace(normalizedName))
        {
            continue;
        }

        extracted.Add(new CanonicalComponent(
            Name: normalizedName ?? string.Empty,
            Version: rawVersion,
            Purl: normalizedPurl));
    }

    return extracted;
}
private static IReadOnlyList<CanonicalComponent> ExtractSpdxComponents(JsonElement root)
{
    // SPDX documents list their units under a top-level "packages" array.
    if (!TryGetPropertyCaseInsensitive(root, "packages", out var packageArray)
        || packageArray.ValueKind != JsonValueKind.Array)
    {
        return Array.Empty<CanonicalComponent>();
    }

    var extracted = new List<CanonicalComponent>();
    foreach (var entry in packageArray.EnumerateArray())
    {
        if (entry.ValueKind != JsonValueKind.Object)
        {
            continue;
        }

        // The purl comes from externalRefs; name/versionInfo are direct SPDX fields.
        var normalizedPurl = NormalizeOptionalString(ExtractSpdxPurl(entry), toLower: true);
        var normalizedName = NormalizeOptionalString(GetString(entry, "name"), toLower: true);
        var rawVersion = NormalizeOptionalString(GetString(entry, "versionInfo"), toLower: false);

        // Entries with neither a purl nor a name carry no identity and are skipped.
        if (string.IsNullOrWhiteSpace(normalizedPurl) && string.IsNullOrWhiteSpace(normalizedName))
        {
            continue;
        }

        extracted.Add(new CanonicalComponent(
            Name: normalizedName ?? string.Empty,
            Version: rawVersion,
            Purl: normalizedPurl));
    }

    return extracted;
}
private static string? ExtractSpdxPurl(JsonElement package)
{
    // SPDX carries package purls as externalRefs entries with referenceType "purl".
    if (!TryGetPropertyCaseInsensitive(package, "externalRefs", out var externalRefs)
        || externalRefs.ValueKind != JsonValueKind.Array)
    {
        return null;
    }

    foreach (var externalRef in externalRefs.EnumerateArray())
    {
        // First matching purl reference wins; non-object and non-purl entries are skipped.
        if (externalRef.ValueKind == JsonValueKind.Object
            && string.Equals(GetString(externalRef, "referenceType"), "purl", StringComparison.OrdinalIgnoreCase))
        {
            return GetString(externalRef, "referenceLocator");
        }
    }

    return null;
}
// Serializes the canonical projection to compact JSON. The resulting bytes are
// hashed into canonicalBomSha256, so the exact field order and formatting here
// must stay stable across releases.
private static string SerializeCanonicalProjection(CanonicalProjection canonical)
{
using var stream = new MemoryStream();
using var writer = new Utf8JsonWriter(stream);
writer.WriteStartObject();
writer.WriteString("format", canonical.Format);
writer.WriteStartArray("components");
foreach (var component in canonical.Components)
{
writer.WriteStartObject();
writer.WriteString("name", component.Name);
// version and purl are omitted (not written as null) when blank, keeping the hash stable.
if (!string.IsNullOrWhiteSpace(component.Version))
{
writer.WriteString("version", component.Version);
}
if (!string.IsNullOrWhiteSpace(component.Purl))
{
writer.WriteString("purl", component.Purl);
}
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WriteEndObject();
writer.Flush();
return Encoding.UTF8.GetString(stream.ToArray());
}
private static string ComputeSha256Digest(string content)
{
    // SHA-256 over the UTF-8 bytes, rendered as "sha256:<lowercase hex>".
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(content));
    return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
}
// Extracts a deterministic merged-VEX projection from the SBOM root.
// Preference order: an explicit "merged_vex"/"mergedVex" node (canonicalized as-is),
// otherwise, for CycloneDX only, a projection synthesized from "vulnerabilities".
// Entries and their affected refs are sorted ordinally so equal inputs always
// serialize to identical bytes. Returns null when nothing VEX-like is present.
private static string? ExtractMergedVexProjection(JsonElement root, string format)
{
if (TryGetPropertyCaseInsensitive(root, "merged_vex", out var mergedVex))
{
return CanonicalizeJson(mergedVex);
}
if (TryGetPropertyCaseInsensitive(root, "mergedVex", out mergedVex))
{
return CanonicalizeJson(mergedVex);
}
// Only CycloneDX defines the "vulnerabilities" shape consumed below.
if (!string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!TryGetPropertyCaseInsensitive(root, "vulnerabilities", out var vulnerabilities)
|| vulnerabilities.ValueKind != JsonValueKind.Array)
{
return null;
}
var entries = new List<MergedVexEntry>();
foreach (var vulnerability in vulnerabilities.EnumerateArray())
{
if (vulnerability.ValueKind != JsonValueKind.Object)
{
continue;
}
var vulnerabilityId = NormalizeOptionalString(
GetString(vulnerability, "id"),
toLower: false);
// Missing or malformed analysis blocks collapse to the "unknown" state.
string state = "unknown";
if (TryGetPropertyCaseInsensitive(vulnerability, "analysis", out var analysis)
&& analysis.ValueKind == JsonValueKind.Object)
{
state = NormalizeOptionalString(GetString(analysis, "state"), toLower: true) ?? "unknown";
}
var affected = new List<string>();
if (TryGetPropertyCaseInsensitive(vulnerability, "affects", out var affects)
&& affects.ValueKind == JsonValueKind.Array)
{
foreach (var affectedRef in affects.EnumerateArray())
{
if (affectedRef.ValueKind != JsonValueKind.Object)
{
continue;
}
var refValue = NormalizeOptionalString(GetString(affectedRef, "ref"), toLower: false);
if (!string.IsNullOrWhiteSpace(refValue))
{
affected.Add(refValue);
}
}
}
// Affected refs are deduplicated and ordinally sorted for determinism.
entries.Add(new MergedVexEntry(
Id: vulnerabilityId ?? string.Empty,
State: string.IsNullOrWhiteSpace(state) ? "unknown" : state,
Affected: affected
.Distinct(StringComparer.Ordinal)
.OrderBy(value => value, StringComparer.Ordinal)
.ToArray()));
}
if (entries.Count == 0)
{
return null;
}
// Sort by id, then state, so the serialized projection is order-independent of the input.
entries.Sort(static (left, right) =>
{
var byId = StringComparer.Ordinal.Compare(left.Id, right.Id);
if (byId != 0)
{
return byId;
}
return StringComparer.Ordinal.Compare(left.State, right.State);
});
using var stream = new MemoryStream();
using var writer = new Utf8JsonWriter(stream);
writer.WriteStartArray();
foreach (var entry in entries)
{
writer.WriteStartObject();
writer.WriteString("id", entry.Id);
writer.WriteString("state", entry.State);
writer.WriteStartArray("affected");
foreach (var affected in entry.Affected)
{
writer.WriteStringValue(affected);
}
writer.WriteEndArray();
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.Flush();
return Encoding.UTF8.GetString(stream.ToArray());
}
private static string? ExtractAttestationsProjection(JsonElement root)
{
    // Canonicalize an optional top-level "attestations" node; absent means no projection.
    return TryGetPropertyCaseInsensitive(root, "attestations", out var attestations)
        ? CanonicalizeJson(attestations)
        : null;
}
private static string? ExtractRekorTileId(JsonElement root)
{
    // Accept both the snake_case and camelCase spellings; the first string-valued hit wins.
    foreach (var key in new[] { "rekor_tile_id", "rekorTileId" })
    {
        if (TryGetPropertyCaseInsensitive(root, key, out var candidate)
            && candidate.ValueKind == JsonValueKind.String)
        {
            return NormalizeOptionalString(candidate.GetString(), toLower: false);
        }
    }
    return null;
}
private static int ComputeEvidenceScore(int componentCount, int componentsWithPurl, bool hasMergedVex)
{
    // Without components the only available evidence is the merged-VEX payload (worth 20 points).
    if (componentCount <= 0)
    {
        return hasMergedVex ? 20 : 0;
    }

    // Up to 80 points for purl coverage plus a flat 20-point VEX bonus, clamped into [0, 100].
    var coverage = componentsWithPurl / (double)componentCount;
    var raw = (coverage * 80d) + (hasMergedVex ? 20d : 0d);
    return Math.Clamp((int)Math.Round(raw, MidpointRounding.AwayFromZero), 0, 100);
}
private static string NormalizeBuildId(string? buildId, ScanId scanId)
{
    // Prefer the caller-supplied build id; fall back to the scan id so the column is never blank.
    return string.IsNullOrWhiteSpace(buildId) ? scanId.Value : buildId.Trim();
}
// Normalizes the payload digest used as the projection row key.
// Missing digests fall back to a synthetic "scan:<id>" key; values with an
// algorithm prefix (any "alg:...") pass through lower-cased; bare hex strings
// are assumed to be SHA-256 and prefixed accordingly.
// NOTE(review): any-length hex gets the "sha256:" prefix, not just 64-char
// digests — confirm short hex inputs cannot reach this path.
private static string NormalizePayloadDigest(string? payloadDigest, ScanId scanId)
{
if (string.IsNullOrWhiteSpace(payloadDigest))
{
return $"scan:{scanId.Value}";
}
var normalized = payloadDigest.Trim().ToLowerInvariant();
if (normalized.Contains(':', StringComparison.Ordinal))
{
return normalized;
}
if (normalized.All(IsHexChar))
{
return $"sha256:{normalized}";
}
return normalized;
}
private static bool IsHexChar(char c)
{
    // Digits plus a-f in either case — exactly the ASCII hex alphabet.
    return char.IsAsciiHexDigit(c);
}
private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement value)
{
    // Linear scan because System.Text.Json has no built-in case-insensitive property lookup.
    value = default;
    if (element.ValueKind != JsonValueKind.Object)
    {
        return false;
    }

    foreach (var property in element.EnumerateObject())
    {
        // First match wins when duplicate keys differ only by case.
        if (propertyName.Equals(property.Name, StringComparison.OrdinalIgnoreCase))
        {
            value = property.Value;
            return true;
        }
    }

    return false;
}
private static string GetString(JsonElement element, string propertyName)
{
    // Missing properties and non-string values both collapse to an empty string.
    return TryGetPropertyCaseInsensitive(element, propertyName, out var value)
        && value.ValueKind == JsonValueKind.String
            ? value.GetString() ?? string.Empty
            : string.Empty;
}
private static string? NormalizeOptionalString(string? value, bool toLower)
{
    // Blank input collapses to null so callers treat "missing" and "empty" uniformly.
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    var trimmed = value.Trim();
    return toLower ? trimmed.ToLowerInvariant() : trimmed;
}
private static string? CanonicalizeJson(JsonElement element)
{
    // Null and undefined nodes have no canonical form.
    if (element.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null)
    {
        return null;
    }

    using var buffer = new MemoryStream();
    using (var writer = new Utf8JsonWriter(buffer))
    {
        // Disposing the writer flushes any remaining buffered output.
        WriteCanonicalElement(writer, element);
    }
    return Encoding.UTF8.GetString(buffer.ToArray());
}
private static void WriteCanonicalElement(Utf8JsonWriter writer, JsonElement element)
{
    switch (element.ValueKind)
    {
        case JsonValueKind.Object:
            writer.WriteStartObject();
            // Ordinal property ordering makes the serialized form deterministic.
            foreach (var member in element.EnumerateObject().OrderBy(static m => m.Name, StringComparer.Ordinal))
            {
                writer.WritePropertyName(member.Name);
                WriteCanonicalElement(writer, member.Value);
            }
            writer.WriteEndObject();
            break;
        case JsonValueKind.Array:
            // Array element order is significant and preserved as-is.
            writer.WriteStartArray();
            foreach (var item in element.EnumerateArray())
            {
                WriteCanonicalElement(writer, item);
            }
            writer.WriteEndArray();
            break;
        case JsonValueKind.String:
            writer.WriteStringValue(element.GetString());
            break;
        case JsonValueKind.Number:
            // Raw text preserves the exact numeric literal without a double round-trip.
            writer.WriteRawValue(element.GetRawText(), skipInputValidation: true);
            break;
        case JsonValueKind.True:
        case JsonValueKind.False:
            writer.WriteBooleanValue(element.ValueKind == JsonValueKind.True);
            break;
        default:
            // Null, Undefined, and anything unexpected serialize as JSON null.
            writer.WriteNullValue();
            break;
    }
}
// Canonical component projection serialized and hashed into canonicalBomSha256.
private sealed record CanonicalProjection(
string Format,
IReadOnlyList<CanonicalComponent> Components,
int ComponentsWithPurl);
// Single normalized component (name/purl lower-cased; version casing preserved).
private sealed record CanonicalComponent(
string Name,
string? Version,
string? Purl);
// One deterministic merged-VEX entry: vulnerability id, analysis state, sorted affected refs.
private sealed record MergedVexEntry(
string Id,
string State,
IReadOnlyList<string> Affected);
}

View File

@@ -12,3 +12,5 @@ Source of truth: `docs/implplan/SPRINT_20260112_003_BE_csproj_audit_pending_appl
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| SPRINT-20260208-062-VEXREACH-001 | DONE | Added `POST /api/v1/scans/vex-reachability/filter` endpoint and deterministic matrix annotations for findings (2026-02-08). |
| SPRINT-20260208-063-TRIAGE-001 | DONE | Implement triage cluster batch action and cluster statistics endpoints for sprint 063 (2026-02-08). |
| HOT-003 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: wired SBOM ingestion projection writes into Scanner WebService pipeline. |
| HOT-004 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: added SBOM hot-lookup read endpoints with bounded pagination. |

View File

@@ -1,5 +1,5 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk.Worker">
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
@@ -14,9 +14,7 @@
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" />
<PackageReference Include="OpenTelemetry.Instrumentation.Process" />
</ItemGroup>
<ItemGroup>
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
<!-- FrameworkReference Microsoft.AspNetCore.App is provided by Sdk.Web -->
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Worker.Health/StellaOps.Worker.Health.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />

View File

@@ -111,6 +111,7 @@ public sealed class BuildProvenanceAnalyzer : IBuildProvenanceVerifier
BuilderId = chain.BuilderId,
SourceRepository = chain.SourceRepository,
SourceCommit = chain.SourceCommit,
SourceTrack = chain.SourceTrack,
GeneratedAtUtc = DateTimeOffset.UtcNow
};

View File

@@ -35,6 +35,69 @@ public sealed class BuildProvenanceChainBuilder
"revision"
};
private static readonly string[] SourceRefKeys =
{
"sourceRef",
"ref",
"gitRef",
"git.ref"
};
private static readonly string[] ReviewCountKeys =
{
"sourceReviewCount",
"reviewCount",
"pullRequestReviewCount",
"source.reviewCount"
};
private static readonly string[] ApproverIdsKeys =
{
"sourceApproverIds",
"approverIds",
"pullRequestApprovers",
"source.approvers"
};
private static readonly string[] AuthorIdKeys =
{
"sourceAuthorId",
"authorId",
"pullRequestAuthor",
"source.author"
};
private static readonly string[] MergedByIdKeys =
{
"sourceMergedById",
"mergedById",
"pullRequestMergedBy",
"source.mergedBy"
};
private static readonly string[] BranchProtectedKeys =
{
"sourceBranchProtected",
"branchProtected",
"source.branchProtected"
};
private static readonly string[] StatusChecksPassedKeys =
{
"sourceStatusChecksPassed",
"statusChecksPassed",
"ciChecksPassed",
"source.statusChecksPassed"
};
private static readonly string[] PolicyHashKeys =
{
"sourcePolicyHash",
"policyHash",
"branchProtectionPolicyHash",
"source.policyHash"
};
public BuildProvenanceChain Build(ParsedSbom sbom)
{
ArgumentNullException.ThrowIfNull(sbom);
@@ -47,6 +110,7 @@ public sealed class BuildProvenanceChainBuilder
?? buildInfo?.BuildType;
var sourceRepo = FindParameter(buildInfo, SourceRepoKeys);
var sourceCommit = FindParameter(buildInfo, SourceCommitKeys);
var sourceRef = FindParameter(buildInfo, SourceRefKeys);
var configUri = buildInfo?.ConfigSourceUri ?? buildInfo?.ConfigSourceEntrypoint;
var configDigest = buildInfo?.ConfigSourceDigest;
@@ -116,6 +180,17 @@ public sealed class BuildProvenanceChainBuilder
BuilderId = builderId,
SourceRepository = sourceRepo,
SourceCommit = sourceCommit,
SourceTrack = new SourceTrackEvidence
{
Reference = sourceRef,
ReviewCount = FindIntParameter(buildInfo, ReviewCountKeys),
ApproverIds = FindListParameter(buildInfo, ApproverIdsKeys),
AuthorId = FindParameter(buildInfo, AuthorIdKeys),
MergedById = FindParameter(buildInfo, MergedByIdKeys),
BranchProtected = FindBoolParameter(buildInfo, BranchProtectedKeys),
StatusChecksPassed = FindBoolParameter(buildInfo, StatusChecksPassedKeys),
PolicyHash = FindParameter(buildInfo, PolicyHashKeys)
},
BuildConfigUri = configUri,
BuildConfigDigest = configDigest,
Environment = environment,
@@ -124,6 +199,44 @@ public sealed class BuildProvenanceChainBuilder
};
}
private static bool? FindBoolParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
// Tri-state lookup: null when the parameter is absent or not a valid boolean literal.
return FindParameter(buildInfo, keys) is { } raw && bool.TryParse(raw, out var parsed)
? parsed
: (bool?)null;
}
private static int? FindIntParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
// Resolves the first matching build parameter and parses it as an integer.
// Machine-emitted provenance values must parse identically on every host, so use
// invariant culture explicitly (CA1305) instead of the culture-sensitive overload.
// Returns null when the parameter is absent or unparseable.
var value = FindParameter(buildInfo, keys);
if (value is null)
{
return null;
}
return int.TryParse(
value,
System.Globalization.NumberStyles.Integer,
System.Globalization.CultureInfo.InvariantCulture,
out var parsed)
? parsed
: null;
}
private static ImmutableArray<string> FindListParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
// Splits a delimited parameter value into a deterministic, de-duplicated list.
var raw = FindParameter(buildInfo, keys);
if (string.IsNullOrWhiteSpace(raw))
{
return [];
}
char[] separators = [',', ';', '|'];
var entries = raw.Split(separators, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
// Distinct keeps the first-seen casing (case-insensitive); ordinal sort keeps output stable.
return entries
.Where(static entry => !string.IsNullOrWhiteSpace(entry))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static entry => entry, StringComparer.Ordinal)
.ToImmutableArray();
}
private static string? FindParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
if (buildInfo?.Parameters is null || buildInfo.Parameters.IsEmpty)

View File

@@ -1,6 +1,8 @@
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Scanner.BuildProvenance.Models;
using StellaOps.Scanner.BuildProvenance.Policy;
using System.Collections.Immutable;
using System.Globalization;
namespace StellaOps.Scanner.BuildProvenance.Analyzers;
@@ -60,7 +62,7 @@ public sealed class SourceVerifier
if (policy.SourceRequirements.RequireTaggedRelease)
{
var reference = FindParameter(sbom.BuildInfo, RefKeys);
var reference = chain.SourceTrack.Reference ?? FindParameter(sbom.BuildInfo, RefKeys);
if (!IsTagReference(reference))
{
findings.Add(BuildFinding(
@@ -72,6 +74,82 @@ public sealed class SourceVerifier
}
}
var sourceTrack = chain.SourceTrack;
var sourceRequirements = policy.SourceRequirements;
if (sourceRequirements.MinimumReviewApprovals > 0)
{
var reviewCount = sourceTrack.ReviewCount ?? sourceTrack.ApproverIds.Length;
if (reviewCount < sourceRequirements.MinimumReviewApprovals)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Insufficient source review approvals",
$"Policy requires at least {sourceRequirements.MinimumReviewApprovals} review approvals but found {reviewCount}.",
subject: chain.SourceCommit ?? chain.SourceRepository,
metadata: BuildMetadata(
("minimumReviewApprovals", sourceRequirements.MinimumReviewApprovals.ToString(CultureInfo.InvariantCulture)),
("actualReviewApprovals", reviewCount.ToString(CultureInfo.InvariantCulture)),
("approverIds", string.Join(",", sourceTrack.ApproverIds)))));
}
}
if (sourceRequirements.RequireNoSelfMerge)
{
if (string.IsNullOrWhiteSpace(sourceTrack.AuthorId) || string.IsNullOrWhiteSpace(sourceTrack.MergedById))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Missing author or merge actor identity",
"Policy requires author and merge actor identities to enforce no-self-merge controls.",
subject: chain.SourceCommit ?? chain.SourceRepository));
}
else if (string.Equals(sourceTrack.AuthorId, sourceTrack.MergedById, StringComparison.OrdinalIgnoreCase))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Self-merge detected",
"Policy requires two-party review and prohibits self-merge.",
subject: chain.SourceCommit ?? chain.SourceRepository,
metadata: BuildMetadata(
("authorId", sourceTrack.AuthorId),
("mergedById", sourceTrack.MergedById))));
}
}
if (sourceRequirements.RequireProtectedBranch && sourceTrack.BranchProtected != true)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Protected branch control missing",
"Policy requires verified protected-branch controls for the promoted source revision.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (sourceRequirements.RequireStatusChecksPassed && sourceTrack.StatusChecksPassed != true)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Required status checks not satisfied",
"Policy requires mandatory source status checks to pass before build/promotion.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (sourceRequirements.RequirePolicyHash && string.IsNullOrWhiteSpace(sourceTrack.PolicyHash))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Missing source policy hash",
"Policy hash must be present so source governance can be attested and replayed.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (string.IsNullOrWhiteSpace(chain.SourceRepository))
{
findings.Add(BuildFinding(
@@ -85,6 +163,27 @@ public sealed class SourceVerifier
return findings;
}
private static ImmutableDictionary<string, string> BuildMetadata(params (string Key, string Value)[] entries)
{
// Materializes finding metadata, silently skipping blank keys or values so
// downstream serialization never carries empty entries. Later duplicates of
// the same key overwrite earlier ones (indexer semantics).
if (entries.Length == 0)
{
return ImmutableDictionary<string, string>.Empty;
}
var metadata = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
foreach (var (key, value) in entries)
{
if (!string.IsNullOrWhiteSpace(key) && !string.IsNullOrWhiteSpace(value))
{
metadata[key] = value;
}
}
return metadata.ToImmutable();
}
private static bool IsSigned(ParsedBuildInfo? buildInfo)
{
if (buildInfo?.Parameters is null || buildInfo.Parameters.IsEmpty)
@@ -158,7 +257,8 @@ public sealed class SourceVerifier
ProvenanceSeverity severity,
string title,
string description,
string? subject)
string? subject,
ImmutableDictionary<string, string>? metadata = null)
{
return new ProvenanceFinding
{
@@ -166,7 +266,8 @@ public sealed class SourceVerifier
Severity = severity,
Title = title,
Description = description,
Subject = subject
Subject = subject,
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty
};
}
}

View File

@@ -18,6 +18,7 @@ public sealed record BuildProvenanceChain
{
public static BuildProvenanceChain Empty { get; } = new()
{
SourceTrack = SourceTrackEvidence.Empty,
Environment = ImmutableDictionary<string, string>.Empty,
Inputs = [],
Outputs = []
@@ -26,6 +27,7 @@ public sealed record BuildProvenanceChain
public string? BuilderId { get; init; }
public string? SourceRepository { get; init; }
public string? SourceCommit { get; init; }
public SourceTrackEvidence SourceTrack { get; init; } = SourceTrackEvidence.Empty;
public string? BuildConfigUri { get; init; }
public string? BuildConfigDigest { get; init; }
public ImmutableDictionary<string, string> Environment { get; init; } =
@@ -34,6 +36,23 @@ public sealed record BuildProvenanceChain
public ImmutableArray<BuildOutput> Outputs { get; init; } = [];
}
/// <summary>
/// Source-governance evidence extracted from build parameters (review, merge, and
/// branch-protection signals) used by source policy verification.
/// </summary>
public sealed record SourceTrackEvidence
{
/// <summary>Shared empty instance; explicitly initializes <see cref="ApproverIds"/> so the array is never default.</summary>
public static SourceTrackEvidence Empty { get; } = new()
{
ApproverIds = []
};
/// <summary>Source reference the build was produced from (e.g. a git ref), when declared.</summary>
public string? Reference { get; init; }
/// <summary>Declared review-approval count; policy checks fall back to <see cref="ApproverIds"/> length when null.</summary>
public int? ReviewCount { get; init; }
/// <summary>Distinct reviewer identities (case-insensitive de-dup, ordinal-sorted) parsed from a delimited parameter.</summary>
public ImmutableArray<string> ApproverIds { get; init; } = [];
/// <summary>Change author identity; compared against <see cref="MergedById"/> for no-self-merge enforcement.</summary>
public string? AuthorId { get; init; }
/// <summary>Identity that merged the change.</summary>
public string? MergedById { get; init; }
/// <summary>Whether branch-protection controls were verified; null when unknown.</summary>
public bool? BranchProtected { get; init; }
/// <summary>Whether required status checks passed; null when unknown.</summary>
public bool? StatusChecksPassed { get; init; }
/// <summary>Hash of the source governance policy so governance can be attested and replayed.</summary>
public string? PolicyHash { get; init; }
}
public sealed record BuildInput
{
public required string Reference { get; init; }
@@ -56,6 +75,7 @@ public sealed record BuildProvenanceAttestation
public string? BuilderId { get; init; }
public string? SourceRepository { get; init; }
public string? SourceCommit { get; init; }
public SourceTrackEvidence SourceTrack { get; init; } = SourceTrackEvidence.Empty;
public DateTimeOffset GeneratedAtUtc { get; init; } = DateTimeOffset.UtcNow;
}
@@ -82,7 +102,8 @@ public enum BuildProvenanceFindingType
NonReproducibleBuild,
SlsaLevelInsufficient,
InputIntegrityFailed,
OutputMismatch
OutputMismatch,
SourcePolicyFailed
}
public enum ProvenanceSeverity

View File

@@ -24,6 +24,11 @@ public sealed record SourceRequirements
{
public bool RequireSignedCommits { get; init; }
public bool RequireTaggedRelease { get; init; }
public int MinimumReviewApprovals { get; init; }
public bool RequireNoSelfMerge { get; init; }
public bool RequireProtectedBranch { get; init; }
public bool RequireStatusChecksPassed { get; init; }
public bool RequirePolicyHash { get; init; }
public ImmutableArray<string> AllowedRepositories { get; init; } = [];
}
@@ -58,6 +63,11 @@ public static class BuildProvenancePolicyDefaults
{
RequireSignedCommits = false,
RequireTaggedRelease = false,
MinimumReviewApprovals = 0,
RequireNoSelfMerge = false,
RequireProtectedBranch = false,
RequireStatusChecksPassed = false,
RequirePolicyHash = false,
AllowedRepositories = []
},
BuildRequirements = new BuildRequirements

View File

@@ -36,7 +36,18 @@ public static class BuildProvenanceReportFormatter
source = new
{
repository = report.ProvenanceChain.SourceRepository,
commit = report.ProvenanceChain.SourceCommit
reference = report.ProvenanceChain.SourceTrack.Reference,
commit = report.ProvenanceChain.SourceCommit,
policyHash = report.ProvenanceChain.SourceTrack.PolicyHash,
review = new
{
count = report.ProvenanceChain.SourceTrack.ReviewCount,
approvers = report.ProvenanceChain.SourceTrack.ApproverIds,
authorId = report.ProvenanceChain.SourceTrack.AuthorId,
mergedById = report.ProvenanceChain.SourceTrack.MergedById,
branchProtected = report.ProvenanceChain.SourceTrack.BranchProtected,
statusChecksPassed = report.ProvenanceChain.SourceTrack.StatusChecksPassed
}
},
buildConfig = new
{

View File

@@ -6,3 +6,4 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Libraries/StellaOps.Scanner.BuildProvenance/StellaOps.Scanner.BuildProvenance.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| STS-002 | DONE | SPRINT_20260210_004 - Added Source Track policy controls, chain capture fields, and fail-closed source policy findings. |

View File

@@ -0,0 +1,45 @@
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Row model for scanner.artifact_boms hot-lookup projection.
/// </summary>
public sealed class ArtifactBomRow
{
// Build identifier; forms the composite primary key together with InsertedAt
// (the partition key must be part of the PK on the partitioned table).
public string BuildId { get; set; } = default!;
// SHA-256 of the canonicalized BOM document — exact encoding set by ingestion; TODO confirm hex vs prefixed digest.
public string CanonicalBomSha256 { get; set; } = default!;
// Digest of the raw uploaded payload; with CanonicalBomSha256 it forms the monthly upsert idempotency key.
public string PayloadDigest { get; set; } = default!;
// Partition key (TIMESTAMPTZ); defaults to now() server-side when omitted.
public DateTimeOffset InsertedAt { get; set; }
// CAS/object-storage references — authoritative full payloads live there, not in this projection.
public string? RawBomRef { get; set; }
public string? CanonicalBomRef { get; set; }
public string? DsseEnvelopeRef { get; set; }
public string? MergedVexRef { get; set; }
// Inline JSONB column contents (as text) backing the hot-lookup jsonpath queries; null when not projected.
public string? CanonicalBomJson { get; set; }
public string? MergedVexJson { get; set; }
public string? AttestationsJson { get; set; }
// Aggregate evidence score; scoring semantics defined by the ingestion pipeline — see sprint contract doc.
public int EvidenceScore { get; set; }
// Rekor transparency-log tile reference, when an attestation was logged.
public string? RekorTileId { get; set; }
// Populated only by pending-triage queries: jsonb_path_query_array extract of pending merged-VEX entries.
public string? PendingMergedVexJson { get; set; }
}
/// <summary>
/// Result row for retention operations over artifact_boms partitions.
/// </summary>
public sealed class ArtifactBomPartitionDropRow
{
// Partition relation name, e.g. "artifact_boms_2026_02".
public string PartitionName { get; set; } = string.Empty;
// False when reported under dry-run mode; true when the partition was actually dropped.
public bool Dropped { get; set; }
}

View File

@@ -76,6 +76,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<EntryTraceRepository>();
services.AddScoped<RubyPackageInventoryRepository>();
services.AddScoped<BunPackageInventoryRepository>();
services.AddScoped<IArtifactBomRepository, PostgresArtifactBomRepository>();
services.TryAddSingleton<IClassificationHistoryRepository, ClassificationHistoryRepository>();
services.TryAddSingleton<IClassificationChangeTracker, ClassificationChangeTracker>();
services.AddScoped<IProofSpineRepository, PostgresProofSpineRepository>();

View File

@@ -0,0 +1,151 @@
-- SPDX-License-Identifier: BUSL-1.1
-- Copyright (c) 2026 StellaOps
-- Sprint: SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract
-- Task: HOT-002
--
-- Scanner hot-lookup projection for SBOM/attestation metadata.
-- Authoritative full payloads remain in CAS/object storage.
-- Hot-lookup projection rows. RANGE-partitioned by month on inserted_at; the
-- partition key must be part of the primary key on a partitioned table, hence
-- the composite (build_id, inserted_at) PK.
CREATE TABLE IF NOT EXISTS scanner.artifact_boms (
build_id TEXT NOT NULL,
canonical_bom_sha256 TEXT NOT NULL,
payload_digest TEXT NOT NULL,
inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(),
raw_bom_ref TEXT,
canonical_bom_ref TEXT,
dsse_envelope_ref TEXT,
merged_vex_ref TEXT,
canonical_bom JSONB,
merged_vex JSONB,
attestations JSONB,
evidence_score INTEGER NOT NULL DEFAULT 0,
rekor_tile_id TEXT,
PRIMARY KEY (build_id, inserted_at)
) PARTITION BY RANGE (inserted_at);
COMMENT ON TABLE scanner.artifact_boms IS
'Monthly-partitioned Scanner SBOM/attestation hot-lookup projection for digest/component/triage queries.';
-- Serves latest-by-payload-digest lookups (ORDER BY inserted_at DESC LIMIT 1).
CREATE INDEX IF NOT EXISTS ix_artifact_boms_payload_digest
ON scanner.artifact_boms (payload_digest, inserted_at DESC);
-- Direct canonical-hash lookups.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_canonical_sha
ON scanner.artifact_boms (canonical_bom_sha256);
-- Time-window scans and retention maintenance.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_inserted_at
ON scanner.artifact_boms (inserted_at DESC);
-- jsonb_path_ops GIN indexes accelerate the jsonpath component/VEX queries issued by the repository.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_canonical_gin
ON scanner.artifact_boms USING GIN (canonical_bom jsonb_path_ops);
CREATE INDEX IF NOT EXISTS ix_artifact_boms_merged_vex_gin
ON scanner.artifact_boms USING GIN (merged_vex jsonb_path_ops);
-- Partial index whose predicate mirrors the pending-triage query path
-- ('unknown' / 'triage_pending' states) so that query stays cheap.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_pending_vex
ON scanner.artifact_boms USING GIN (merged_vex jsonb_path_ops)
WHERE jsonb_path_exists(
merged_vex,
'$[*] ? (@.state == "unknown" || @.state == "triage_pending")');
-- Creates (idempotently, via IF NOT EXISTS) the monthly partition covering
-- [first day of month, first day of next month). Identifiers/literals are
-- quoted with %I/%L inside format() to keep the dynamic DDL safe.
CREATE OR REPLACE FUNCTION scanner.create_artifact_boms_partition(p_year INT, p_month INT)
RETURNS TEXT AS $$
DECLARE
v_start DATE;
v_end DATE;
v_partition_name TEXT;
BEGIN
IF p_month < 1 OR p_month > 12 THEN
RAISE EXCEPTION 'Invalid month % (expected 1-12)', p_month;
END IF;
v_start := make_date(p_year, p_month, 1);
v_end := (v_start + INTERVAL '1 month')::DATE;
-- Naming convention artifact_boms_YYYY_MM; the retention job's regex depends on it.
v_partition_name := format('artifact_boms_%s_%s', p_year, lpad(p_month::TEXT, 2, '0'));
EXECUTE format(
'CREATE TABLE IF NOT EXISTS scanner.%I PARTITION OF scanner.artifact_boms FOR VALUES FROM (%L) TO (%L)',
v_partition_name,
v_start::TIMESTAMPTZ,
v_end::TIMESTAMPTZ);
RETURN v_partition_name;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.create_artifact_boms_partition IS
'Creates a monthly partition for scanner.artifact_boms and returns the partition name.';
-- Ensures partitions exist for the current UTC month plus p_months_ahead future
-- months (offsets 0..p_months_ahead inclusive). Streams each created/verified
-- partition name via RETURN NEXT.
CREATE OR REPLACE FUNCTION scanner.ensure_artifact_boms_future_partitions(p_months_ahead INT DEFAULT 1)
RETURNS TABLE(partition_name TEXT) AS $$
DECLARE
v_base_month DATE;
v_current DATE;
v_month_offset INT;
BEGIN
IF p_months_ahead < 0 THEN
RAISE EXCEPTION 'p_months_ahead must be >= 0';
END IF;
-- Anchor on UTC so month boundaries match the repository's UTC-normalized inserted_at values.
v_base_month := date_trunc('month', now() AT TIME ZONE 'UTC')::DATE;
FOR v_month_offset IN 0..p_months_ahead LOOP
v_current := (v_base_month + (v_month_offset || ' months')::INTERVAL)::DATE;
partition_name := scanner.create_artifact_boms_partition(
EXTRACT(YEAR FROM v_current)::INT,
EXTRACT(MONTH FROM v_current)::INT);
RETURN NEXT;
END LOOP;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.ensure_artifact_boms_future_partitions IS
'Ensures current and upcoming scanner.artifact_boms monthly partitions exist.';
-- Retention: drops monthly partitions whose month is strictly older than the
-- cutoff (current UTC month minus p_retain_months). Only relations matching the
-- managed artifact_boms_YYYY_MM naming pattern are considered, so ad-hoc child
-- tables are never touched. Dry-run mode reports candidates without dropping.
CREATE OR REPLACE FUNCTION scanner.drop_artifact_boms_partitions_older_than(
p_retain_months INT DEFAULT 12,
p_dry_run BOOLEAN DEFAULT FALSE)
RETURNS TABLE(partition_name TEXT, dropped BOOLEAN) AS $$
DECLARE
v_cutoff DATE;
v_partition RECORD;
BEGIN
IF p_retain_months < 1 THEN
RAISE EXCEPTION 'p_retain_months must be >= 1';
END IF;
v_cutoff := (date_trunc('month', now() AT TIME ZONE 'UTC')::DATE - (p_retain_months || ' months')::INTERVAL)::DATE;
-- Enumerate child partitions of scanner.artifact_boms via pg_inherits and parse
-- the month back out of the partition name for the cutoff comparison.
FOR v_partition IN
SELECT c.relname AS relname
FROM pg_inherits i
JOIN pg_class c ON c.oid = i.inhrelid
JOIN pg_class p ON p.oid = i.inhparent
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE p.relname = 'artifact_boms'
AND n.nspname = 'scanner'
AND c.relname ~ '^artifact_boms_[0-9]{4}_[0-9]{2}$'
AND to_date(substring(c.relname from 'artifact_boms_([0-9]{4}_[0-9]{2})'), 'YYYY_MM') < v_cutoff
ORDER BY c.relname
LOOP
partition_name := v_partition.relname;
IF p_dry_run THEN
dropped := FALSE;
RETURN NEXT;
ELSE
EXECUTE format('DROP TABLE IF EXISTS scanner.%I', v_partition.relname);
dropped := TRUE;
RETURN NEXT;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.drop_artifact_boms_partitions_older_than IS
'Drops scanner.artifact_boms monthly partitions older than retain window; supports dry-run mode.';
-- Ensure current and next month partitions exist so month-boundary ingest does not fail.
SELECT scanner.ensure_artifact_boms_future_partitions(1);

View File

@@ -26,5 +26,7 @@ internal static class MigrationIds
public const string SbomSources = "020_sbom_sources.sql";
public const string SecretDetectionSettings = "021_secret_detection_settings.sql";
public const string ReachabilityEvidence = "022_reachability_evidence.sql";
public const string RuntimeObservations = "023_runtime_observations.sql";
public const string ScoreHistory = "024_score_history.sql";
public const string ArtifactBomsHotLookup = "025_artifact_boms_hot_lookup.sql";
}

View File

@@ -0,0 +1,425 @@
using Dapper;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation for scanner.artifact_boms hot-lookup projection queries.
/// </summary>
public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
{
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresArtifactBomRepository> _logger;
// Schema is configuration-derived (never user input), so interpolating it into SQL below is safe.
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string TableName => $"{SchemaName}.artifact_boms";
/// <summary>Creates the repository over the scanner data source.</summary>
/// <exception cref="ArgumentNullException">When <paramref name="dataSource"/> or <paramref name="logger"/> is null.</exception>
public PostgresArtifactBomRepository(
ScannerDataSource dataSource,
ILogger<PostgresArtifactBomRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Idempotent upsert keyed on (canonical_bom_sha256, payload_digest) within the calendar
/// month derived from <see cref="ArtifactBomRow.InsertedAt"/> (UTC now when unset). An
/// existing row in that month window is updated in place, preserving its
/// (build_id, inserted_at) identity; otherwise a fresh row is inserted.
/// </summary>
/// <returns>The persisted row: the pre-existing row (with refreshed fields) or <paramref name="row"/>.</returns>
public async Task<ArtifactBomRow> UpsertMonthlyAsync(ArtifactBomRow row, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(row);
ArgumentException.ThrowIfNullOrWhiteSpace(row.BuildId);
ArgumentException.ThrowIfNullOrWhiteSpace(row.CanonicalBomSha256);
ArgumentException.ThrowIfNullOrWhiteSpace(row.PayloadDigest);
// Normalize to UTC so the month window aligns with the TIMESTAMPTZ range partitions.
var insertedAt = row.InsertedAt == default
? DateTimeOffset.UtcNow
: row.InsertedAt.ToUniversalTime();
var monthStart = new DateTimeOffset(insertedAt.Year, insertedAt.Month, 1, 0, 0, 0, TimeSpan.Zero);
var monthEnd = monthStart.AddMonths(1);
// Advisory-lock key: serializes concurrent upserts of the same (hash, digest, month) triple.
var lockKey = $"{row.CanonicalBomSha256}|{row.PayloadDigest}|{monthStart:yyyy-MM}";
const string selectExistingTemplate = """
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
raw_bom_ref AS RawBomRef,
canonical_bom_ref AS CanonicalBomRef,
dsse_envelope_ref AS DsseEnvelopeRef,
merged_vex_ref AS MergedVexRef,
canonical_bom::text AS CanonicalBomJson,
merged_vex::text AS MergedVexJson,
attestations::text AS AttestationsJson,
evidence_score AS EvidenceScore,
rekor_tile_id AS RekorTileId
FROM {0}
WHERE canonical_bom_sha256 = @CanonicalBomSha256
AND payload_digest = @PayloadDigest
AND inserted_at >= @MonthStart
AND inserted_at < @MonthEnd
ORDER BY inserted_at DESC, build_id ASC
LIMIT 1
FOR UPDATE
"""
var selectExistingSql = string.Format(selectExistingTemplate, TableName);
var updateExistingSql = $"""
UPDATE {TableName}
SET
raw_bom_ref = @RawBomRef,
canonical_bom_ref = @CanonicalBomRef,
dsse_envelope_ref = @DsseEnvelopeRef,
merged_vex_ref = @MergedVexRef,
canonical_bom = @CanonicalBomJson::jsonb,
merged_vex = @MergedVexJson::jsonb,
attestations = @AttestationsJson::jsonb,
evidence_score = @EvidenceScore,
rekor_tile_id = @RekorTileId
WHERE build_id = @BuildId
AND inserted_at = @InsertedAt
"""
var insertSql = $"""
INSERT INTO {TableName} (
build_id,
canonical_bom_sha256,
payload_digest,
inserted_at,
raw_bom_ref,
canonical_bom_ref,
dsse_envelope_ref,
merged_vex_ref,
canonical_bom,
merged_vex,
attestations,
evidence_score,
rekor_tile_id
) VALUES (
@BuildId,
@CanonicalBomSha256,
@PayloadDigest,
@InsertedAt,
@RawBomRef,
@CanonicalBomRef,
@DsseEnvelopeRef,
@MergedVexRef,
@CanonicalBomJson::jsonb,
@MergedVexJson::jsonb,
@AttestationsJson::jsonb,
@EvidenceScore,
@RekorTileId
)
ON CONFLICT (build_id, inserted_at) DO UPDATE SET
canonical_bom_sha256 = EXCLUDED.canonical_bom_sha256,
payload_digest = EXCLUDED.payload_digest,
raw_bom_ref = EXCLUDED.raw_bom_ref,
canonical_bom_ref = EXCLUDED.canonical_bom_ref,
dsse_envelope_ref = EXCLUDED.dsse_envelope_ref,
merged_vex_ref = EXCLUDED.merged_vex_ref,
canonical_bom = EXCLUDED.canonical_bom,
merged_vex = EXCLUDED.merged_vex,
attestations = EXCLUDED.attestations,
evidence_score = EXCLUDED.evidence_score,
rekor_tile_id = EXCLUDED.rekor_tile_id
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
// pg_advisory_xact_lock holds until commit/rollback; hashtext folds the textual key into the lock's int space.
var command = new CommandDefinition(
"SELECT pg_advisory_xact_lock(hashtext(@LockKey));",
new { LockKey = lockKey },
transaction,
cancellationToken: cancellationToken);
await connection.ExecuteAsync(command).ConfigureAwait(false);
// Row-lock (FOR UPDATE) the candidate so the update below cannot race a concurrent writer.
var existing = await connection.QuerySingleOrDefaultAsync<ArtifactBomRow>(
new CommandDefinition(
selectExistingSql,
new
{
row.CanonicalBomSha256,
row.PayloadDigest,
MonthStart = monthStart,
MonthEnd = monthEnd
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
if (existing is not null)
{
// Same payload already projected this month: refresh mutable fields in place.
await connection.ExecuteAsync(
new CommandDefinition(
updateExistingSql,
new
{
BuildId = existing.BuildId,
InsertedAt = existing.InsertedAt,
row.RawBomRef,
row.CanonicalBomRef,
row.DsseEnvelopeRef,
row.MergedVexRef,
row.CanonicalBomJson,
row.MergedVexJson,
row.AttestationsJson,
row.EvidenceScore,
row.RekorTileId
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
// Mirror the persisted values onto the returned entity so callers see the final state.
existing.RawBomRef = row.RawBomRef;
existing.CanonicalBomRef = row.CanonicalBomRef;
existing.DsseEnvelopeRef = row.DsseEnvelopeRef;
existing.MergedVexRef = row.MergedVexRef;
existing.CanonicalBomJson = row.CanonicalBomJson;
existing.MergedVexJson = row.MergedVexJson;
existing.AttestationsJson = row.AttestationsJson;
existing.EvidenceScore = row.EvidenceScore;
existing.RekorTileId = row.RekorTileId;
return existing;
}
// No row in this month window: insert; ON CONFLICT guards a duplicate (build_id, inserted_at) key.
await connection.ExecuteAsync(
new CommandDefinition(
insertSql,
new
{
row.BuildId,
row.CanonicalBomSha256,
row.PayloadDigest,
InsertedAt = insertedAt,
row.RawBomRef,
row.CanonicalBomRef,
row.DsseEnvelopeRef,
row.MergedVexRef,
row.CanonicalBomJson,
row.MergedVexJson,
row.AttestationsJson,
row.EvidenceScore,
row.RekorTileId
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
row.InsertedAt = insertedAt;
return row;
}
/// <summary>
/// Returns the newest projection row (metadata columns only — no inline JSON payloads)
/// for the given payload digest, or null when none exists. Digest is trimmed before matching.
/// </summary>
public async Task<ArtifactBomRow?> TryGetLatestByPayloadDigestAsync(
string payloadDigest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(payloadDigest);
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore,
rekor_tile_id AS RekorTileId
FROM {TableName}
WHERE payload_digest = @PayloadDigest
ORDER BY inserted_at DESC, build_id ASC
LIMIT 1
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.QuerySingleOrDefaultAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { PayloadDigest = payloadDigest.Trim() },
cancellationToken: cancellationToken)).ConfigureAwait(false);
}
/// <summary>
/// Finds rows whose canonical BOM contains a component with the exact PURL, newest first.
/// The PURL is passed as a jsonpath variable (not string-spliced) so it is injection-safe.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindByComponentPurlAsync(
string purl,
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(purl);
ValidatePagination(limit, offset);
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
'$.components[*] ? (@.purl == $purl)',
jsonb_build_object('purl', to_jsonb(@Purl::text)))
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { Purl = purl.Trim(), Limit = limit, Offset = offset },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Finds rows whose canonical BOM contains a component by name, optionally at or above
/// a minimum version, newest first.
/// NOTE(review): the name is lowercased before matching — assumes ingestion stores component
/// names lowercased; confirm against the projection writer. The version bound uses jsonpath
/// string comparison (lexicographic), not semver ordering — TODO confirm acceptable.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindByComponentNameAsync(
string componentName,
string? minVersion,
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(componentName);
ValidatePagination(limit, offset);
var hasMinVersion = !string.IsNullOrWhiteSpace(minVersion);
// Choose the jsonpath up front; $minVersion is always bound (empty when unused) but only
// referenced by the path when a minimum version was supplied.
var jsonPath = hasMinVersion
? "$.components[*] ? (@.name == $name && @.version >= $minVersion)"
: "$.components[*] ? (@.name == $name)";
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
@JsonPath::jsonpath,
jsonb_build_object(
'name', to_jsonb(@Name::text),
'minVersion', to_jsonb(@MinVersion::text)))
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new
{
JsonPath = jsonPath,
Name = componentName.Trim().ToLowerInvariant(),
MinVersion = minVersion?.Trim() ?? string.Empty,
Limit = limit,
Offset = offset
},
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Finds rows whose merged VEX contains entries in 'unknown'/'triage_pending' state,
/// newest first. The jsonpath predicate mirrors the partial index defined in migration
/// 025_artifact_boms_hot_lookup.sql; the matched entries are projected into
/// <see cref="ArtifactBomRow.PendingMergedVexJson"/>.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindPendingTriageAsync(
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ValidatePagination(limit, offset);
const string PendingPath = "$[*] ? (@.state == \"unknown\" || @.state == \"triage_pending\")";
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore,
jsonb_path_query_array(merged_vex, @PendingPath::jsonpath)::text AS PendingMergedVexJson
FROM {TableName}
WHERE jsonb_path_exists(merged_vex, @PendingPath::jsonpath)
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { PendingPath, Limit = limit, Offset = offset },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Ensures current + <paramref name="monthsAhead"/> future monthly partitions exist by
/// delegating to the ensure_artifact_boms_future_partitions SQL function.
/// </summary>
public async Task EnsureFuturePartitionsAsync(int monthsAhead, CancellationToken cancellationToken = default)
{
if (monthsAhead < 0)
{
throw new ArgumentOutOfRangeException(nameof(monthsAhead), "monthsAhead must be >= 0.");
}
var sql = $"SELECT partition_name FROM {SchemaName}.ensure_artifact_boms_future_partitions(@MonthsAhead);";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var partitions = await connection.QueryAsync<string>(
new CommandDefinition(
sql,
new { MonthsAhead = monthsAhead },
cancellationToken: cancellationToken)).ConfigureAwait(false);
// Dapper buffers QueryAsync results by default, so Count() here does not re-query the database.
_logger.LogInformation(
"Ensured scanner.artifact_boms partitions monthsAhead={MonthsAhead} createdOrVerified={Count}",
monthsAhead,
partitions.Count());
}
/// <summary>
/// Drops partitions older than the retention window via the
/// drop_artifact_boms_partitions_older_than SQL function; dry-run reports without dropping.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomPartitionDropRow>> DropOldPartitionsAsync(
int retainMonths,
bool dryRun,
CancellationToken cancellationToken = default)
{
if (retainMonths < 1)
{
throw new ArgumentOutOfRangeException(nameof(retainMonths), "retainMonths must be >= 1.");
}
var sql = $"""
SELECT
partition_name AS PartitionName,
dropped AS Dropped
FROM {SchemaName}.drop_artifact_boms_partitions_older_than(@RetainMonths, @DryRun)
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomPartitionDropRow>(
new CommandDefinition(
sql,
new { RetainMonths = retainMonths, DryRun = dryRun },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
// Bounded pagination guard shared by all read paths: limit in [1, 500], offset >= 0.
private static void ValidatePagination(int limit, int offset)
{
if (limit <= 0 || limit > 500)
{
throw new ArgumentOutOfRangeException(nameof(limit), "limit must be between 1 and 500.");
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "offset must be >= 0.");
}
}
}

View File

@@ -0,0 +1,62 @@
using StellaOps.Scanner.Storage.Entities;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Repository for Scanner SBOM/attestation hot-lookup projection rows.
/// </summary>
public interface IArtifactBomRepository
{
/// <summary>
/// Upserts a projection row in the current partition month window using
/// canonical hash and payload digest idempotency semantics.
/// </summary>
/// <param name="row">Row to persist; BuildId, CanonicalBomSha256 and PayloadDigest are required.</param>
/// <returns>The persisted row (the pre-existing month-window row when updated in place).</returns>
Task<ArtifactBomRow> UpsertMonthlyAsync(ArtifactBomRow row, CancellationToken cancellationToken = default);
/// <summary>
/// Returns the latest projection row for a payload digest.
/// </summary>
/// <returns>The newest matching row, or null when the digest is unknown.</returns>
Task<ArtifactBomRow?> TryGetLatestByPayloadDigestAsync(
string payloadDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows containing a component with the specified PURL.
/// </summary>
/// <param name="limit">Page size; implementations bound this (1-500 in the Postgres implementation).</param>
/// <param name="offset">Zero-based page offset; must be non-negative.</param>
Task<IReadOnlyList<ArtifactBomRow>> FindByComponentPurlAsync(
string purl,
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows containing a component name and optional minimum version.
/// NOTE(review): version-bound comparison semantics (lexicographic vs semver) are
/// implementation-defined — confirm before relying on ordering.
/// </summary>
Task<IReadOnlyList<ArtifactBomRow>> FindByComponentNameAsync(
string componentName,
string? minVersion,
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows with pending triage states in merged VEX payloads;
/// matched entries are surfaced via <see cref="ArtifactBomRow.PendingMergedVexJson"/>.
/// </summary>
Task<IReadOnlyList<ArtifactBomRow>> FindPendingTriageAsync(
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Ensures current/future monthly partitions exist.
/// </summary>
/// <param name="monthsAhead">How many months beyond the current one to pre-create; must be &gt;= 0.</param>
Task EnsureFuturePartitionsAsync(int monthsAhead, CancellationToken cancellationToken = default);
/// <summary>
/// Drops old partitions according to retention window (in months).
/// </summary>
/// <param name="dryRun">When true, reports candidate partitions without dropping them.</param>
/// <returns>One row per affected/candidate partition with its drop outcome.</returns>
Task<IReadOnlyList<ArtifactBomPartitionDropRow>> DropOldPartitionsAsync(
int retainMonths,
bool dryRun,
CancellationToken cancellationToken = default);
}

View File

@@ -6,3 +6,6 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| HOT-002 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: added `scanner.artifact_boms` partitioned schema + indexes + helper functions. |
| HOT-003 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: implemented ingestion projection and idempotent upsert flow. |
| HOT-005 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: delivered partition pre-create and retention maintenance jobs/assets. |

View File

@@ -6,6 +6,9 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Scanner.Triage.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" />

View File

@@ -17,7 +17,12 @@ public sealed class BuildProvenancePolicyLoaderTests
"buildProvenancePolicy": {
"minimumSlsaLevel": 3,
"sourceRequirements": {
"requireSignedCommits": true
"requireSignedCommits": true,
"minimumReviewApprovals": 2,
"requireNoSelfMerge": true,
"requireProtectedBranch": true,
"requireStatusChecksPassed": true,
"requirePolicyHash": true
}
}
}
@@ -28,5 +33,10 @@ public sealed class BuildProvenancePolicyLoaderTests
Assert.Equal(3, policy.MinimumSlsaLevel);
Assert.True(policy.SourceRequirements.RequireSignedCommits);
Assert.Equal(2, policy.SourceRequirements.MinimumReviewApprovals);
Assert.True(policy.SourceRequirements.RequireNoSelfMerge);
Assert.True(policy.SourceRequirements.RequireProtectedBranch);
Assert.True(policy.SourceRequirements.RequireStatusChecksPassed);
Assert.True(policy.SourceRequirements.RequirePolicyHash);
}
}

View File

@@ -30,12 +30,28 @@ public sealed class BuildProvenanceReportFormatterTests
var report = new BuildProvenanceReport
{
AchievedLevel = SlsaLevel.Level2,
ProvenanceChain = BuildProvenanceChain.Empty
ProvenanceChain = BuildProvenanceChain.Empty with
{
SourceTrack = SourceTrackEvidence.Empty with
{
Reference = "refs/heads/main",
PolicyHash = "sha256:policy123",
ReviewCount = 2,
ApproverIds = ["approver-a", "approver-b"],
AuthorId = "author-a",
MergedById = "approver-a",
BranchProtected = true,
StatusChecksPassed = true
}
}
};
var json = BuildProvenanceReportFormatter.ToInTotoPredicateBytes(report);
var payload = Encoding.UTF8.GetString(json);
Assert.Contains("https://slsa.dev/provenance/v1", payload);
Assert.Contains("\"reference\":\"refs/heads/main\"", payload);
Assert.Contains("\"policyHash\":\"sha256:policy123\"", payload);
Assert.Contains("\"approvers\":[\"approver-a\",\"approver-b\"]", payload);
}
}

View File

@@ -0,0 +1,126 @@
using System.Linq;
using StellaOps.Scanner.BuildProvenance.Analyzers;
using StellaOps.Scanner.BuildProvenance.Models;
using StellaOps.Scanner.BuildProvenance.Policy;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.BuildProvenance.Tests;
/// <summary>
/// Unit tests for Source Track policy enforcement in <c>SourceVerifier</c>:
/// review quorum, no-self-merge, and branch/status-check/policy-hash gates
/// driven by deterministic build metadata parameters on the SBOM.
/// </summary>
public sealed class SourceVerifierTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void Verify_RequiresMinimumReviewApprovals_FailsWhenBelowThreshold()
    {
        // Build metadata reports a single review while the policy demands two.
        var sbom = TestSbomFactory.CreateSbom(
            TestSbomFactory.CreateBuildInfo(builder =>
            {
                builder.WithParameter("sourceRepository", "https://git.example/stella/repo");
                builder.WithParameter("sourceReviewCount", "1");
            }));
        var chain = new BuildProvenanceChainBuilder().Build(sbom);
        var policy = BuildProvenancePolicyDefaults.Default with
        {
            SourceRequirements = BuildProvenancePolicyDefaults.Default.SourceRequirements with
            {
                MinimumReviewApprovals = 2
            }
        };

        var findings = new SourceVerifier().Verify(sbom, chain, policy).ToArray();

        // Exactly one source-policy failure, carrying both threshold and actual counts in metadata.
        var finding = Assert.Single(findings.Where(f => f.Type == BuildProvenanceFindingType.SourcePolicyFailed));
        Assert.Equal("Insufficient source review approvals", finding.Title);
        Assert.Equal("2", finding.Metadata["minimumReviewApprovals"]);
        Assert.Equal("1", finding.Metadata["actualReviewApprovals"]);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void Verify_RequireNoSelfMerge_FailsWhenAuthorMatchesMergeActor()
    {
        // Author and merge actor are the same identity, which the policy forbids.
        var sbom = TestSbomFactory.CreateSbom(
            TestSbomFactory.CreateBuildInfo(builder =>
            {
                builder.WithParameter("sourceRepository", "https://git.example/stella/repo");
                builder.WithParameter("sourceAuthorId", "alice");
                builder.WithParameter("sourceMergedById", "alice");
            }));
        var chain = new BuildProvenanceChainBuilder().Build(sbom);
        var policy = BuildProvenancePolicyDefaults.Default with
        {
            SourceRequirements = BuildProvenancePolicyDefaults.Default.SourceRequirements with
            {
                RequireNoSelfMerge = true
            }
        };

        var findings = new SourceVerifier().Verify(sbom, chain, policy).ToArray();

        // The finding exposes both identities so reviewers can see the self-merge pair.
        var finding = Assert.Single(findings.Where(f => f.Type == BuildProvenanceFindingType.SourcePolicyFailed));
        Assert.Equal("Self-merge detected", finding.Title);
        Assert.Equal("alice", finding.Metadata["authorId"]);
        Assert.Equal("alice", finding.Metadata["mergedById"]);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void Verify_MinimumReviewApprovals_UsesApproverListWhenReviewCountMissing()
    {
        // No explicit sourceReviewCount: the approver ID list alone must satisfy the quorum.
        var sbom = TestSbomFactory.CreateSbom(
            TestSbomFactory.CreateBuildInfo(builder =>
            {
                builder.WithParameter("sourceRepository", "https://git.example/stella/repo");
                builder.WithParameter("sourceApproverIds", "approver-b,approver-a");
            }));
        var chain = new BuildProvenanceChainBuilder().Build(sbom);
        var policy = BuildProvenancePolicyDefaults.Default with
        {
            SourceRequirements = BuildProvenancePolicyDefaults.Default.SourceRequirements with
            {
                MinimumReviewApprovals = 2
            }
        };

        var findings = new SourceVerifier().Verify(sbom, chain, policy).ToArray();

        Assert.DoesNotContain(findings, finding => finding.Type == BuildProvenanceFindingType.SourcePolicyFailed);
        // Input order is "approver-b,approver-a"; the chain exposes the IDs in sorted order.
        Assert.Equal(["approver-a", "approver-b"], chain.SourceTrack.ApproverIds);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void Verify_RequireBranchStatusAndPolicyHash_PassesWhenSignalsPresent()
    {
        // All Source Track signals are present and satisfy the enabled controls.
        var sbom = TestSbomFactory.CreateSbom(
            TestSbomFactory.CreateBuildInfo(builder =>
            {
                builder.WithParameter("sourceRepository", "https://git.example/stella/repo");
                builder.WithParameter("sourceAuthorId", "alice");
                builder.WithParameter("sourceMergedById", "bob");
                builder.WithParameter("sourceBranchProtected", "true");
                builder.WithParameter("sourceStatusChecksPassed", "true");
                builder.WithParameter("sourcePolicyHash", "sha256:policy123");
            }));
        var chain = new BuildProvenanceChainBuilder().Build(sbom);
        var policy = BuildProvenancePolicyDefaults.Default with
        {
            SourceRequirements = BuildProvenancePolicyDefaults.Default.SourceRequirements with
            {
                RequireNoSelfMerge = true,
                RequireProtectedBranch = true,
                RequireStatusChecksPassed = true,
                RequirePolicyHash = true
            }
        };

        var findings = new SourceVerifier().Verify(sbom, chain, policy).ToArray();

        Assert.DoesNotContain(findings, finding => finding.Type == BuildProvenanceFindingType.SourcePolicyFailed);
    }
}

View File

@@ -6,3 +6,4 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Tests/StellaOps.Scanner.BuildProvenance.Tests/StellaOps.Scanner.BuildProvenance.Tests.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| STS-005 | DONE | SPRINT_20260210_004 - Added SourceVerifier and formatter/policy tests for Source Track controls (execution blocked by pre-existing Policy.Determinization compile errors). |

View File

@@ -5,6 +5,7 @@ using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Cache.LayerCache;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
using Xunit;
namespace StellaOps.Scanner.Cache.Tests.LayerCache;

View File

@@ -11,6 +11,7 @@ using Xunit;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
namespace StellaOps.Scanner.Cache.Tests;
public sealed class LayerCacheRoundTripTests : IAsyncLifetime

View File

@@ -5,6 +5,7 @@ using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Core.Configuration;
using StellaOps.Scanner.Core.TrustAnchors;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
using Xunit;
namespace StellaOps.Scanner.Core.Tests;

View File

@@ -11,6 +11,7 @@ using StellaOps.Scanner.Queue;
using Xunit;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
namespace StellaOps.Scanner.Queue.Tests;
public sealed class QueueLeaseIntegrationTests

View File

@@ -17,6 +17,7 @@ using StellaOps.Scanner.ReachabilityDrift.Attestation;
using Xunit;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
namespace StellaOps.Scanner.ReachabilityDrift.Tests;
public sealed class DriftAttestationServiceTests

View File

@@ -0,0 +1,184 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
/// <summary>
/// Postgres-backed integration tests for the artifact-BOM hot-lookup repository:
/// idempotent monthly upserts, deterministic query ordering, pending-triage
/// filtering, and coarse latency smoke checks. Requires the shared
/// scanner-postgres fixture (Docker-gated in some environments).
/// </summary>
[Collection("scanner-postgres")]
[Trait("Category", TestCategories.Integration)]
public sealed class ArtifactBomRepositoryTests : IAsyncLifetime
{
    private readonly ScannerPostgresFixture _fixture;
    // Assigned in InitializeAsync before any test body runs.
    private IArtifactBomRepository _repository = null!;

    public ArtifactBomRepositoryTests(ScannerPostgresFixture fixture)
    {
        _fixture = fixture;
    }

    /// <summary>
    /// Truncates the shared database, wires a repository against the fixture
    /// connection, and pre-creates partitions so current-month upserts succeed.
    /// </summary>
    public async ValueTask InitializeAsync()
    {
        await _fixture.TruncateAllTablesAsync();
        var options = new ScannerStorageOptions
        {
            Postgres = new PostgresOptions
            {
                ConnectionString = _fixture.ConnectionString,
                SchemaName = _fixture.SchemaName
            }
        };
        var dataSource = new ScannerDataSource(Options.Create(options), NullLogger<ScannerDataSource>.Instance);
        _repository = new PostgresArtifactBomRepository(dataSource, NullLogger<PostgresArtifactBomRepository>.Instance);
        await _repository.EnsureFuturePartitionsAsync(2);
    }

    public ValueTask DisposeAsync() => ValueTask.CompletedTask;

    [Fact]
    public async Task UpsertMonthlyAsync_DuplicateCanonicalAndPayload_IsIdempotent()
    {
        var now = DateTimeOffset.UtcNow;
        var digest = $"sha256:{Guid.NewGuid():N}";
        var canonicalHash = $"sha256:{Guid.NewGuid():N}";
        // Two rows sharing canonical hash + payload digest but differing in build id,
        // timestamp, and evidence score.
        var first = CreateRow(
            buildId: "build-a",
            payloadDigest: digest,
            canonicalHash: canonicalHash,
            insertedAt: now,
            evidenceScore: 42);
        var second = CreateRow(
            buildId: "build-b",
            payloadDigest: digest,
            canonicalHash: canonicalHash,
            insertedAt: now.AddMinutes(1),
            evidenceScore: 97);

        var savedFirst = await _repository.UpsertMonthlyAsync(first);
        var savedSecond = await _repository.UpsertMonthlyAsync(second);

        // Identity fields (build id, insertion time) stick with the first write...
        savedSecond.BuildId.Should().Be(savedFirst.BuildId);
        savedSecond.InsertedAt.Should().Be(savedFirst.InsertedAt);

        var latest = await _repository.TryGetLatestByPayloadDigestAsync(digest);
        latest.Should().NotBeNull();
        latest!.BuildId.Should().Be(savedFirst.BuildId);
        // ...while the evidence score is refreshed from the second write.
        latest.EvidenceScore.Should().Be(97);
    }

    [Fact]
    public async Task FindByComponentPurlAsync_ReturnsDeterministicDescendingOrder()
    {
        var now = DateTimeOffset.UtcNow;
        const string purl = "pkg:deb/debian/openssl@3.0.12";
        // Insert out of chronological order; query must still return newest-first.
        await _repository.UpsertMonthlyAsync(CreateRow("build-1", "sha256:payload-1", "sha256:canon-1", now));
        await _repository.UpsertMonthlyAsync(CreateRow("build-2", "sha256:payload-2", "sha256:canon-2", now.AddMinutes(2)));
        await _repository.UpsertMonthlyAsync(CreateRow("build-3", "sha256:payload-3", "sha256:canon-3", now.AddMinutes(1)));

        var result = await _repository.FindByComponentPurlAsync(purl, limit: 10, offset: 0);

        result.Should().HaveCount(3);
        // Descending by insertion time: build-2 (now+2m), build-3 (now+1m), build-1 (now).
        result.Select(row => row.BuildId).Should().ContainInOrder("build-2", "build-3", "build-1");
    }

    [Fact]
    public async Task FindPendingTriageAsync_OnlyReturnsUnknownOrPendingRows()
    {
        var now = DateTimeOffset.UtcNow;
        // One row with a triage_pending VEX state, one fully resolved.
        var pendingRow = CreateRow(
            buildId: "build-pending",
            payloadDigest: "sha256:payload-pending",
            canonicalHash: "sha256:canon-pending",
            insertedAt: now,
            mergedVexJson: """[{"id":"CVE-2026-0001","state":"triage_pending"}]""");
        var resolvedRow = CreateRow(
            buildId: "build-resolved",
            payloadDigest: "sha256:payload-resolved",
            canonicalHash: "sha256:canon-resolved",
            insertedAt: now.AddMinutes(1),
            mergedVexJson: """[{"id":"CVE-2026-0002","state":"resolved"}]""");
        await _repository.UpsertMonthlyAsync(pendingRow);
        await _repository.UpsertMonthlyAsync(resolvedRow);

        var pending = await _repository.FindPendingTriageAsync(limit: 20, offset: 0);

        // Only the pending row surfaces, and its projected VEX JSON keeps the pending state.
        pending.Should().HaveCount(1);
        pending[0].BuildId.Should().Be("build-pending");
        pending[0].PendingMergedVexJson.Should().Contain("triage_pending");
    }

    [Fact]
    public async Task HotLookupQueries_BenchmarkOnFixture_AreSubSecond()
    {
        // Coarse latency smoke check over 300 seeded rows — not a benchmark; thresholds
        // are generous (1s) to stay stable on shared CI hardware.
        var now = DateTimeOffset.UtcNow;
        for (var i = 0; i < 300; i++)
        {
            // Every fifth row carries an "unknown" state so the pending-triage query has hits.
            var mergedVex = i % 5 == 0
                ? """[{"id":"CVE-2026-0001","state":"unknown"}]"""
                : """[{"id":"CVE-2026-0001","state":"resolved"}]""";
            await _repository.UpsertMonthlyAsync(CreateRow(
                buildId: $"build-bench-{i:D4}",
                payloadDigest: $"sha256:payload-bench-{i:D4}",
                canonicalHash: $"sha256:canon-bench-{i:D4}",
                insertedAt: now.AddSeconds(i),
                mergedVexJson: mergedVex));
        }

        var payloadStopwatch = System.Diagnostics.Stopwatch.StartNew();
        await _repository.TryGetLatestByPayloadDigestAsync("sha256:payload-bench-0299");
        payloadStopwatch.Stop();

        var purlStopwatch = System.Diagnostics.Stopwatch.StartNew();
        await _repository.FindByComponentPurlAsync("pkg:deb/debian/openssl@3.0.12", 50, 0);
        purlStopwatch.Stop();

        var pendingStopwatch = System.Diagnostics.Stopwatch.StartNew();
        await _repository.FindPendingTriageAsync(100, 0);
        pendingStopwatch.Stop();

        payloadStopwatch.ElapsedMilliseconds.Should().BeLessThan(1000);
        purlStopwatch.ElapsedMilliseconds.Should().BeLessThan(1000);
        pendingStopwatch.ElapsedMilliseconds.Should().BeLessThan(1000);
    }

    /// <summary>
    /// Builds a projection row with a fixed single-component CycloneDX canonical BOM
    /// (openssl 3.0.12) and deterministic CAS references derived from the build id.
    /// </summary>
    private static ArtifactBomRow CreateRow(
        string buildId,
        string payloadDigest,
        string canonicalHash,
        DateTimeOffset insertedAt,
        int evidenceScore = 50,
        string? mergedVexJson = """[{"id":"CVE-2026-0000","state":"resolved"}]""")
    {
        return new ArtifactBomRow
        {
            BuildId = buildId,
            CanonicalBomSha256 = canonicalHash,
            PayloadDigest = payloadDigest,
            InsertedAt = insertedAt,
            RawBomRef = $"cas://raw/{buildId}",
            CanonicalBomRef = $"cas://canonical/{buildId}",
            DsseEnvelopeRef = null,
            MergedVexRef = $"cas://vex/{buildId}",
            CanonicalBomJson = """{"format":"cyclonedx","components":[{"name":"openssl","version":"3.0.12","purl":"pkg:deb/debian/openssl@3.0.12"}]}""",
            MergedVexJson = mergedVexJson,
            AttestationsJson = "[]",
            EvidenceScore = evidenceScore,
            RekorTileId = null
        };
    }
}

View File

@@ -12,6 +12,7 @@ using Xunit;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
namespace StellaOps.Scanner.Storage.Tests;
[Collection("scanner-postgres")]

View File

@@ -6,3 +6,6 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| HOT-002 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: migration coverage for `scanner.artifact_boms` partition/index profile. |
| HOT-003 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: repository idempotent write-path coverage for canonical+payload inputs. |
| HOT-006 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: deterministic ordering and latency checks for hot-lookup query methods; local execution is Docker-gated in this environment. |

View File

@@ -183,7 +183,7 @@ public sealed class StackTraceExploitPathViewServiceTests
var view = _service.BuildView(request);
view.PathId.Should().Be("path:test-001");
view.Frames.Should().HaveCountGreaterOrEqualTo(2);
view.Frames.Should().HaveCountGreaterThanOrEqualTo(2);
view.Frames[0].Role.Should().Be(FrameRole.Entrypoint);
view.Frames[^1].Role.Should().Be(FrameRole.Sink);
}
@@ -422,7 +422,7 @@ public sealed class StackTraceExploitPathViewServiceTests
var path = CreateExploitPath();
var chain = StackTraceExploitPathViewService.ExtractCallChain(path);
chain.Should().HaveCountGreaterOrEqualTo(2);
chain.Should().HaveCountGreaterThanOrEqualTo(2);
chain[0].Symbol.Should().Be("POST /api/orders");
chain[^1].Symbol.Should().Be("SqlClient.Execute");
}

View File

@@ -9,6 +9,7 @@ using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Services;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;

View File

@@ -0,0 +1,213 @@
using System.Collections.Concurrent;
using System.Net;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
/// Endpoint tests for the SBOM hot-lookup APIs (latest-by-payload, component search,
/// pending triage), exercising the full submit-then-query flow against the scanner
/// web application with an in-memory artifact object store substituted for CAS.
/// </summary>
public sealed class SbomHotLookupEndpointsTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task HotLookupEndpoints_ReturnLatestComponentAndPendingRows()
    {
        using var secrets = new TestSurfaceSecretsScope();
        await using var factory = await CreateFactoryAsync();
        using var client = factory.CreateClient();

        var (scanId, payloadDigest) = await CreateScanAsync(client, "sha256:hotlookup0001");
        // CycloneDX payload with one component and a triage_pending vulnerability,
        // so all three hot-lookup endpoints have data to return.
        var sbomJson = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": [
            {
              "type": "library",
              "name": "openssl",
              "version": "3.0.12",
              "purl": "pkg:deb/debian/openssl@3.0.12"
            }
          ],
          "vulnerabilities": [
            {
              "id": "CVE-2026-0001",
              "analysis": {
                "state": "triage_pending"
              },
              "affects": [
                { "ref": "pkg:deb/debian/openssl@3.0.12" }
              ]
            }
          ]
        }
        """;

        var submitResponse = await client.PostAsync(
            $"/api/v1/scans/{scanId}/sbom",
            new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json"));
        Assert.Equal(HttpStatusCode.Accepted, submitResponse.StatusCode);

        // Latest-by-payload lookup echoes the digest used at submission.
        var latest = await client.GetFromJsonAsync<SbomHotLookupLatestResponseDto>(
            $"/api/v1/sbom/hot-lookup/payload/{payloadDigest}/latest");
        Assert.NotNull(latest);
        Assert.Equal(payloadDigest, latest!.PayloadDigest);

        // Component search by PURL finds the projected row.
        var components = await client.GetFromJsonAsync<SbomHotLookupComponentSearchResponseDto>(
            "/api/v1/sbom/hot-lookup/components?purl=pkg:deb/debian/openssl@3.0.12&limit=20");
        Assert.NotNull(components);
        Assert.NotEmpty(components!.Items);

        // Pending-triage query returns a JSON array of pending VEX entries.
        var pending = await client.GetFromJsonAsync<SbomHotLookupPendingSearchResponseDto>(
            "/api/v1/sbom/hot-lookup/pending-triage?limit=20");
        Assert.NotNull(pending);
        Assert.NotEmpty(pending!.Items);
        Assert.Equal(JsonValueKind.Array, pending.Items[0].Pending.ValueKind);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task HotLookupEndpoints_DuplicateCanonicalPayload_RemainsSingleProjectionRow()
    {
        using var secrets = new TestSurfaceSecretsScope();
        await using var factory = await CreateFactoryAsync();
        using var client = factory.CreateClient();

        var (scanId, payloadDigest) = await CreateScanAsync(client, "sha256:hotlookup0002");
        const string queryPurl = "pkg:deb/debian/openssl@3.0.12";
        // Two SBOM documents with identical content but different key ordering; the
        // canonicalizer should hash them to the same canonical BOM.
        var first = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": [
            { "name": "openssl", "version": "3.0.12", "purl": "pkg:deb/debian/openssl@3.0.12" }
          ]
        }
        """;
        var second = """
        {
          "specVersion": "1.7",
          "components": [
            { "version": "3.0.12", "purl": "pkg:deb/debian/openssl@3.0.12", "name": "openssl" }
          ],
          "version": 1,
          "bomFormat": "CycloneDX"
        }
        """;

        var firstSubmit = await client.PostAsync(
            $"/api/v1/scans/{scanId}/sbom",
            new StringContent(first, Encoding.UTF8, "application/vnd.cyclonedx+json"));
        Assert.Equal(HttpStatusCode.Accepted, firstSubmit.StatusCode);
        var firstLatest = await client.GetFromJsonAsync<SbomHotLookupLatestResponseDto>(
            $"/api/v1/sbom/hot-lookup/payload/{payloadDigest}/latest");
        Assert.NotNull(firstLatest);

        var secondSubmit = await client.PostAsync(
            $"/api/v1/scans/{scanId}/sbom",
            new StringContent(second, Encoding.UTF8, "application/vnd.cyclonedx+json"));
        Assert.Equal(HttpStatusCode.Accepted, secondSubmit.StatusCode);
        var secondLatest = await client.GetFromJsonAsync<SbomHotLookupLatestResponseDto>(
            $"/api/v1/sbom/hot-lookup/payload/{payloadDigest}/latest");
        Assert.NotNull(secondLatest);

        // Same canonical hash across both submissions, and the component search must
        // still surface exactly one projection row (idempotent upsert).
        Assert.Equal(firstLatest!.CanonicalBomSha256, secondLatest!.CanonicalBomSha256);
        var componentHits = await client.GetFromJsonAsync<SbomHotLookupComponentSearchResponseDto>(
            $"/api/v1/sbom/hot-lookup/components?purl={queryPurl}");
        Assert.NotNull(componentHits);
        Assert.Single(componentHits!.Items);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task HotLookupEndpoints_InvalidComponentQuery_ReturnsBadRequest()
    {
        using var secrets = new TestSurfaceSecretsScope();
        await using var factory = await CreateFactoryAsync();
        using var client = factory.CreateClient();

        // Supplying both purl and name is rejected by the endpoint's query validation.
        var response = await client.GetAsync("/api/v1/sbom/hot-lookup/components?purl=a&name=b");
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    /// <summary>
    /// Builds the scanner application factory with authority disabled and the artifact
    /// object store replaced by an in-memory substitute (no external CAS needed).
    /// </summary>
    private static async Task<ScannerApplicationFactory> CreateFactoryAsync()
    {
        var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
        {
            configuration["scanner:authority:enabled"] = "false";
        }, configureServices: services =>
        {
            services.RemoveAll<IArtifactObjectStore>();
            services.AddSingleton<IArtifactObjectStore>(new InMemoryArtifactObjectStore());
        });
        await factory.InitializeAsync();
        return factory;
    }

    /// <summary>
    /// Submits a scan for a fixed image reference with the given payload digest and
    /// returns the accepted scan id together with the digest for later lookups.
    /// </summary>
    private static async Task<(string ScanId, string PayloadDigest)> CreateScanAsync(HttpClient client, string payloadDigest)
    {
        var response = await client.PostAsJsonAsync("/api/v1/scans", new ScanSubmitRequest
        {
            Image = new ScanImageDescriptor
            {
                Reference = "example.com/hotlookup:1.0",
                Digest = payloadDigest
            }
        });
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
        var payload = await response.Content.ReadFromJsonAsync<ScanSubmitResponse>();
        Assert.NotNull(payload);
        Assert.False(string.IsNullOrWhiteSpace(payload!.ScanId));
        return (payload.ScanId, payloadDigest);
    }

    /// <summary>
    /// Minimal thread-safe in-memory <see cref="IArtifactObjectStore"/> keyed by
    /// "bucket:key"; stores object bytes in a concurrent dictionary.
    /// </summary>
    private sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
    {
        private readonly ConcurrentDictionary<string, byte[]> _objects = new(StringComparer.Ordinal);

        public async Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);
            ArgumentNullException.ThrowIfNull(content);
            // Copy fully into memory so the caller's stream can be disposed safely.
            using var buffer = new MemoryStream();
            await content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
            _objects[$"{descriptor.Bucket}:{descriptor.Key}"] = buffer.ToArray();
        }

        public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);
            if (!_objects.TryGetValue($"{descriptor.Bucket}:{descriptor.Key}", out var bytes))
            {
                return Task.FromResult<Stream?>(null);
            }
            // Read-only stream over the stored bytes; callers cannot mutate the cache.
            return Task.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
        }

        public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);
            _objects.TryRemove($"{descriptor.Bucket}:{descriptor.Key}", out _);
            return Task.CompletedTask;
        }
    }
}

View File

@@ -14,6 +14,7 @@ using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
using Xunit;

View File

@@ -8,3 +8,5 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| SPRINT-20260208-062-VEXREACH-001 | DONE | Added deterministic unit coverage for VEX+reachability filter matrix and controller endpoint (`6` tests passed on filtered run, 2026-02-08). |
| SPRINT-20260208-063-TRIAGE-001 | DONE | Add endpoint tests for triage cluster inbox stats and batch triage actions (2026-02-08). |
| HOT-004 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: added endpoint tests for payload/component/pending triage hot-lookup APIs. |
| HOT-006 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: deterministic query ordering/latency coverage added; local execution is Docker-gated in this environment. |

View File

@@ -25,6 +25,7 @@ using Xunit;
using StellaOps.TestKit;
using FakeTimeProvider = Microsoft.Extensions.Time.Testing.FakeTimeProvider;
namespace StellaOps.Scanner.Worker.Tests;
public sealed class WorkerBasicScanScenarioTests

View File

@@ -54,6 +54,11 @@ buildProvenancePolicy:
sourceRequirements:
requireSignedCommits: true
requireTaggedRelease: false
minimumReviewApprovals: 2
requireNoSelfMerge: true
requireProtectedBranch: true
requireStatusChecksPassed: true
requirePolicyHash: true
allowedRepositories:
- "github.com/myorg/*"
- "gitlab.com/myorg/*"
@@ -77,6 +82,15 @@ buildProvenancePolicy:
slsaLevelOverride: 1
```
Source Track controls read deterministic build metadata parameters when present:
- `sourceReviewCount` or `sourceApproverIds` for review quorum.
- `sourceAuthorId` + `sourceMergedById` for no-self-merge enforcement.
- `sourceBranchProtected` and `sourceStatusChecksPassed` for branch/check gates.
- `sourcePolicyHash` to bind the governance policy snapshot into attestation outputs.
All required controls fail closed when policy enables them and metadata is absent.
## Findings
- MissingBuildProvenance
@@ -89,6 +103,7 @@ buildProvenancePolicy:
- SlsaLevelInsufficient
- InputIntegrityFailed
- OutputMismatch
- SourcePolicyFailed
## Outputs