save checkpoint

This commit is contained in:
master
2026-02-11 01:32:14 +02:00
parent 5593212b41
commit cf5b72974f
2316 changed files with 68799 additions and 3808 deletions

View File

@@ -111,6 +111,7 @@ public sealed class BuildProvenanceAnalyzer : IBuildProvenanceVerifier
BuilderId = chain.BuilderId,
SourceRepository = chain.SourceRepository,
SourceCommit = chain.SourceCommit,
SourceTrack = chain.SourceTrack,
GeneratedAtUtc = DateTimeOffset.UtcNow
};

View File

@@ -35,6 +35,69 @@ public sealed class BuildProvenanceChainBuilder
"revision"
};
private static readonly string[] SourceRefKeys =
{
"sourceRef",
"ref",
"gitRef",
"git.ref"
};
private static readonly string[] ReviewCountKeys =
{
"sourceReviewCount",
"reviewCount",
"pullRequestReviewCount",
"source.reviewCount"
};
private static readonly string[] ApproverIdsKeys =
{
"sourceApproverIds",
"approverIds",
"pullRequestApprovers",
"source.approvers"
};
private static readonly string[] AuthorIdKeys =
{
"sourceAuthorId",
"authorId",
"pullRequestAuthor",
"source.author"
};
private static readonly string[] MergedByIdKeys =
{
"sourceMergedById",
"mergedById",
"pullRequestMergedBy",
"source.mergedBy"
};
private static readonly string[] BranchProtectedKeys =
{
"sourceBranchProtected",
"branchProtected",
"source.branchProtected"
};
private static readonly string[] StatusChecksPassedKeys =
{
"sourceStatusChecksPassed",
"statusChecksPassed",
"ciChecksPassed",
"source.statusChecksPassed"
};
private static readonly string[] PolicyHashKeys =
{
"sourcePolicyHash",
"policyHash",
"branchProtectionPolicyHash",
"source.policyHash"
};
public BuildProvenanceChain Build(ParsedSbom sbom)
{
ArgumentNullException.ThrowIfNull(sbom);
@@ -47,6 +110,7 @@ public sealed class BuildProvenanceChainBuilder
?? buildInfo?.BuildType;
var sourceRepo = FindParameter(buildInfo, SourceRepoKeys);
var sourceCommit = FindParameter(buildInfo, SourceCommitKeys);
var sourceRef = FindParameter(buildInfo, SourceRefKeys);
var configUri = buildInfo?.ConfigSourceUri ?? buildInfo?.ConfigSourceEntrypoint;
var configDigest = buildInfo?.ConfigSourceDigest;
@@ -116,6 +180,17 @@ public sealed class BuildProvenanceChainBuilder
BuilderId = builderId,
SourceRepository = sourceRepo,
SourceCommit = sourceCommit,
SourceTrack = new SourceTrackEvidence
{
Reference = sourceRef,
ReviewCount = FindIntParameter(buildInfo, ReviewCountKeys),
ApproverIds = FindListParameter(buildInfo, ApproverIdsKeys),
AuthorId = FindParameter(buildInfo, AuthorIdKeys),
MergedById = FindParameter(buildInfo, MergedByIdKeys),
BranchProtected = FindBoolParameter(buildInfo, BranchProtectedKeys),
StatusChecksPassed = FindBoolParameter(buildInfo, StatusChecksPassedKeys),
PolicyHash = FindParameter(buildInfo, PolicyHashKeys)
},
BuildConfigUri = configUri,
BuildConfigDigest = configDigest,
Environment = environment,
@@ -124,6 +199,44 @@ public sealed class BuildProvenanceChainBuilder
};
}
/// <summary>
/// Resolves the first matching build parameter and interprets it as a boolean.
/// Returns <c>null</c> when no parameter matches or the value is not a
/// parseable boolean literal.
/// </summary>
private static bool? FindBoolParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
    return FindParameter(buildInfo, keys) switch
    {
        null => null,
        var text when bool.TryParse(text, out var result) => result,
        _ => null,
    };
}
/// <summary>
/// Resolves the first matching build parameter and interprets it as an integer.
/// Returns <c>null</c> when no parameter matches or the value is not a
/// parseable integer.
/// </summary>
private static int? FindIntParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
    var value = FindParameter(buildInfo, keys);
    if (value is null)
    {
        return null;
    }

    // Parse with the invariant culture so provenance evaluation is deterministic
    // regardless of the host machine's regional settings (CA1305); the sibling
    // verifier code already formats with CultureInfo.InvariantCulture.
    return int.TryParse(
        value,
        System.Globalization.NumberStyles.Integer,
        System.Globalization.CultureInfo.InvariantCulture,
        out var parsed)
        ? parsed
        : null;
}
/// <summary>
/// Resolves the first matching build parameter and splits it into a
/// deterministic list: entries are trimmed, case-insensitively de-duplicated,
/// and ordinal-sorted. Returns an empty array when the parameter is absent
/// or blank.
/// </summary>
private static ImmutableArray<string> FindListParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
    var raw = FindParameter(buildInfo, keys);
    if (string.IsNullOrWhiteSpace(raw))
    {
        return [];
    }

    var entries = raw.Split([',', ';', '|'], StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
    return entries
        .Where(static entry => !string.IsNullOrWhiteSpace(entry))
        .Distinct(StringComparer.OrdinalIgnoreCase)
        .OrderBy(static entry => entry, StringComparer.Ordinal)
        .ToImmutableArray();
}
private static string? FindParameter(ParsedBuildInfo? buildInfo, IEnumerable<string> keys)
{
if (buildInfo?.Parameters is null || buildInfo.Parameters.IsEmpty)

View File

@@ -1,6 +1,8 @@
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Scanner.BuildProvenance.Models;
using StellaOps.Scanner.BuildProvenance.Policy;
using System.Collections.Immutable;
using System.Globalization;
namespace StellaOps.Scanner.BuildProvenance.Analyzers;
@@ -60,7 +62,7 @@ public sealed class SourceVerifier
if (policy.SourceRequirements.RequireTaggedRelease)
{
var reference = FindParameter(sbom.BuildInfo, RefKeys);
var reference = chain.SourceTrack.Reference ?? FindParameter(sbom.BuildInfo, RefKeys);
if (!IsTagReference(reference))
{
findings.Add(BuildFinding(
@@ -72,6 +74,82 @@ public sealed class SourceVerifier
}
}
var sourceTrack = chain.SourceTrack;
var sourceRequirements = policy.SourceRequirements;
if (sourceRequirements.MinimumReviewApprovals > 0)
{
var reviewCount = sourceTrack.ReviewCount ?? sourceTrack.ApproverIds.Length;
if (reviewCount < sourceRequirements.MinimumReviewApprovals)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Insufficient source review approvals",
$"Policy requires at least {sourceRequirements.MinimumReviewApprovals} review approvals but found {reviewCount}.",
subject: chain.SourceCommit ?? chain.SourceRepository,
metadata: BuildMetadata(
("minimumReviewApprovals", sourceRequirements.MinimumReviewApprovals.ToString(CultureInfo.InvariantCulture)),
("actualReviewApprovals", reviewCount.ToString(CultureInfo.InvariantCulture)),
("approverIds", string.Join(",", sourceTrack.ApproverIds)))));
}
}
if (sourceRequirements.RequireNoSelfMerge)
{
if (string.IsNullOrWhiteSpace(sourceTrack.AuthorId) || string.IsNullOrWhiteSpace(sourceTrack.MergedById))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Missing author or merge actor identity",
"Policy requires author and merge actor identities to enforce no-self-merge controls.",
subject: chain.SourceCommit ?? chain.SourceRepository));
}
else if (string.Equals(sourceTrack.AuthorId, sourceTrack.MergedById, StringComparison.OrdinalIgnoreCase))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Self-merge detected",
"Policy requires two-party review and prohibits self-merge.",
subject: chain.SourceCommit ?? chain.SourceRepository,
metadata: BuildMetadata(
("authorId", sourceTrack.AuthorId),
("mergedById", sourceTrack.MergedById))));
}
}
if (sourceRequirements.RequireProtectedBranch && sourceTrack.BranchProtected != true)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Protected branch control missing",
"Policy requires verified protected-branch controls for the promoted source revision.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (sourceRequirements.RequireStatusChecksPassed && sourceTrack.StatusChecksPassed != true)
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Required status checks not satisfied",
"Policy requires mandatory source status checks to pass before build/promotion.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (sourceRequirements.RequirePolicyHash && string.IsNullOrWhiteSpace(sourceTrack.PolicyHash))
{
findings.Add(BuildFinding(
BuildProvenanceFindingType.SourcePolicyFailed,
ProvenanceSeverity.High,
"Missing source policy hash",
"Policy hash must be present so source governance can be attested and replayed.",
subject: sourceTrack.Reference ?? chain.SourceCommit ?? chain.SourceRepository));
}
if (string.IsNullOrWhiteSpace(chain.SourceRepository))
{
findings.Add(BuildFinding(
@@ -85,6 +163,27 @@ public sealed class SourceVerifier
return findings;
}
/// <summary>
/// Builds an ordinal-keyed metadata dictionary for findings, silently skipping
/// entries whose key or value is null/blank. A later duplicate key overwrites
/// an earlier one.
/// </summary>
private static ImmutableDictionary<string, string> BuildMetadata(params (string Key, string Value)[] entries)
{
    if (entries.Length == 0)
    {
        return ImmutableDictionary<string, string>.Empty;
    }

    var metadata = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
    foreach (var (key, value) in entries)
    {
        if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value))
        {
            continue;
        }

        metadata[key] = value;
    }

    return metadata.ToImmutable();
}
private static bool IsSigned(ParsedBuildInfo? buildInfo)
{
if (buildInfo?.Parameters is null || buildInfo.Parameters.IsEmpty)
@@ -158,7 +257,8 @@ public sealed class SourceVerifier
ProvenanceSeverity severity,
string title,
string description,
string? subject)
string? subject,
ImmutableDictionary<string, string>? metadata = null)
{
return new ProvenanceFinding
{
@@ -166,7 +266,8 @@ public sealed class SourceVerifier
Severity = severity,
Title = title,
Description = description,
Subject = subject
Subject = subject,
Metadata = metadata ?? ImmutableDictionary<string, string>.Empty
};
}
}

View File

@@ -18,6 +18,7 @@ public sealed record BuildProvenanceChain
{
public static BuildProvenanceChain Empty { get; } = new()
{
SourceTrack = SourceTrackEvidence.Empty,
Environment = ImmutableDictionary<string, string>.Empty,
Inputs = [],
Outputs = []
@@ -26,6 +27,7 @@ public sealed record BuildProvenanceChain
public string? BuilderId { get; init; }
public string? SourceRepository { get; init; }
public string? SourceCommit { get; init; }
public SourceTrackEvidence SourceTrack { get; init; } = SourceTrackEvidence.Empty;
public string? BuildConfigUri { get; init; }
public string? BuildConfigDigest { get; init; }
public ImmutableDictionary<string, string> Environment { get; init; } =
@@ -34,6 +36,23 @@ public sealed record BuildProvenanceChain
public ImmutableArray<BuildOutput> Outputs { get; init; } = [];
}
/// <summary>
/// Source-control review/governance evidence extracted from build provenance
/// parameters (review counts, approver identities, branch-protection and
/// status-check signals). All members are optional: absent evidence stays
/// null/empty so downstream policy checks can fail closed.
/// </summary>
public sealed record SourceTrackEvidence
{
/// <summary>Canonical empty instance used when no source-track evidence was captured.</summary>
public static SourceTrackEvidence Empty { get; } = new()
{
ApproverIds = []
};
/// <summary>Source reference (e.g. git ref) the build was produced from; null when unreported.</summary>
public string? Reference { get; init; }
/// <summary>Number of review approvals; null when unreported.</summary>
public int? ReviewCount { get; init; }
/// <summary>Identities that approved the change; empty when unreported.</summary>
public ImmutableArray<string> ApproverIds { get; init; } = [];
/// <summary>Identity of the change author; null when unreported.</summary>
public string? AuthorId { get; init; }
/// <summary>Identity of the actor that merged the change; null when unreported.</summary>
public string? MergedById { get; init; }
/// <summary>Whether branch-protection controls were verified; null when unknown.</summary>
public bool? BranchProtected { get; init; }
/// <summary>Whether required source status checks passed; null when unknown.</summary>
public bool? StatusChecksPassed { get; init; }
/// <summary>Hash of the source governance policy in effect, used for attestation/replay.</summary>
public string? PolicyHash { get; init; }
}
public sealed record BuildInput
{
public required string Reference { get; init; }
@@ -56,6 +75,7 @@ public sealed record BuildProvenanceAttestation
public string? BuilderId { get; init; }
public string? SourceRepository { get; init; }
public string? SourceCommit { get; init; }
public SourceTrackEvidence SourceTrack { get; init; } = SourceTrackEvidence.Empty;
public DateTimeOffset GeneratedAtUtc { get; init; } = DateTimeOffset.UtcNow;
}
@@ -82,7 +102,8 @@ public enum BuildProvenanceFindingType
NonReproducibleBuild,
SlsaLevelInsufficient,
InputIntegrityFailed,
OutputMismatch
OutputMismatch,
SourcePolicyFailed
}
public enum ProvenanceSeverity

View File

@@ -24,6 +24,11 @@ public sealed record SourceRequirements
{
public bool RequireSignedCommits { get; init; }
public bool RequireTaggedRelease { get; init; }
public int MinimumReviewApprovals { get; init; }
public bool RequireNoSelfMerge { get; init; }
public bool RequireProtectedBranch { get; init; }
public bool RequireStatusChecksPassed { get; init; }
public bool RequirePolicyHash { get; init; }
public ImmutableArray<string> AllowedRepositories { get; init; } = [];
}
@@ -58,6 +63,11 @@ public static class BuildProvenancePolicyDefaults
{
RequireSignedCommits = false,
RequireTaggedRelease = false,
MinimumReviewApprovals = 0,
RequireNoSelfMerge = false,
RequireProtectedBranch = false,
RequireStatusChecksPassed = false,
RequirePolicyHash = false,
AllowedRepositories = []
},
BuildRequirements = new BuildRequirements

View File

@@ -36,7 +36,18 @@ public static class BuildProvenanceReportFormatter
source = new
{
repository = report.ProvenanceChain.SourceRepository,
commit = report.ProvenanceChain.SourceCommit
reference = report.ProvenanceChain.SourceTrack.Reference,
commit = report.ProvenanceChain.SourceCommit,
policyHash = report.ProvenanceChain.SourceTrack.PolicyHash,
review = new
{
count = report.ProvenanceChain.SourceTrack.ReviewCount,
approvers = report.ProvenanceChain.SourceTrack.ApproverIds,
authorId = report.ProvenanceChain.SourceTrack.AuthorId,
mergedById = report.ProvenanceChain.SourceTrack.MergedById,
branchProtected = report.ProvenanceChain.SourceTrack.BranchProtected,
statusChecksPassed = report.ProvenanceChain.SourceTrack.StatusChecksPassed
}
},
buildConfig = new
{

View File

@@ -6,3 +6,4 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Libraries/StellaOps.Scanner.BuildProvenance/StellaOps.Scanner.BuildProvenance.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| STS-002 | DONE | SPRINT_20260210_004 - Added Source Track policy controls, chain capture fields, and fail-closed source policy findings. |

View File

@@ -0,0 +1,45 @@
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Row model for scanner.artifact_boms hot-lookup projection.
/// </summary>
public sealed class ArtifactBomRow
{
// Build identifier; part of the (build_id, inserted_at) primary key.
public string BuildId { get; set; } = default!;
// SHA-256 of the canonical SBOM; used with PayloadDigest for upsert idempotency.
public string CanonicalBomSha256 { get; set; } = default!;
// Digest of the stored payload; lookup key for latest-row queries.
public string PayloadDigest { get; set; } = default!;
// Partition key (monthly range partitioning); part of the primary key.
public DateTimeOffset InsertedAt { get; set; }
// CAS/object-storage references; authoritative payloads live there, not in this row.
public string? RawBomRef { get; set; }
public string? CanonicalBomRef { get; set; }
public string? DsseEnvelopeRef { get; set; }
public string? MergedVexRef { get; set; }
// JSONB column projections, materialized as raw JSON text for transport.
public string? CanonicalBomJson { get; set; }
public string? MergedVexJson { get; set; }
public string? AttestationsJson { get; set; }
public int EvidenceScore { get; set; }
public string? RekorTileId { get; set; }
// Populated only by pending-triage queries: the subset of merged VEX entries
// in "unknown"/"triage_pending" state; null elsewhere.
public string? PendingMergedVexJson { get; set; }
}
/// <summary>
/// Result row for retention operations over artifact_boms partitions.
/// </summary>
public sealed class ArtifactBomPartitionDropRow
{
// Name of the monthly partition considered for retention (e.g. artifact_boms_2026_01).
public string PartitionName { get; set; } = string.Empty;
// True when the partition was actually dropped; false in dry-run mode.
public bool Dropped { get; set; }
}

View File

@@ -76,6 +76,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<EntryTraceRepository>();
services.AddScoped<RubyPackageInventoryRepository>();
services.AddScoped<BunPackageInventoryRepository>();
services.AddScoped<IArtifactBomRepository, PostgresArtifactBomRepository>();
services.TryAddSingleton<IClassificationHistoryRepository, ClassificationHistoryRepository>();
services.TryAddSingleton<IClassificationChangeTracker, ClassificationChangeTracker>();
services.AddScoped<IProofSpineRepository, PostgresProofSpineRepository>();

View File

@@ -0,0 +1,151 @@
-- SPDX-License-Identifier: BUSL-1.1
-- Copyright (c) 2026 StellaOps
-- Sprint: SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract
-- Task: HOT-002
--
-- Scanner hot-lookup projection for SBOM/attestation metadata.
-- Authoritative full payloads remain in CAS/object storage.
-- Hot-lookup projection rows; full payloads stay in CAS (only refs + JSONB projections here).
-- inserted_at participates in the PK because it is the range-partitioning key.
CREATE TABLE IF NOT EXISTS scanner.artifact_boms (
build_id TEXT NOT NULL,
canonical_bom_sha256 TEXT NOT NULL,
payload_digest TEXT NOT NULL,
inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(),
raw_bom_ref TEXT,
canonical_bom_ref TEXT,
dsse_envelope_ref TEXT,
merged_vex_ref TEXT,
canonical_bom JSONB,
merged_vex JSONB,
attestations JSONB,
evidence_score INTEGER NOT NULL DEFAULT 0,
rekor_tile_id TEXT,
PRIMARY KEY (build_id, inserted_at)
) PARTITION BY RANGE (inserted_at);
COMMENT ON TABLE scanner.artifact_boms IS
'Monthly-partitioned Scanner SBOM/attestation hot-lookup projection for digest/component/triage queries.';
-- B-tree indexes serving latest-by-digest and canonical-hash lookups.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_payload_digest
ON scanner.artifact_boms (payload_digest, inserted_at DESC);
CREATE INDEX IF NOT EXISTS ix_artifact_boms_canonical_sha
ON scanner.artifact_boms (canonical_bom_sha256);
CREATE INDEX IF NOT EXISTS ix_artifact_boms_inserted_at
ON scanner.artifact_boms (inserted_at DESC);
-- GIN jsonb_path_ops indexes accelerate jsonb_path_exists() component/VEX queries.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_canonical_gin
ON scanner.artifact_boms USING GIN (canonical_bom jsonb_path_ops);
CREATE INDEX IF NOT EXISTS ix_artifact_boms_merged_vex_gin
ON scanner.artifact_boms USING GIN (merged_vex jsonb_path_ops);
-- Partial index targeting rows that still have pending-triage VEX entries.
CREATE INDEX IF NOT EXISTS ix_artifact_boms_pending_vex
ON scanner.artifact_boms USING GIN (merged_vex jsonb_path_ops)
WHERE jsonb_path_exists(
merged_vex,
'$[*] ? (@.state == "unknown" || @.state == "triage_pending")');
-- Creates (idempotently) the monthly partition for (p_year, p_month) and
-- returns its name, e.g. artifact_boms_2026_02.
CREATE OR REPLACE FUNCTION scanner.create_artifact_boms_partition(p_year INT, p_month INT)
RETURNS TEXT AS $$
DECLARE
v_start DATE;
v_end DATE;
v_partition_name TEXT;
BEGIN
IF p_month < 1 OR p_month > 12 THEN
RAISE EXCEPTION 'Invalid month % (expected 1-12)', p_month;
END IF;
v_start := make_date(p_year, p_month, 1);
v_end := (v_start + INTERVAL '1 month')::DATE;
-- Month is zero-padded so names sort lexicographically (matched by retention regex).
v_partition_name := format('artifact_boms_%s_%s', p_year, lpad(p_month::TEXT, 2, '0'));
-- %I/%L quoting guards identifiers and literals in the dynamic DDL.
EXECUTE format(
'CREATE TABLE IF NOT EXISTS scanner.%I PARTITION OF scanner.artifact_boms FOR VALUES FROM (%L) TO (%L)',
v_partition_name,
v_start::TIMESTAMPTZ,
v_end::TIMESTAMPTZ);
RETURN v_partition_name;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.create_artifact_boms_partition IS
'Creates a monthly partition for scanner.artifact_boms and returns the partition name.';
-- Ensures partitions exist for the current UTC month plus p_months_ahead
-- following months; returns one row per partition created or verified.
CREATE OR REPLACE FUNCTION scanner.ensure_artifact_boms_future_partitions(p_months_ahead INT DEFAULT 1)
RETURNS TABLE(partition_name TEXT) AS $$
DECLARE
v_base_month DATE;
v_current DATE;
v_month_offset INT;
BEGIN
IF p_months_ahead < 0 THEN
RAISE EXCEPTION 'p_months_ahead must be >= 0';
END IF;
-- Anchor on UTC so partition boundaries are timezone-stable.
v_base_month := date_trunc('month', now() AT TIME ZONE 'UTC')::DATE;
FOR v_month_offset IN 0..p_months_ahead LOOP
v_current := (v_base_month + (v_month_offset || ' months')::INTERVAL)::DATE;
partition_name := scanner.create_artifact_boms_partition(
EXTRACT(YEAR FROM v_current)::INT,
EXTRACT(MONTH FROM v_current)::INT);
RETURN NEXT;
END LOOP;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.ensure_artifact_boms_future_partitions IS
'Ensures current and upcoming scanner.artifact_boms monthly partitions exist.';
-- Retention: drops monthly partitions whose month starts before
-- (current UTC month - p_retain_months). Dry-run mode only reports candidates.
CREATE OR REPLACE FUNCTION scanner.drop_artifact_boms_partitions_older_than(
p_retain_months INT DEFAULT 12,
p_dry_run BOOLEAN DEFAULT FALSE)
RETURNS TABLE(partition_name TEXT, dropped BOOLEAN) AS $$
DECLARE
v_cutoff DATE;
v_partition RECORD;
BEGIN
IF p_retain_months < 1 THEN
RAISE EXCEPTION 'p_retain_months must be >= 1';
END IF;
v_cutoff := (date_trunc('month', now() AT TIME ZONE 'UTC')::DATE - (p_retain_months || ' months')::INTERVAL)::DATE;
-- Enumerate child partitions via pg_inherits; the name regex restricts the
-- drop to partitions following the artifact_boms_YYYY_MM naming scheme.
FOR v_partition IN
SELECT c.relname AS relname
FROM pg_inherits i
JOIN pg_class c ON c.oid = i.inhrelid
JOIN pg_class p ON p.oid = i.inhparent
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE p.relname = 'artifact_boms'
AND n.nspname = 'scanner'
AND c.relname ~ '^artifact_boms_[0-9]{4}_[0-9]{2}$'
AND to_date(substring(c.relname from 'artifact_boms_([0-9]{4}_[0-9]{2})'), 'YYYY_MM') < v_cutoff
ORDER BY c.relname
LOOP
partition_name := v_partition.relname;
IF p_dry_run THEN
dropped := FALSE;
RETURN NEXT;
ELSE
EXECUTE format('DROP TABLE IF EXISTS scanner.%I', v_partition.relname);
dropped := TRUE;
RETURN NEXT;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION scanner.drop_artifact_boms_partitions_older_than IS
'Drops scanner.artifact_boms monthly partitions older than retain window; supports dry-run mode.';
-- Ensure current and next month partitions exist so month-boundary ingest does not fail.
SELECT scanner.ensure_artifact_boms_future_partitions(1);

View File

@@ -26,5 +26,7 @@ internal static class MigrationIds
public const string SbomSources = "020_sbom_sources.sql";
public const string SecretDetectionSettings = "021_secret_detection_settings.sql";
public const string ReachabilityEvidence = "022_reachability_evidence.sql";
public const string RuntimeObservations = "023_runtime_observations.sql";
public const string ScoreHistory = "024_score_history.sql";
public const string ArtifactBomsHotLookup = "025_artifact_boms_hot_lookup.sql";
}

View File

@@ -0,0 +1,425 @@
using Dapper;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation for scanner.artifact_boms hot-lookup projection queries.
/// </summary>
public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
{
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresArtifactBomRepository> _logger;
// Schema is configurable on the data source; fall back to the scanner default.
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
// NOTE(review): table/schema names are interpolated into SQL text; they come
// from configuration, not user input — confirm SchemaName is validated upstream.
private string TableName => $"{SchemaName}.artifact_boms";
public PostgresArtifactBomRepository(
ScannerDataSource dataSource,
ILogger<PostgresArtifactBomRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Idempotent upsert scoped to the partition month: if a row with the same
/// (canonical hash, payload digest) already exists in the month window it is
/// updated in place; otherwise a new row is inserted. A transaction-scoped
/// advisory lock on (hash|digest|month) serializes concurrent writers so the
/// select-then-insert race cannot create duplicates.
/// </summary>
public async Task<ArtifactBomRow> UpsertMonthlyAsync(ArtifactBomRow row, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(row);
ArgumentException.ThrowIfNullOrWhiteSpace(row.BuildId);
ArgumentException.ThrowIfNullOrWhiteSpace(row.CanonicalBomSha256);
ArgumentException.ThrowIfNullOrWhiteSpace(row.PayloadDigest);
// Default timestamp to "now" (UTC) and normalize caller-provided values to UTC.
var insertedAt = row.InsertedAt == default
? DateTimeOffset.UtcNow
: row.InsertedAt.ToUniversalTime();
var monthStart = new DateTimeOffset(insertedAt.Year, insertedAt.Month, 1, 0, 0, 0, TimeSpan.Zero);
var monthEnd = monthStart.AddMonths(1);
// Advisory-lock key; hashed server-side via hashtext() below.
var lockKey = $"{row.CanonicalBomSha256}|{row.PayloadDigest}|{monthStart:yyyy-MM}";
const string selectExistingTemplate = """
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
raw_bom_ref AS RawBomRef,
canonical_bom_ref AS CanonicalBomRef,
dsse_envelope_ref AS DsseEnvelopeRef,
merged_vex_ref AS MergedVexRef,
canonical_bom::text AS CanonicalBomJson,
merged_vex::text AS MergedVexJson,
attestations::text AS AttestationsJson,
evidence_score AS EvidenceScore,
rekor_tile_id AS RekorTileId
FROM {0}
WHERE canonical_bom_sha256 = @CanonicalBomSha256
AND payload_digest = @PayloadDigest
AND inserted_at >= @MonthStart
AND inserted_at < @MonthEnd
ORDER BY inserted_at DESC, build_id ASC
LIMIT 1
FOR UPDATE
"""
var selectExistingSql = string.Format(selectExistingTemplate, TableName);
var updateExistingSql = $"""
UPDATE {TableName}
SET
raw_bom_ref = @RawBomRef,
canonical_bom_ref = @CanonicalBomRef,
dsse_envelope_ref = @DsseEnvelopeRef,
merged_vex_ref = @MergedVexRef,
canonical_bom = @CanonicalBomJson::jsonb,
merged_vex = @MergedVexJson::jsonb,
attestations = @AttestationsJson::jsonb,
evidence_score = @EvidenceScore,
rekor_tile_id = @RekorTileId
WHERE build_id = @BuildId
AND inserted_at = @InsertedAt
"""
var insertSql = $"""
INSERT INTO {TableName} (
build_id,
canonical_bom_sha256,
payload_digest,
inserted_at,
raw_bom_ref,
canonical_bom_ref,
dsse_envelope_ref,
merged_vex_ref,
canonical_bom,
merged_vex,
attestations,
evidence_score,
rekor_tile_id
) VALUES (
@BuildId,
@CanonicalBomSha256,
@PayloadDigest,
@InsertedAt,
@RawBomRef,
@CanonicalBomRef,
@DsseEnvelopeRef,
@MergedVexRef,
@CanonicalBomJson::jsonb,
@MergedVexJson::jsonb,
@AttestationsJson::jsonb,
@EvidenceScore,
@RekorTileId
)
ON CONFLICT (build_id, inserted_at) DO UPDATE SET
canonical_bom_sha256 = EXCLUDED.canonical_bom_sha256,
payload_digest = EXCLUDED.payload_digest,
raw_bom_ref = EXCLUDED.raw_bom_ref,
canonical_bom_ref = EXCLUDED.canonical_bom_ref,
dsse_envelope_ref = EXCLUDED.dsse_envelope_ref,
merged_vex_ref = EXCLUDED.merged_vex_ref,
canonical_bom = EXCLUDED.canonical_bom,
merged_vex = EXCLUDED.merged_vex,
attestations = EXCLUDED.attestations,
evidence_score = EXCLUDED.evidence_score,
rekor_tile_id = EXCLUDED.rekor_tile_id
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
// pg_advisory_xact_lock releases automatically at commit/rollback.
var command = new CommandDefinition(
"SELECT pg_advisory_xact_lock(hashtext(@LockKey));",
new { LockKey = lockKey },
transaction,
cancellationToken: cancellationToken);
await connection.ExecuteAsync(command).ConfigureAwait(false);
var existing = await connection.QuerySingleOrDefaultAsync<ArtifactBomRow>(
new CommandDefinition(
selectExistingSql,
new
{
row.CanonicalBomSha256,
row.PayloadDigest,
MonthStart = monthStart,
MonthEnd = monthEnd
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
if (existing is not null)
{
// Update path: keep the existing (build_id, inserted_at) identity and
// overwrite the mutable projection columns with the caller's values.
await connection.ExecuteAsync(
new CommandDefinition(
updateExistingSql,
new
{
BuildId = existing.BuildId,
InsertedAt = existing.InsertedAt,
row.RawBomRef,
row.CanonicalBomRef,
row.DsseEnvelopeRef,
row.MergedVexRef,
row.CanonicalBomJson,
row.MergedVexJson,
row.AttestationsJson,
row.EvidenceScore,
row.RekorTileId
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
// Mirror the database update onto the returned entity.
existing.RawBomRef = row.RawBomRef;
existing.CanonicalBomRef = row.CanonicalBomRef;
existing.DsseEnvelopeRef = row.DsseEnvelopeRef;
existing.MergedVexRef = row.MergedVexRef;
existing.CanonicalBomJson = row.CanonicalBomJson;
existing.MergedVexJson = row.MergedVexJson;
existing.AttestationsJson = row.AttestationsJson;
existing.EvidenceScore = row.EvidenceScore;
existing.RekorTileId = row.RekorTileId;
return existing;
}
// Insert path: ON CONFLICT covers a (build_id, inserted_at) PK collision.
await connection.ExecuteAsync(
new CommandDefinition(
insertSql,
new
{
row.BuildId,
row.CanonicalBomSha256,
row.PayloadDigest,
InsertedAt = insertedAt,
row.RawBomRef,
row.CanonicalBomRef,
row.DsseEnvelopeRef,
row.MergedVexRef,
row.CanonicalBomJson,
row.MergedVexJson,
row.AttestationsJson,
row.EvidenceScore,
row.RekorTileId
},
transaction,
cancellationToken: cancellationToken)).ConfigureAwait(false);
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
// Side effect: the caller's row is mutated with the effective timestamp.
row.InsertedAt = insertedAt;
return row;
}
/// <summary>
/// Returns the most recent row for a payload digest (only scalar columns are
/// projected; JSONB payload columns are not loaded), or null when none exists.
/// </summary>
public async Task<ArtifactBomRow?> TryGetLatestByPayloadDigestAsync(
string payloadDigest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(payloadDigest);
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore,
rekor_tile_id AS RekorTileId
FROM {TableName}
WHERE payload_digest = @PayloadDigest
ORDER BY inserted_at DESC, build_id ASC
LIMIT 1
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.QuerySingleOrDefaultAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { PayloadDigest = payloadDigest.Trim() },
cancellationToken: cancellationToken)).ConfigureAwait(false);
}
/// <summary>
/// Finds rows whose canonical BOM contains a component with the exact PURL,
/// newest first, with limit/offset pagination (limit capped at 500).
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindByComponentPurlAsync(
string purl,
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(purl);
ValidatePagination(limit, offset);
// jsonb_path_exists with a bound $purl variable; served by the GIN
// jsonb_path_ops index on canonical_bom.
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
'$.components[*] ? (@.purl == $purl)',
jsonb_build_object('purl', to_jsonb(@Purl::text)))
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { Purl = purl.Trim(), Limit = limit, Offset = offset },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Finds rows whose canonical BOM contains a component by name, optionally
/// filtered by a minimum version. NOTE(review): the jsonpath ">=" on version
/// is a lexicographic string comparison, not semver ordering, and the name is
/// lowercased client-side — assumes canonical BOM component names are
/// normalized to lower case; confirm against the canonicalizer.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindByComponentNameAsync(
string componentName,
string? minVersion,
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(componentName);
ValidatePagination(limit, offset);
var hasMinVersion = !string.IsNullOrWhiteSpace(minVersion);
var jsonPath = hasMinVersion
? "$.components[*] ? (@.name == $name && @.version >= $minVersion)"
: "$.components[*] ? (@.name == $name)"
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
@JsonPath::jsonpath,
jsonb_build_object(
'name', to_jsonb(@Name::text),
'minVersion', to_jsonb(@MinVersion::text)))
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new
{
JsonPath = jsonPath,
Name = componentName.Trim().ToLowerInvariant(),
// $minVersion is always bound; it is simply unused by the no-minVersion path.
MinVersion = minVersion?.Trim() ?? string.Empty,
Limit = limit,
Offset = offset
},
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Finds rows whose merged VEX still contains "unknown"/"triage_pending"
/// entries; the matching subset is projected into PendingMergedVexJson.
/// The WHERE clause matches the partial pending-VEX index predicate.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomRow>> FindPendingTriageAsync(
int limit,
int offset,
CancellationToken cancellationToken = default)
{
ValidatePagination(limit, offset);
const string PendingPath = "$[*] ? (@.state == \"unknown\" || @.state == \"triage_pending\")";
var sql = $"""
SELECT
build_id AS BuildId,
canonical_bom_sha256 AS CanonicalBomSha256,
payload_digest AS PayloadDigest,
inserted_at AS InsertedAt,
evidence_score AS EvidenceScore,
jsonb_path_query_array(merged_vex, @PendingPath::jsonpath)::text AS PendingMergedVexJson
FROM {TableName}
WHERE jsonb_path_exists(merged_vex, @PendingPath::jsonpath)
ORDER BY inserted_at DESC, build_id ASC
LIMIT @Limit
OFFSET @Offset
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomRow>(
new CommandDefinition(
sql,
new { PendingPath, Limit = limit, Offset = offset },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
/// <summary>
/// Delegates to the scanner.ensure_artifact_boms_future_partitions() SQL helper
/// to pre-create the current and upcoming monthly partitions.
/// </summary>
public async Task EnsureFuturePartitionsAsync(int monthsAhead, CancellationToken cancellationToken = default)
{
if (monthsAhead < 0)
{
throw new ArgumentOutOfRangeException(nameof(monthsAhead), "monthsAhead must be >= 0.");
}
var sql = $"SELECT partition_name FROM {SchemaName}.ensure_artifact_boms_future_partitions(@MonthsAhead);";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var partitions = await connection.QueryAsync<string>(
new CommandDefinition(
sql,
new { MonthsAhead = monthsAhead },
cancellationToken: cancellationToken)).ConfigureAwait(false);
// Dapper buffers QueryAsync results by default, so Count() does not re-query.
_logger.LogInformation(
"Ensured scanner.artifact_boms partitions monthsAhead={MonthsAhead} createdOrVerified={Count}",
monthsAhead,
partitions.Count());
}
/// <summary>
/// Delegates to scanner.drop_artifact_boms_partitions_older_than() to apply
/// the retention window; dryRun only reports which partitions would drop.
/// </summary>
public async Task<IReadOnlyList<ArtifactBomPartitionDropRow>> DropOldPartitionsAsync(
int retainMonths,
bool dryRun,
CancellationToken cancellationToken = default)
{
if (retainMonths < 1)
{
throw new ArgumentOutOfRangeException(nameof(retainMonths), "retainMonths must be >= 1.");
}
var sql = $"""
SELECT
partition_name AS PartitionName,
dropped AS Dropped
FROM {SchemaName}.drop_artifact_boms_partitions_older_than(@RetainMonths, @DryRun)
"""
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var rows = await connection.QueryAsync<ArtifactBomPartitionDropRow>(
new CommandDefinition(
sql,
new { RetainMonths = retainMonths, DryRun = dryRun },
cancellationToken: cancellationToken)).ConfigureAwait(false);
return rows.AsList();
}
// Shared pagination guard: limit in [1, 500], offset >= 0.
private static void ValidatePagination(int limit, int offset)
{
if (limit <= 0 || limit > 500)
{
throw new ArgumentOutOfRangeException(nameof(limit), "limit must be between 1 and 500.");
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "offset must be >= 0.");
}
}
}

View File

@@ -0,0 +1,62 @@
using StellaOps.Scanner.Storage.Entities;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Repository for Scanner SBOM/attestation hot-lookup projection rows.
/// </summary>
public interface IArtifactBomRepository
{
/// <summary>
/// Upserts a projection row in the current partition month window using
/// canonical hash and payload digest idempotency semantics.
/// </summary>
/// <returns>The effective stored row (existing row updated in place, or the inserted row).</returns>
Task<ArtifactBomRow> UpsertMonthlyAsync(ArtifactBomRow row, CancellationToken cancellationToken = default);
/// <summary>
/// Returns the latest projection row for a payload digest.
/// </summary>
/// <returns>The newest matching row, or null when none exists.</returns>
Task<ArtifactBomRow?> TryGetLatestByPayloadDigestAsync(
string payloadDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows containing a component with the specified PURL.
/// NOTE(review): implementations may cap <paramref name="limit"/> (the Postgres
/// implementation rejects limits above 500) — confirm the intended contract.
/// </summary>
Task<IReadOnlyList<ArtifactBomRow>> FindByComponentPurlAsync(
string purl,
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows containing a component name and optional minimum version.
/// </summary>
Task<IReadOnlyList<ArtifactBomRow>> FindByComponentNameAsync(
string componentName,
string? minVersion,
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Finds projection rows with pending triage states in merged VEX payloads.
/// </summary>
Task<IReadOnlyList<ArtifactBomRow>> FindPendingTriageAsync(
int limit,
int offset,
CancellationToken cancellationToken = default);
/// <summary>
/// Ensures current/future monthly partitions exist.
/// </summary>
Task EnsureFuturePartitionsAsync(int monthsAhead, CancellationToken cancellationToken = default);
/// <summary>
/// Drops old partitions according to retention window (in months).
/// </summary>
/// <returns>One result row per candidate partition; in dry-run mode nothing is dropped.</returns>
Task<IReadOnlyList<ArtifactBomPartitionDropRow>> DropOldPartitionsAsync(
int retainMonths,
bool dryRun,
CancellationToken cancellationToken = default);
}

View File

@@ -6,3 +6,6 @@ Source of truth: `docs/implplan/SPRINT_20260130_002_Tools_csproj_remediation_sol
| --- | --- | --- |
| REMED-05 | TODO | Remediation checklist: docs/implplan/audits/csproj-standards/remediation/checklists/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.md. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |
| HOT-002 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: added `scanner.artifact_boms` partitioned schema + indexes + helper functions. |
| HOT-003 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: implemented ingestion projection and idempotent upsert flow. |
| HOT-005 | DONE | `SPRINT_20260210_001_DOCS_sbom_attestation_hot_lookup_contract.md`: delivered partition pre-create and retention maintenance jobs/assets. |

View File

@@ -6,6 +6,9 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Scanner.Triage.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" />