save dev progress

This commit is contained in:
StellaOps Bot
2025-12-26 00:32:35 +02:00
parent aa70af062e
commit ed3079543c
142 changed files with 23771 additions and 232 deletions

View File

@@ -59,6 +59,39 @@ public sealed record CanonicalAdvisory
/// <summary>Primary source edge (highest precedence).</summary>
public SourceEdge? PrimarySource => SourceEdges.Count > 0 ? SourceEdges[0] : null;
/// <summary>Distro-specific provenance scopes with backport information.</summary>
public IReadOnlyList<ProvenanceScopeDto> ProvenanceScopes { get; init; } = [];
}
/// <summary>
/// Distro-specific provenance information for a canonical advisory.
/// </summary>
public sealed record ProvenanceScopeDto
{
    /// <summary>Provenance scope identifier.</summary>
    public Guid Id { get; init; }

    /// <summary>Linux distribution release identifier (e.g., debian:bookworm, rhel:9.2).</summary>
    public required string DistroRelease { get; init; }

    /// <summary>Distro's backported version if different from upstream fixed version; null when the distro ships the upstream fix unchanged.</summary>
    public string? BackportVersion { get; init; }

    /// <summary>Upstream commit SHA or patch identifier.</summary>
    public string? PatchId { get; init; }

    /// <summary>Source of the patch: upstream, distro, or vendor.</summary>
    public string? PatchOrigin { get; init; }

    /// <summary>Reference to proof entry in proofchain (if any).</summary>
    public Guid? EvidenceRef { get; init; }

    /// <summary>Confidence score from BackportProofService (0.0-1.0). Not validated here; producers are responsible for range.</summary>
    public double Confidence { get; init; }

    /// <summary>When the provenance was last updated (UTC offset preserved).</summary>
    public DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>

View File

@@ -77,6 +77,15 @@ public interface ICanonicalAdvisoryStore
#endregion
#region Provenance Scope Operations
/// <summary>
/// Gets all provenance scopes for a canonical advisory.
/// </summary>
Task<IReadOnlyList<ProvenanceScopeDto>> GetProvenanceScopesAsync(Guid canonicalId, CancellationToken ct = default);
#endregion
#region Source Operations
/// <summary>

View File

@@ -0,0 +1,44 @@
// -----------------------------------------------------------------------------
// CanonicalImportedEvent.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Task: IMPORT-8200-022
// Description: Event emitted when a canonical advisory is imported from a bundle
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Federation.Events;
/// <summary>
/// Event emitted when a canonical advisory is imported from a federation bundle.
/// </summary>
public sealed record CanonicalImportedEvent
{
    /// <summary>Canonical advisory ID.</summary>
    public required Guid CanonicalId { get; init; }

    /// <summary>CVE identifier (e.g., "CVE-2024-1234").</summary>
    public string? Cve { get; init; }

    /// <summary>Affects key (PURL or NEVRA pattern).</summary>
    public required string AffectsKey { get; init; }

    /// <summary>Merge hash for canonical identity.</summary>
    public required string MergeHash { get; init; }

    /// <summary>Import action: Created, Updated, or Skipped (string form of the merge action).</summary>
    public required string Action { get; init; }

    /// <summary>Bundle hash from which this canonical was imported.</summary>
    public required string BundleHash { get; init; }

    /// <summary>Source site identifier.</summary>
    public required string SiteId { get; init; }

    /// <summary>When the import occurred (one timestamp shared by all events of an import run).</summary>
    public DateTimeOffset ImportedAt { get; init; }

    /// <summary>Whether a conflict was detected during merge.</summary>
    public bool HadConflict { get; init; }

    /// <summary>Conflict field if a conflict was detected (e.g., "severity"); null otherwise.</summary>
    public string? ConflictField { get; init; }
}

View File

@@ -0,0 +1,451 @@
// -----------------------------------------------------------------------------
// BundleImportService.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Tasks: IMPORT-8200-020 through IMPORT-8200-023
// Description: Orchestrates federation bundle import.
// -----------------------------------------------------------------------------
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Cache.Valkey;
using StellaOps.Concelier.Federation.Events;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Service for importing federation bundles.
/// </summary>
/// <remarks>
/// Import pipeline: parse -> verify -> cursor/duplicate checks -> merge canonicals,
/// edges and deletions -> advance sync ledger -> publish import events.
/// Event publishing and cache indexing are best-effort: their failures are logged
/// but never fail the import.
/// </remarks>
public sealed class BundleImportService : IBundleImportService
{
    private readonly IBundleVerifier _verifier;
    private readonly IBundleMergeService _mergeService;
    private readonly ISyncLedgerRepository _ledgerRepository;
    // Optional: when null, no import events are published.
    private readonly IEventStream<CanonicalImportedEvent>? _eventStream;
    // Optional: when null, cache index maintenance is skipped.
    private readonly IAdvisoryCacheService? _cacheService;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<BundleImportService> _logger;

    /// <summary>
    /// Creates the import service. <paramref name="eventStream"/> and
    /// <paramref name="cacheService"/> are optional integrations; when
    /// <paramref name="timeProvider"/> is null the system clock is used.
    /// </summary>
    public BundleImportService(
        IBundleVerifier verifier,
        IBundleMergeService mergeService,
        ISyncLedgerRepository ledgerRepository,
        ILogger<BundleImportService> logger,
        IEventStream<CanonicalImportedEvent>? eventStream = null,
        IAdvisoryCacheService? cacheService = null,
        TimeProvider? timeProvider = null)
    {
        _verifier = verifier;
        _mergeService = mergeService;
        _ledgerRepository = ledgerRepository;
        _eventStream = eventStream;
        _cacheService = cacheService;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Returns a failed result (rather than throwing) for verification failures,
    /// stale cursors, conflicts under <c>ConflictResolution.Fail</c>, and any
    /// unexpected exception.
    /// </remarks>
    public async Task<BundleImportResult> ImportAsync(
        Stream bundleStream,
        BundleImportOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        options ??= new BundleImportOptions();
        var stopwatch = Stopwatch.StartNew();
        try
        {
            // 1. Parse bundle
            using var reader = await BundleReader.ReadAsync(bundleStream, cancellationToken);
            var manifest = reader.Manifest;
            _logger.LogInformation("Importing bundle {BundleHash} from site {SiteId}",
                manifest.BundleHash, manifest.SiteId);

            // 2. Verify bundle (hash, and signature unless explicitly skipped)
            var validation = await _verifier.VerifyAsync(
                reader,
                options.SkipSignatureVerification,
                cancellationToken);
            if (!validation.IsValid)
            {
                _logger.LogWarning("Bundle verification failed: {Errors}",
                    string.Join("; ", validation.Errors));
                return BundleImportResult.Failed(
                    manifest.BundleHash,
                    string.Join("; ", validation.Errors),
                    stopwatch.Elapsed);
            }

            // 3. Check cursor (must be strictly after the current one, unless Force)
            var currentCursor = await _ledgerRepository.GetCursorAsync(manifest.SiteId, cancellationToken);
            if (currentCursor != null && !options.Force)
            {
                if (!CursorComparer.IsAfter(manifest.ExportCursor, currentCursor))
                {
                    return BundleImportResult.Failed(
                        manifest.BundleHash,
                        $"Bundle cursor {manifest.ExportCursor} is not after current cursor {currentCursor}",
                        stopwatch.Elapsed);
                }
            }

            // 4. Check for duplicate bundle: re-importing an already-recorded bundle
            // is treated as success with every canonical counted as skipped.
            var existingBundle = await _ledgerRepository.GetByBundleHashAsync(manifest.BundleHash, cancellationToken);
            if (existingBundle != null)
            {
                _logger.LogInformation("Bundle {BundleHash} already imported", manifest.BundleHash);
                return BundleImportResult.Succeeded(
                    manifest.BundleHash,
                    existingBundle.Cursor,
                    new ImportCounts { CanonicalSkipped = manifest.Counts.Canonicals },
                    duration: stopwatch.Elapsed);
            }

            // 5. Dry run - return preview. Counts come from the manifest, so this is
            // an optimistic estimate (everything reported as created/added).
            if (options.DryRun)
            {
                return BundleImportResult.Succeeded(
                    manifest.BundleHash,
                    manifest.ExportCursor,
                    new ImportCounts
                    {
                        CanonicalCreated = manifest.Counts.Canonicals,
                        EdgesAdded = manifest.Counts.Edges,
                        DeletionsProcessed = manifest.Counts.Deletions
                    },
                    duration: stopwatch.Elapsed);
            }

            // 6. Import canonicals (streamed; ImportCounts is a record updated via `with`)
            var conflicts = new List<ImportConflict>();
            var counts = new ImportCounts();
            var pendingEvents = new List<CanonicalImportedEvent>();
            var importTimestamp = _timeProvider.GetUtcNow();
            await foreach (var canonical in reader.StreamCanonicalsAsync(cancellationToken))
            {
                var result = await _mergeService.MergeCanonicalAsync(
                    canonical,
                    options.OnConflict,
                    cancellationToken);
                counts = result.Action switch
                {
                    MergeAction.Created => counts with { CanonicalCreated = counts.CanonicalCreated + 1 },
                    MergeAction.Updated => counts with { CanonicalUpdated = counts.CanonicalUpdated + 1 },
                    MergeAction.Skipped => counts with { CanonicalSkipped = counts.CanonicalSkipped + 1 },
                    _ => counts
                };
                if (result.Conflict != null)
                {
                    conflicts.Add(result.Conflict);
                    // Under Fail, the first conflict aborts the import mid-stream;
                    // earlier merges of this run are not rolled back here.
                    if (options.OnConflict == ConflictResolution.Fail)
                    {
                        return BundleImportResult.Failed(
                            manifest.BundleHash,
                            $"Conflict on {result.Conflict.MergeHash}.{result.Conflict.Field}",
                            stopwatch.Elapsed);
                    }
                }
                // Task 22: Queue event for downstream consumers (skipped merges emit nothing)
                if (result.Action != MergeAction.Skipped)
                {
                    pendingEvents.Add(new CanonicalImportedEvent
                    {
                        CanonicalId = canonical.Id,
                        Cve = canonical.Cve,
                        AffectsKey = canonical.AffectsKey,
                        MergeHash = canonical.MergeHash,
                        Action = result.Action.ToString(),
                        BundleHash = manifest.BundleHash,
                        SiteId = manifest.SiteId,
                        ImportedAt = importTimestamp,
                        HadConflict = result.Conflict != null,
                        ConflictField = result.Conflict?.Field
                    });
                    // Task 23: Update cache indexes for imported canonical (best-effort)
                    await UpdateCacheIndexesAsync(canonical, cancellationToken);
                }
            }

            // 7. Import edges (only newly-added edges are counted)
            await foreach (var edge in reader.StreamEdgesAsync(cancellationToken))
            {
                var added = await _mergeService.MergeEdgeAsync(edge, cancellationToken);
                if (added)
                {
                    counts = counts with { EdgesAdded = counts.EdgesAdded + 1 };
                }
            }

            // 8. Process deletions (mark canonicals withdrawn)
            await foreach (var deletion in reader.StreamDeletionsAsync(cancellationToken))
            {
                await _mergeService.ProcessDeletionAsync(deletion, cancellationToken);
                counts = counts with { DeletionsProcessed = counts.DeletionsProcessed + 1 };
            }

            // 9. Update sync ledger (records cursor + bundle hash for duplicate detection)
            await _ledgerRepository.AdvanceCursorAsync(
                manifest.SiteId,
                manifest.ExportCursor,
                manifest.BundleHash,
                manifest.Counts.Total,
                manifest.ExportedAt,
                cancellationToken);

            // 10. Publish import events for downstream consumers (Task 22).
            // Deliberately after the ledger advance, so events only fire for
            // imports that fully completed.
            await PublishImportEventsAsync(pendingEvents, cancellationToken);
            _logger.LogInformation(
                "Bundle {BundleHash} imported: {Created} created, {Updated} updated, {Skipped} skipped, {Edges} edges, {Deletions} deletions",
                manifest.BundleHash,
                counts.CanonicalCreated,
                counts.CanonicalUpdated,
                counts.CanonicalSkipped,
                counts.EdgesAdded,
                counts.DeletionsProcessed);
            return BundleImportResult.Succeeded(
                manifest.BundleHash,
                manifest.ExportCursor,
                counts,
                conflicts,
                stopwatch.Elapsed);
        }
        catch (Exception ex)
        {
            // NOTE(review): the bundle hash is reported as "unknown" even when the
            // manifest was already parsed before the failure — consider capturing it.
            _logger.LogError(ex, "Bundle import failed");
            return BundleImportResult.Failed(
                "unknown",
                ex.Message,
                stopwatch.Elapsed);
        }
    }

    /// <inheritdoc />
    public async Task<BundleImportResult> ImportFromFileAsync(
        string filePath,
        BundleImportOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        await using var fileStream = File.OpenRead(filePath);
        return await ImportAsync(fileStream, options, cancellationToken);
    }

    /// <inheritdoc />
    /// <remarks>Always verifies the signature (no skip option on this path).</remarks>
    public async Task<BundleValidationResult> ValidateAsync(
        Stream bundleStream,
        CancellationToken cancellationToken = default)
    {
        // NOTE(review): reader is disposed synchronously here but the file-import
        // path uses `await using` for its stream — confirm whether BundleReader
        // implements IAsyncDisposable and should be `await using` too.
        using var reader = await BundleReader.ReadAsync(bundleStream, cancellationToken);
        return await _verifier.VerifyAsync(reader, skipSignature: false, cancellationToken);
    }

    /// <inheritdoc />
    /// <remarks>Read-only preview: verifies and inspects ledger state without writing.</remarks>
    public async Task<BundleImportPreview> PreviewAsync(
        Stream bundleStream,
        CancellationToken cancellationToken = default)
    {
        using var reader = await BundleReader.ReadAsync(bundleStream, cancellationToken);
        var manifest = reader.Manifest;
        var validation = await _verifier.VerifyAsync(reader, skipSignature: false, cancellationToken);
        var currentCursor = await _ledgerRepository.GetCursorAsync(manifest.SiteId, cancellationToken);
        var existingBundle = await _ledgerRepository.GetByBundleHashAsync(manifest.BundleHash, cancellationToken);
        return new BundleImportPreview
        {
            Manifest = manifest,
            IsValid = validation.IsValid,
            Errors = validation.Errors,
            Warnings = validation.Warnings,
            IsDuplicate = existingBundle != null,
            CurrentCursor = currentCursor
        };
    }

    /// <summary>
    /// Publishes import events for downstream consumers (Task 22: IMPORT-8200-022).
    /// No-op when no event stream is configured or there is nothing to publish.
    /// Best-effort: partial or total publish failure is logged, never thrown.
    /// </summary>
    private async Task PublishImportEventsAsync(
        IReadOnlyList<CanonicalImportedEvent> events,
        CancellationToken cancellationToken)
    {
        if (_eventStream == null || events.Count == 0)
        {
            return;
        }
        try
        {
            var results = await _eventStream.PublishBatchAsync(events, cancellationToken: cancellationToken);
            var successCount = results.Count(r => r.Success);
            if (successCount < events.Count)
            {
                _logger.LogWarning(
                    "Published {SuccessCount}/{TotalCount} import events",
                    successCount,
                    events.Count);
            }
            else
            {
                _logger.LogDebug("Published {Count} import events", events.Count);
            }
        }
        catch (Exception ex)
        {
            // Log but don't fail the import - events are best-effort
            _logger.LogWarning(ex, "Failed to publish import events");
        }
    }

    /// <summary>
    /// Updates Valkey cache indexes for an imported canonical (Task 23: IMPORT-8200-023).
    /// Indexes by PURL and (when present) CVE, then invalidates the cached entry so
    /// the next read refreshes from the database. Best-effort: failures are logged only.
    /// </summary>
    private async Task UpdateCacheIndexesAsync(
        CanonicalBundleLine canonical,
        CancellationToken cancellationToken)
    {
        if (_cacheService == null)
        {
            return;
        }
        try
        {
            // Index by affects key (PURL) for artifact lookups
            await _cacheService.IndexPurlAsync(canonical.AffectsKey, canonical.MergeHash, cancellationToken);
            // Index by CVE for vulnerability lookups
            if (!string.IsNullOrEmpty(canonical.Cve))
            {
                await _cacheService.IndexCveAsync(canonical.Cve, canonical.MergeHash, cancellationToken);
            }
            // Invalidate existing cache entry to force refresh from DB
            await _cacheService.InvalidateAsync(canonical.MergeHash, cancellationToken);
        }
        catch (Exception ex)
        {
            // Log but don't fail the import - caching is best-effort
            _logger.LogWarning(ex,
                "Failed to update cache indexes for canonical {MergeHash}",
                canonical.MergeHash);
        }
    }
}
/// <summary>
/// Repository for sync ledger entries (per-site import cursors, bundle history,
/// and federation site policies).
/// </summary>
public interface ISyncLedgerRepository
{
    /// <summary>Get current cursor for a site; null when the site has never synced.</summary>
    Task<string?> GetCursorAsync(string siteId, CancellationToken ct = default);

    /// <summary>Get ledger entry by bundle hash; null when the bundle was never imported (used for duplicate detection).</summary>
    Task<SyncLedgerEntry?> GetByBundleHashAsync(string bundleHash, CancellationToken ct = default);

    /// <summary>Advance cursor after successful import, recording the bundle hash, item count, and export time.</summary>
    Task AdvanceCursorAsync(
        string siteId,
        string cursor,
        string bundleHash,
        int itemCount,
        DateTimeOffset exportedAt,
        CancellationToken ct = default);

    /// <summary>Get all site policies; by default only enabled ones.</summary>
    Task<IReadOnlyList<SitePolicy>> GetAllPoliciesAsync(bool enabledOnly = true, CancellationToken ct = default);

    /// <summary>Get site policy by ID; null when the site is unknown.</summary>
    Task<SitePolicy?> GetPolicyAsync(string siteId, CancellationToken ct = default);

    /// <summary>Update or create site policy.</summary>
    Task UpsertPolicyAsync(SitePolicy policy, CancellationToken ct = default);

    /// <summary>Get latest ledger entry for a site; null when no imports have occurred.</summary>
    Task<SyncLedgerEntry?> GetLatestAsync(string siteId, CancellationToken ct = default);

    /// <summary>Get history of ledger entries for a site, up to <paramref name="limit"/> entries.</summary>
    IAsyncEnumerable<SyncLedgerEntry> GetHistoryAsync(string siteId, int limit, CancellationToken ct = default);
}
/// <summary>
/// Sync ledger entry: one successfully imported bundle for a federation site.
/// </summary>
public sealed record SyncLedgerEntry
{
    /// <summary>Source site identifier.</summary>
    public required string SiteId { get; init; }

    /// <summary>Export cursor recorded for this import ({ISO8601}#{sequence} format).</summary>
    public required string Cursor { get; init; }

    /// <summary>Hash of the imported bundle; used for duplicate detection.</summary>
    public required string BundleHash { get; init; }

    /// <summary>Total number of items the bundle declared.</summary>
    public int ItemCount { get; init; }

    /// <summary>When the bundle was exported by the source site.</summary>
    public DateTimeOffset ExportedAt { get; init; }

    /// <summary>When the bundle was imported locally.</summary>
    public DateTimeOffset ImportedAt { get; init; }
}
/// <summary>
/// Site policy for federation: per-site sync settings and bookkeeping.
/// </summary>
public sealed record SitePolicy
{
    /// <summary>Federation site identifier.</summary>
    public required string SiteId { get; init; }

    /// <summary>Human-readable site name, if configured.</summary>
    public string? DisplayName { get; init; }

    /// <summary>Whether syncing from this site is enabled.</summary>
    public bool Enabled { get; init; }

    /// <summary>Last successful sync time; null when the site has never synced.</summary>
    public DateTimeOffset? LastSyncAt { get; init; }

    /// <summary>Cursor recorded at the last sync.</summary>
    public string? LastCursor { get; init; }

    /// <summary>Total number of bundles imported from this site.</summary>
    public int TotalImports { get; init; }

    /// <summary>Allow-list of advisory sources accepted from this site; null means no restriction.</summary>
    // NOTE(review): exposes a mutable List<string>? on a record — consider
    // IReadOnlyList<string>? to preserve value-type immutability semantics.
    public List<string>? AllowedSources { get; init; }

    /// <summary>Maximum accepted bundle size in bytes; null means unlimited.</summary>
    public long? MaxBundleSizeBytes { get; init; }
}
/// <summary>
/// Cursor comparison utilities.
/// </summary>
public static class CursorComparer
{
    /// <summary>
    /// Check if cursor A is strictly after cursor B.
    /// Cursors are in format: {ISO8601}#{sequence}.
    /// Permissive by design: returns true when either cursor is missing or not in
    /// the expected two-part format, so a malformed cursor never blocks an import.
    /// Falls back to ordinal string comparison when timestamp/sequence parsing fails.
    /// </summary>
    /// <param name="cursorA">Candidate (incoming) cursor.</param>
    /// <param name="cursorB">Reference (current) cursor.</param>
    public static bool IsAfter(string cursorA, string cursorB)
    {
        if (string.IsNullOrWhiteSpace(cursorA) || string.IsNullOrWhiteSpace(cursorB))
            return true; // Allow if either is missing

        var partsA = cursorA.Split('#');
        var partsB = cursorB.Split('#');
        if (partsA.Length < 2 || partsB.Length < 2)
            return true; // Allow if format is unexpected

        // Compare timestamps first. Cursors are machine-generated, so parse with the
        // invariant culture — the previous culture-sensitive overload could mis-order
        // cursors under locales with different date conventions (CA1305).
        if (DateTimeOffset.TryParse(partsA[0], System.Globalization.CultureInfo.InvariantCulture,
                System.Globalization.DateTimeStyles.None, out var timeA) &&
            DateTimeOffset.TryParse(partsB[0], System.Globalization.CultureInfo.InvariantCulture,
                System.Globalization.DateTimeStyles.None, out var timeB))
        {
            if (timeA > timeB) return true;
            if (timeA < timeB) return false;

            // Same timestamp, compare sequence (plain invariant integers)
            if (int.TryParse(partsA[1], System.Globalization.NumberStyles.Integer,
                    System.Globalization.CultureInfo.InvariantCulture, out var seqA) &&
                int.TryParse(partsB[1], System.Globalization.NumberStyles.Integer,
                    System.Globalization.CultureInfo.InvariantCulture, out var seqB))
            {
                return seqA > seqB;
            }
        }

        // Fall back to string comparison
        return string.Compare(cursorA, cursorB, StringComparison.Ordinal) > 0;
    }
}

View File

@@ -0,0 +1,214 @@
// -----------------------------------------------------------------------------
// BundleMergeService.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Tasks: IMPORT-8200-013 through IMPORT-8200-017
// Description: Merges bundle contents into local canonical store.
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Service for merging bundle contents into local canonical store.
/// </summary>
public sealed class BundleMergeService : IBundleMergeService
{
    private readonly ICanonicalMergeRepository _repository;
    private readonly ILogger<BundleMergeService> _logger;

    /// <summary>Creates the merge service over the given canonical repository.</summary>
    public BundleMergeService(
        ICanonicalMergeRepository repository,
        ILogger<BundleMergeService> logger)
    {
        _repository = repository;
        _logger = logger;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Resolution semantics: unknown merge hash -> insert (Created); existing with
    /// conflict under Fail -> no write; under PreferLocal -> Skipped (no write);
    /// otherwise (PreferRemote, the default) -> overwrite with remote values.
    /// </remarks>
    public async Task<MergeResult> MergeCanonicalAsync(
        CanonicalBundleLine canonical,
        ConflictResolution resolution,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(canonical);

        // Check if canonical exists
        var existing = await _repository.GetByMergeHashAsync(canonical.MergeHash, cancellationToken);
        if (existing == null)
        {
            // New canonical - insert
            await _repository.InsertCanonicalAsync(canonical, cancellationToken);
            _logger.LogDebug("Created canonical {MergeHash}", canonical.MergeHash);
            return MergeResult.Created();
        }

        // Existing canonical - check for conflicts and update
        var conflict = DetectConflict(existing, canonical);
        if (conflict != null)
        {
            conflict = conflict with { Resolution = resolution };
            if (resolution == ConflictResolution.Fail)
            {
                // NOTE(review): returns UpdatedWithConflict WITHOUT persisting any
                // update — the reported "Updated" action is nominal. The caller
                // (BundleImportService) aborts the import on this path, but the
                // action name is misleading; confirm intended semantics.
                _logger.LogWarning("Conflict detected on {MergeHash}.{Field}: local={LocalValue}, remote={RemoteValue}",
                    conflict.MergeHash, conflict.Field, conflict.LocalValue, conflict.RemoteValue);
                return MergeResult.UpdatedWithConflict(conflict);
            }
            if (resolution == ConflictResolution.PreferLocal)
            {
                _logger.LogDebug("Skipping update for {MergeHash} - preferring local value", canonical.MergeHash);
                return MergeResult.Skipped();
            }
        }

        // Update with remote values (PreferRemote is default)
        await _repository.UpdateCanonicalAsync(canonical, cancellationToken);
        _logger.LogDebug("Updated canonical {MergeHash}", canonical.MergeHash);
        return conflict != null
            ? MergeResult.UpdatedWithConflict(conflict)
            : MergeResult.Updated();
    }

    /// <inheritdoc />
    /// <returns>True when the edge was inserted; false when it already existed.</returns>
    public async Task<bool> MergeEdgeAsync(
        EdgeBundleLine edge,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(edge);

        // Check if edge already exists (identity: canonical + source + source advisory id)
        var exists = await _repository.EdgeExistsAsync(
            edge.CanonicalId,
            edge.Source,
            edge.SourceAdvisoryId,
            cancellationToken);
        if (exists)
        {
            _logger.LogDebug("Edge already exists: {CanonicalId}/{Source}/{SourceAdvisoryId}",
                edge.CanonicalId, edge.Source, edge.SourceAdvisoryId);
            return false;
        }

        // Insert new edge
        await _repository.InsertEdgeAsync(edge, cancellationToken);
        _logger.LogDebug("Added edge: {CanonicalId}/{Source}/{SourceAdvisoryId}",
            edge.CanonicalId, edge.Source, edge.SourceAdvisoryId);
        return true;
    }

    /// <inheritdoc />
    /// <remarks>Soft delete: the canonical is marked withdrawn, not removed.</remarks>
    public async Task ProcessDeletionAsync(
        DeletionBundleLine deletion,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(deletion);
        await _repository.MarkAsWithdrawnAsync(
            deletion.CanonicalId,
            deletion.DeletedAt,
            deletion.Reason,
            cancellationToken);
        _logger.LogDebug("Marked canonical {CanonicalId} as withdrawn: {Reason}",
            deletion.CanonicalId, deletion.Reason);
    }

    /// <summary>
    /// Detects at most ONE conflict between the stored canonical and the incoming
    /// line, checked in priority order: severity, then status, then title.
    /// Fields that are empty on either side never conflict. Titles compare
    /// case-sensitively, only when the local title is longer than 10 chars, and
    /// reported values are truncated to 50 chars for readability.
    /// </summary>
    private static ImportConflict? DetectConflict(
        ExistingCanonical existing,
        CanonicalBundleLine incoming)
    {
        // Check for meaningful conflicts (not just timestamp differences)
        // Severity conflict
        if (!string.Equals(existing.Severity, incoming.Severity, StringComparison.OrdinalIgnoreCase) &&
            !string.IsNullOrEmpty(existing.Severity) &&
            !string.IsNullOrEmpty(incoming.Severity))
        {
            return new ImportConflict
            {
                MergeHash = incoming.MergeHash,
                Field = "severity",
                LocalValue = existing.Severity,
                RemoteValue = incoming.Severity,
                Resolution = ConflictResolution.PreferRemote
            };
        }
        // Status conflict
        if (!string.Equals(existing.Status, incoming.Status, StringComparison.OrdinalIgnoreCase) &&
            !string.IsNullOrEmpty(existing.Status) &&
            !string.IsNullOrEmpty(incoming.Status))
        {
            return new ImportConflict
            {
                MergeHash = incoming.MergeHash,
                Field = "status",
                LocalValue = existing.Status,
                RemoteValue = incoming.Status,
                Resolution = ConflictResolution.PreferRemote
            };
        }
        // Title conflict (less critical, but worth noting)
        if (!string.Equals(existing.Title, incoming.Title, StringComparison.Ordinal) &&
            !string.IsNullOrEmpty(existing.Title) &&
            !string.IsNullOrEmpty(incoming.Title) &&
            existing.Title.Length > 10) // Only if title is meaningful
        {
            return new ImportConflict
            {
                MergeHash = incoming.MergeHash,
                Field = "title",
                LocalValue = existing.Title?.Length > 50 ? existing.Title[..50] + "..." : existing.Title,
                RemoteValue = incoming.Title?.Length > 50 ? incoming.Title[..50] + "..." : incoming.Title,
                Resolution = ConflictResolution.PreferRemote
            };
        }
        return null;
    }
}
/// <summary>
/// Repository interface for canonical merge operations.
/// </summary>
public interface ICanonicalMergeRepository
{
    /// <summary>Get existing canonical by merge hash; null when not present.</summary>
    Task<ExistingCanonical?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default);

    /// <summary>Insert a new canonical.</summary>
    Task InsertCanonicalAsync(CanonicalBundleLine canonical, CancellationToken ct = default);

    /// <summary>Update an existing canonical with incoming (remote) values.</summary>
    Task UpdateCanonicalAsync(CanonicalBundleLine canonical, CancellationToken ct = default);

    /// <summary>Check if a source edge exists for (canonical, source, source advisory id).</summary>
    Task<bool> EdgeExistsAsync(Guid canonicalId, string source, string sourceAdvisoryId, CancellationToken ct = default);

    /// <summary>Insert a new source edge.</summary>
    Task InsertEdgeAsync(EdgeBundleLine edge, CancellationToken ct = default);

    /// <summary>Mark a canonical as withdrawn (soft delete with timestamp and optional reason).</summary>
    Task MarkAsWithdrawnAsync(Guid canonicalId, DateTimeOffset deletedAt, string? reason, CancellationToken ct = default);
}
/// <summary>
/// Existing canonical data for conflict detection (projection of the stored
/// canonical containing only the fields compared by DetectConflict).
/// </summary>
public sealed record ExistingCanonical
{
    /// <summary>Canonical advisory ID.</summary>
    public required Guid Id { get; init; }

    /// <summary>Merge hash identifying the canonical.</summary>
    public required string MergeHash { get; init; }

    /// <summary>Stored severity (compared case-insensitively against incoming).</summary>
    public string? Severity { get; init; }

    /// <summary>Stored status (compared case-insensitively against incoming).</summary>
    public string? Status { get; init; }

    /// <summary>Stored title (compared case-sensitively against incoming).</summary>
    public string? Title { get; init; }

    /// <summary>Last update time of the stored canonical.</summary>
    public DateTimeOffset UpdatedAt { get; init; }
}

View File

@@ -11,6 +11,7 @@ using System.Text.Json;
using StellaOps.Concelier.Federation.Compression;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.Federation.Serialization;
using StellaOps.Concelier.Federation.Signing;
namespace StellaOps.Concelier.Federation.Import;

View File

@@ -61,13 +61,28 @@ public sealed record BundleValidationResult
/// <summary>Signature verification result.</summary>
public SignatureVerificationResult? SignatureResult { get; init; }
/// <summary>Whether the bundle hash is valid.</summary>
public bool HashValid { get; init; }
/// <summary>Whether the signature is valid (or skipped).</summary>
public bool SignatureValid { get; init; }
/// <summary>Whether the cursor is valid for import.</summary>
public bool CursorValid { get; init; }
/// <summary>Create a successful validation result.</summary>
public static BundleValidationResult Success(BundleManifest manifest, SignatureVerificationResult? signatureResult = null)
public static BundleValidationResult Success(
BundleManifest manifest,
SignatureVerificationResult? signatureResult = null,
bool cursorValid = true)
=> new()
{
IsValid = true,
Manifest = manifest,
SignatureResult = signatureResult
SignatureResult = signatureResult,
HashValid = true,
SignatureValid = signatureResult?.IsValid ?? true,
CursorValid = cursorValid
};
/// <summary>Create a failed validation result.</summary>

View File

@@ -25,6 +25,11 @@ public static class BundleSerializer
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
};
/// <summary>
/// Default JSON serializer options for bundle content.
/// </summary>
public static JsonSerializerOptions Options => NdjsonOptions;
/// <summary>
/// Serialize manifest to JSON bytes.
/// </summary>

View File

@@ -16,7 +16,9 @@
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Postgres\StellaOps.Concelier.Storage.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Cache.Valkey\StellaOps.Concelier.Cache.Valkey.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Provenance\StellaOps.Provenance.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,306 @@
// -----------------------------------------------------------------------------
// BackportEvidenceResolver.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Tasks: BACKPORT-8200-006, BACKPORT-8200-007, BACKPORT-8200-008
// Description: Resolves backport evidence by calling proof generator
// -----------------------------------------------------------------------------
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Merge.Backport;
/// <summary>
/// Resolves backport evidence by delegating to proof generator
/// and extracting patch lineage for merge hash computation.
/// </summary>
public sealed partial class BackportEvidenceResolver : IBackportEvidenceResolver
{
private readonly IProofGenerator _proofGenerator;
private readonly ILogger<BackportEvidenceResolver> _logger;
/// <summary>
/// Creates the resolver.
/// </summary>
/// <param name="proofGenerator">Generates backport proofs for (CVE, PURL) pairs.</param>
/// <param name="logger">Diagnostic logger.</param>
/// <exception cref="ArgumentNullException">When either dependency is null.</exception>
public BackportEvidenceResolver(
    IProofGenerator proofGenerator,
    ILogger<BackportEvidenceResolver> logger)
{
    _proofGenerator = proofGenerator ?? throw new ArgumentNullException(nameof(proofGenerator));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
/// <remarks>
/// Returns null when the proof generator yields no proof, when confidence is
/// below the 0.1 noise floor, or when the only evidence is a distro advisory
/// with confidence below 0.3 (filtered inside ExtractBackportEvidence).
/// </remarks>
public async Task<BackportEvidence?> ResolveAsync(
    string cveId,
    string packagePurl,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
    ArgumentException.ThrowIfNullOrWhiteSpace(packagePurl);
    _logger.LogDebug("Resolving backport evidence for {CveId} in {Package}", cveId, packagePurl);
    var proof = await _proofGenerator.GenerateProofAsync(cveId, packagePurl, ct);
    // 0.1 = minimum confidence floor below which evidence is treated as noise
    if (proof is null || proof.Confidence < 0.1)
    {
        _logger.LogDebug("No sufficient evidence for {CveId} in {Package}", cveId, packagePurl);
        return null;
    }
    return ExtractBackportEvidence(cveId, packagePurl, proof);
}
/// <inheritdoc />
/// <remarks>
/// Resolves one CVE against many packages in a single batched proof call.
/// The package PURL is recovered from each proof's SubjectId; proofs whose
/// subject contains no "pkg:" segment are silently dropped.
/// NOTE(review): unlike ResolveAsync, this path does not apply the 0.1
/// confidence floor before extraction — confirm whether that is intended.
/// </remarks>
public async Task<IReadOnlyList<BackportEvidence>> ResolveBatchAsync(
    string cveId,
    IEnumerable<string> packagePurls,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
    ArgumentNullException.ThrowIfNull(packagePurls);
    var requests = packagePurls.Select(purl => (cveId, purl));
    var proofs = await _proofGenerator.GenerateProofBatchAsync(requests, ct);
    var results = new List<BackportEvidence>();
    foreach (var proof in proofs)
    {
        var purl = ExtractPurlFromSubjectId(proof.SubjectId);
        if (purl != null)
        {
            var evidence = ExtractBackportEvidence(cveId, purl, proof);
            if (evidence != null)
            {
                results.Add(evidence);
            }
        }
    }
    return results;
}
/// <inheritdoc />
/// <remarks>
/// True only when evidence resolves AND its confidence is at least 0.3 —
/// a stricter bar than the 0.1 floor used by ResolveAsync.
/// </remarks>
public async Task<bool> HasEvidenceAsync(
    string cveId,
    string packagePurl,
    CancellationToken ct = default)
{
    var evidence = await ResolveAsync(cveId, packagePurl, ct);
    return evidence is not null && evidence.Confidence >= 0.3;
}
/// <summary>
/// Builds a <c>BackportEvidence</c> from a proof result: derives the distro
/// release from the PURL, the strongest evidence tier, patch lineage, and any
/// backported version. Returns null when the best available tier is only a
/// distro advisory AND confidence is below 0.3 (too weak to act on).
/// </summary>
private BackportEvidence? ExtractBackportEvidence(string cveId, string packagePurl, ProofResult proof)
{
    var distroRelease = ExtractDistroRelease(packagePurl);
    var tier = DetermineHighestTier(proof.Evidences);
    var (patchId, patchOrigin) = ExtractPatchLineage(proof.Evidences);
    var backportVersion = ExtractBackportVersion(proof.Evidences, packagePurl);
    // Advisory-only evidence needs at least 0.3 confidence to count
    if (tier == BackportEvidenceTier.DistroAdvisory && proof.Confidence < 0.3)
    {
        return null;
    }
    return new BackportEvidence
    {
        CveId = cveId,
        PackagePurl = packagePurl,
        DistroRelease = distroRelease,
        Tier = tier,
        Confidence = proof.Confidence,
        PatchId = patchId,
        BackportVersion = backportVersion,
        PatchOrigin = patchOrigin,
        ProofId = proof.ProofId,
        EvidenceDate = proof.CreatedAt
    };
}
/// <summary>
/// Maps each evidence item's type name to a tier and returns the strongest one.
/// Unknown types — and an empty evidence list — fall back to the weakest tier
/// (DistroAdvisory). Relies on the enum ordering tiers from weakest to strongest.
/// </summary>
private static BackportEvidenceTier DetermineHighestTier(IReadOnlyList<ProofEvidenceItem> evidences)
{
    // Local mapping from the (case-insensitive) evidence type name to its tier.
    static BackportEvidenceTier MapTier(string type) => type.ToUpperInvariant() switch
    {
        "BINARYFINGERPRINT" => BackportEvidenceTier.BinaryFingerprint,
        "PATCHHEADER" => BackportEvidenceTier.PatchHeader,
        "CHANGELOGMENTION" => BackportEvidenceTier.ChangelogMention,
        "DISTROADVISORY" => BackportEvidenceTier.DistroAdvisory,
        _ => BackportEvidenceTier.DistroAdvisory
    };

    // Fold over the evidence, keeping the strongest tier seen so far.
    return evidences
        .Select(item => MapTier(item.Type))
        .Aggregate(BackportEvidenceTier.DistroAdvisory, (best, tier) => tier > best ? tier : best);
}
/// <summary>
/// Extracts the patch identifier and its origin from proof evidence.
/// Picks the single best patch-bearing evidence item (PatchHeader preferred over
/// ChangelogMention), then tries, in order: the data keys commit_sha, patch_id,
/// upstream_commit, distro_patch_id; the evidence Source name; and finally a
/// 40-hex-char SHA embedded in the evidence ID. Defaults to (null, Upstream).
/// </summary>
private static (string? PatchId, PatchOrigin Origin) ExtractPatchLineage(IReadOnlyList<ProofEvidenceItem> evidences)
{
    // Priority order: PatchHeader > Changelog > Advisory
    var patchEvidence = evidences
        .Where(e => e.Type.Equals("PatchHeader", StringComparison.OrdinalIgnoreCase) ||
                    e.Type.Equals("ChangelogMention", StringComparison.OrdinalIgnoreCase))
        .OrderByDescending(e => e.Type.Equals("PatchHeader", StringComparison.OrdinalIgnoreCase) ? 1 : 0)
        .FirstOrDefault();
    if (patchEvidence is null)
    {
        return (null, PatchOrigin.Upstream);
    }
    string? patchId = null;
    var origin = PatchOrigin.Upstream;
    // Try to extract patch info from data dictionary (first matching key wins)
    if (patchEvidence.Data.TryGetValue("commit_sha", out var sha))
    {
        patchId = sha;
        origin = PatchOrigin.Upstream;
    }
    else if (patchEvidence.Data.TryGetValue("patch_id", out var pid))
    {
        // Origin left as Upstream here; the source-based detection below may refine it.
        patchId = pid;
    }
    else if (patchEvidence.Data.TryGetValue("upstream_commit", out var uc))
    {
        patchId = uc;
        origin = PatchOrigin.Upstream;
    }
    else if (patchEvidence.Data.TryGetValue("distro_patch_id", out var dpid))
    {
        patchId = dpid;
        origin = PatchOrigin.Distro;
    }
    // Try to determine origin from source field (only while origin is still the
    // Upstream default — a Distro origin set above is never overridden)
    if (origin == PatchOrigin.Upstream)
    {
        var source = patchEvidence.Source.ToLowerInvariant();
        origin = source switch
        {
            "upstream" or "github" or "gitlab" => PatchOrigin.Upstream,
            "debian" or "redhat" or "suse" or "ubuntu" or "alpine" => PatchOrigin.Distro,
            "vendor" or "cisco" or "oracle" or "microsoft" => PatchOrigin.Vendor,
            _ => PatchOrigin.Upstream
        };
    }
    // If still no patch ID, try to extract a full 40-char commit SHA from the evidence ID
    if (patchId is null && patchEvidence.EvidenceId.Contains(':'))
    {
        var match = CommitShaRegex().Match(patchEvidence.EvidenceId);
        if (match.Success)
        {
            patchId = match.Value;
        }
    }
    return (patchId, origin);
}
/// <summary>
/// Extracts the distro's backported/fixed version: prefers the advisory
/// evidence's fixed_version (then patched_version) data keys, falling back to
/// the version segment of the PURL itself (text after the last '@').
/// Returns null when no version can be determined.
/// </summary>
private static string? ExtractBackportVersion(IReadOnlyList<ProofEvidenceItem> evidences, string packagePurl)
{
    // Try to extract version from advisory evidence
    var advisory = evidences.FirstOrDefault(e =>
        e.Type.Equals("DistroAdvisory", StringComparison.OrdinalIgnoreCase));
    if (advisory is not null)
    {
        if (advisory.Data.TryGetValue("fixed_version", out var fv))
        {
            return fv;
        }
        if (advisory.Data.TryGetValue("patched_version", out var pv))
        {
            return pv;
        }
    }
    // Fallback: extract version from PURL if present
    var match = PurlVersionRegex().Match(packagePurl);
    return match.Success ? match.Groups[1].Value : null;
}
/// <summary>
/// Derives a "distro:release" identifier (e.g. "debian:bookworm", "redhat:9") from a PURL.
/// </summary>
/// <param name="packagePurl">Package URL, e.g. pkg:deb/debian/curl@7.64.0-4 or pkg:rpm/redhat/openssl@1.0.2k-19.el7.</param>
/// <returns>"distro:release" when a release marker is found in the version, the bare distro
/// name when only the distro is recognizable, or "unknown" when the PURL does not match.</returns>
private static string ExtractDistroRelease(string packagePurl)
{
    var match = PurlDistroRegex().Match(packagePurl);
    if (!match.Success)
    {
        return "unknown";
    }

    // Group 2 is the distro name (debian, ubuntu, etc.), Group 1 is package type (deb, rpm, apk)
    var distro = match.Groups[2].Value.ToLowerInvariant();

    // Try to extract release codename from version
    var versionMatch = PurlVersionRegex().Match(packagePurl);
    if (versionMatch.Success)
    {
        var version = versionMatch.Groups[1].Value;

        // Debian patterns: ~deb10, ~deb11, +deb12
        var debMatch = DebianReleaseRegex().Match(version);
        if (debMatch.Success)
        {
            var debVersion = debMatch.Groups[1].Value;
            // Fix: "10" => "buster" was missing here while ProvenanceScopeService.MapDebianCodename
            // maps it; keep both mappings consistent.
            var codename = debVersion switch
            {
                "10" => "buster",
                "11" => "bullseye",
                "12" => "bookworm",
                "13" => "trixie",
                _ => debVersion
            };
            return $"{distro}:{codename}";
        }

        // RHEL patterns: .el7, .el8, .el9
        var rhelMatch = RhelReleaseRegex().Match(version);
        if (rhelMatch.Success)
        {
            return $"{distro}:{rhelMatch.Groups[1].Value}";
        }

        // Ubuntu patterns: ~22.04, +22.04
        var ubuntuMatch = UbuntuReleaseRegex().Match(version);
        if (ubuntuMatch.Success)
        {
            return $"{distro}:{ubuntuMatch.Groups[1].Value}";
        }
    }

    return distro;
}
/// <summary>
/// Extracts the trailing PURL from a proof subject ID of the form "CVE-XXXX-YYYY:pkg:...".
/// Returns null when the subject contains no "pkg:" marker.
/// </summary>
private static string? ExtractPurlFromSubjectId(string subjectId)
{
    var purlStart = subjectId.IndexOf("pkg:", StringComparison.Ordinal);
    if (purlStart < 0)
    {
        return null;
    }

    return subjectId.Substring(purlStart);
}
// Matches a full 40-hex-character git commit SHA anywhere in the input (case-insensitive).
[GeneratedRegex(@"[0-9a-f]{40}", RegexOptions.IgnoreCase)]
private static partial Regex CommitShaRegex();

// Captures everything after the last '@' in a PURL (the version segment).
[GeneratedRegex(@"@([^@]+)$")]
private static partial Regex PurlVersionRegex();

// Captures package type (group 1: deb|rpm|apk) and distro namespace (group 2) from a PURL.
[GeneratedRegex(@"pkg:(deb|rpm|apk)/([^/]+)/")]
private static partial Regex PurlDistroRegex();

// Captures the Debian release number from version suffixes like "~deb11" or "+deb12".
[GeneratedRegex(@"[+~]deb(\d+)")]
private static partial Regex DebianReleaseRegex();

// Captures the RHEL major release from version suffixes like ".el7", ".el9".
[GeneratedRegex(@"\.el(\d+)")]
private static partial Regex RhelReleaseRegex();

// Captures an Ubuntu release (e.g. "22.04") from version suffixes like "~22.04" or "+22.04".
[GeneratedRegex(@"[+~](\d+\.\d+)")]
private static partial Regex UbuntuReleaseRegex();
}

View File

@@ -0,0 +1,112 @@
// -----------------------------------------------------------------------------
// IBackportEvidenceResolver.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-005
// Description: Interface for resolving backport evidence from proof service
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Merge.Backport;
/// <summary>
/// Resolves backport evidence for CVE + package combinations.
/// Bridges BackportProofService to the merge deduplication pipeline.
/// </summary>
public interface IBackportEvidenceResolver
{
    /// <summary>
    /// Resolve backport evidence for a CVE + package combination.
    /// </summary>
    /// <param name="cveId">CVE identifier (e.g., CVE-2024-1234)</param>
    /// <param name="packagePurl">Package URL (e.g., pkg:deb/debian/curl@7.64.0-4)</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>Backport evidence with patch lineage and confidence, or null if no evidence</returns>
    Task<BackportEvidence?> ResolveAsync(
        string cveId,
        string packagePurl,
        CancellationToken ct = default);

    /// <summary>
    /// Resolve evidence for multiple packages in batch.
    /// </summary>
    /// <param name="cveId">CVE identifier</param>
    /// <param name="packagePurls">Package URLs to check</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>Evidence for each package that has backport proof; packages without proof are omitted</returns>
    Task<IReadOnlyList<BackportEvidence>> ResolveBatchAsync(
        string cveId,
        IEnumerable<string> packagePurls,
        CancellationToken ct = default);

    /// <summary>
    /// Check if backport evidence exists without retrieving full details.
    /// Cheaper than <see cref="ResolveAsync"/> when only existence matters.
    /// </summary>
    Task<bool> HasEvidenceAsync(
        string cveId,
        string packagePurl,
        CancellationToken ct = default);
}
/// <summary>
/// Abstraction for generating proof blobs (wraps BackportProofService).
/// Allows the Merge library to consume proof without direct dependency.
/// </summary>
public interface IProofGenerator
{
    /// <summary>
    /// Generate proof for a CVE + package combination.
    /// </summary>
    /// <param name="cveId">CVE identifier</param>
    /// <param name="packagePurl">Package URL</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>The generated proof, or null when no proof could be produced</returns>
    Task<ProofResult?> GenerateProofAsync(
        string cveId,
        string packagePurl,
        CancellationToken ct = default);

    /// <summary>
    /// Generate proofs for multiple CVE + package combinations.
    /// </summary>
    /// <param name="requests">CVE + PURL pairs to generate proofs for</param>
    /// <param name="ct">Cancellation token</param>
    Task<IReadOnlyList<ProofResult>> GenerateProofBatchAsync(
        IEnumerable<(string CveId, string PackagePurl)> requests,
        CancellationToken ct = default);
}
/// <summary>
/// Simplified proof result for merge library consumption.
/// Maps from ProofBlob to avoid direct Attestor dependency.
/// </summary>
public sealed record ProofResult
{
    /// <summary>Proof identifier.</summary>
    public required string ProofId { get; init; }

    /// <summary>Subject identifier (CVE:PURL).</summary>
    public required string SubjectId { get; init; }

    /// <summary>Confidence score (0.0-1.0).</summary>
    public double Confidence { get; init; }

    /// <summary>When the proof was generated.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>Evidence items backing this proof; empty when none were collected.</summary>
    public IReadOnlyList<ProofEvidenceItem> Evidences { get; init; } = [];
}
/// <summary>
/// Simplified evidence item for merge library consumption.
/// </summary>
/// <summary>
/// Simplified evidence item for merge library consumption.
/// </summary>
public sealed record ProofEvidenceItem
{
    /// <summary>Evidence identifier.</summary>
    public required string EvidenceId { get; init; }

    /// <summary>Evidence type (DistroAdvisory, ChangelogMention, PatchHeader, BinaryFingerprint).</summary>
    public required string Type { get; init; }

    /// <summary>Source of the evidence.</summary>
    public required string Source { get; init; }

    /// <summary>Evidence timestamp.</summary>
    public DateTimeOffset Timestamp { get; init; }

    /// <summary>Extracted data fields (optional, type-specific).</summary>
    /// <remarks>
    /// Defaults to the shared immutable empty dictionary instead of allocating a
    /// fresh mutable <see cref="Dictionary{TKey, TValue}"/> per instance.
    /// </remarks>
    public IReadOnlyDictionary<string, string> Data { get; init; } =
        System.Collections.ObjectModel.ReadOnlyDictionary<string, string>.Empty;
}

View File

@@ -0,0 +1,157 @@
// -----------------------------------------------------------------------------
// IProvenanceScopeService.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-014
// Description: Service interface for provenance scope management
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Merge.Backport;
/// <summary>
/// Service for managing provenance scope during canonical advisory lifecycle.
/// Populates and updates provenance_scope table with backport evidence.
/// </summary>
public interface IProvenanceScopeService
{
    /// <summary>
    /// Creates or updates provenance scope for a canonical advisory during ingest.
    /// Called when a new canonical is created or when new evidence arrives.
    /// </summary>
    /// <param name="request">Provenance scope creation request</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>Result indicating success and scope ID</returns>
    Task<ProvenanceScopeResult> CreateOrUpdateAsync(
        ProvenanceScopeRequest request,
        CancellationToken ct = default);

    /// <summary>
    /// Gets all provenance scopes for a canonical advisory.
    /// </summary>
    /// <param name="canonicalId">Canonical advisory identifier</param>
    /// <param name="ct">Cancellation token</param>
    Task<IReadOnlyList<ProvenanceScope>> GetByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default);

    /// <summary>
    /// Updates provenance scope when new backport evidence is discovered.
    /// </summary>
    /// <param name="canonicalId">Canonical advisory identifier</param>
    /// <param name="evidence">Newly discovered backport evidence</param>
    /// <param name="ct">Cancellation token</param>
    Task<ProvenanceScopeResult> UpdateFromEvidenceAsync(
        Guid canonicalId,
        BackportEvidence evidence,
        CancellationToken ct = default);

    /// <summary>
    /// Links a provenance scope to a proof entry reference.
    /// </summary>
    /// <param name="provenanceScopeId">Provenance scope to link</param>
    /// <param name="evidenceRef">Proofchain entry reference</param>
    /// <param name="ct">Cancellation token</param>
    Task LinkEvidenceRefAsync(
        Guid provenanceScopeId,
        Guid evidenceRef,
        CancellationToken ct = default);

    /// <summary>
    /// Deletes all provenance scopes for a canonical (cascade on canonical delete).
    /// </summary>
    /// <param name="canonicalId">Canonical advisory identifier</param>
    /// <param name="ct">Cancellation token</param>
    Task DeleteByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default);
}
/// <summary>
/// Request to create or update provenance scope.
/// </summary>
public sealed record ProvenanceScopeRequest
{
    /// <summary>
    /// Canonical advisory ID to associate provenance with.
    /// </summary>
    public required Guid CanonicalId { get; init; }

    /// <summary>
    /// CVE identifier (for evidence resolution).
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Package PURL (for evidence resolution and distro extraction).
    /// </summary>
    public required string PackagePurl { get; init; }

    /// <summary>
    /// Source name (debian, redhat, etc.). Used as the distro-release fallback
    /// and for default patch-origin/confidence determination.
    /// </summary>
    public required string Source { get; init; }

    /// <summary>
    /// Patch lineage if already known from advisory.
    /// </summary>
    public string? PatchLineage { get; init; }

    /// <summary>
    /// Fixed version from advisory.
    /// </summary>
    public string? FixedVersion { get; init; }

    /// <summary>
    /// Whether to resolve additional evidence from proof service.
    /// Defaults to true; set false to build the scope from advisory data alone.
    /// </summary>
    public bool ResolveEvidence { get; init; } = true;
}
/// <summary>
/// Result of provenance scope operation.
/// </summary>
/// <summary>
/// Outcome of a provenance scope operation. Construct via the static factory
/// methods rather than object initializers so the Success/WasCreated flags
/// stay consistent.
/// </summary>
public sealed record ProvenanceScopeResult
{
    /// <summary>
    /// Whether the operation succeeded.
    /// </summary>
    public bool Success { get; init; }

    /// <summary>
    /// Created or updated provenance scope ID.
    /// </summary>
    public Guid? ProvenanceScopeId { get; init; }

    /// <summary>
    /// Linked evidence reference (if any).
    /// </summary>
    public Guid? EvidenceRef { get; init; }

    /// <summary>
    /// Error message if operation failed.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <summary>
    /// Whether a new scope was created vs updated.
    /// </summary>
    public bool WasCreated { get; init; }

    /// <summary>Successful result for a newly created scope.</summary>
    public static ProvenanceScopeResult Created(Guid scopeId, Guid? evidenceRef = null)
    {
        return new ProvenanceScopeResult
        {
            Success = true,
            ProvenanceScopeId = scopeId,
            EvidenceRef = evidenceRef,
            WasCreated = true
        };
    }

    /// <summary>Successful result for an updated (pre-existing) scope.</summary>
    public static ProvenanceScopeResult Updated(Guid scopeId, Guid? evidenceRef = null)
    {
        return new ProvenanceScopeResult
        {
            Success = true,
            ProvenanceScopeId = scopeId,
            EvidenceRef = evidenceRef,
            WasCreated = false
        };
    }

    /// <summary>Failed result carrying an error message.</summary>
    public static ProvenanceScopeResult Failed(string error)
    {
        return new ProvenanceScopeResult
        {
            Success = false,
            ErrorMessage = error
        };
    }

    /// <summary>Successful no-op result when no evidence was available.</summary>
    public static ProvenanceScopeResult NoEvidence()
    {
        return new ProvenanceScopeResult
        {
            Success = true,
            ProvenanceScopeId = null,
            WasCreated = false
        };
    }
}

View File

@@ -0,0 +1,120 @@
// -----------------------------------------------------------------------------
// ProvenanceScope.cs
// Sprint: SPRINT_8200_0015_0001 (Backport Integration)
// Task: BACKPORT-8200-001
// Description: Domain model for distro-specific provenance tracking.
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Merge.Backport;
/// <summary>
/// Distro-specific provenance for a canonical advisory.
/// Tracks backport versions, patch lineage, and evidence confidence.
/// </summary>
public sealed record ProvenanceScope
{
    /// <summary>Unique identifier.</summary>
    public Guid Id { get; init; }

    /// <summary>Referenced canonical advisory.</summary>
    public required Guid CanonicalId { get; init; }

    /// <summary>Linux distribution release (e.g., 'debian:bookworm', 'rhel:9.2', 'ubuntu:22.04').</summary>
    public required string DistroRelease { get; init; }

    /// <summary>Distro's backported version if different from upstream fixed version.</summary>
    public string? BackportSemver { get; init; }

    /// <summary>Upstream commit SHA or patch identifier.</summary>
    public string? PatchId { get; init; }

    /// <summary>Source of the patch; null when origin has not been determined.</summary>
    public PatchOrigin? PatchOrigin { get; init; }

    /// <summary>Reference to BackportProofService evidence in proofchain.</summary>
    public Guid? EvidenceRef { get; init; }

    /// <summary>Confidence score from BackportProofService (0.0-1.0).</summary>
    public double Confidence { get; init; }

    /// <summary>Record creation timestamp.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>Last update timestamp.</summary>
    public DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>
/// Source of a patch in provenance tracking.
/// </summary>
public enum PatchOrigin
{
    /// <summary>Unknown or unspecified origin.</summary>
    Unknown = 0,

    /// <summary>Patch from upstream project.</summary>
    Upstream = 1,

    /// <summary>Distro-specific patch by maintainers.</summary>
    Distro = 2,

    /// <summary>Vendor-specific patch.</summary>
    Vendor = 3
}
/// <summary>
/// Evidence used in backport determination.
/// </summary>
public sealed record BackportEvidence
{
    /// <summary>CVE identifier.</summary>
    public required string CveId { get; init; }

    /// <summary>Package PURL.</summary>
    public required string PackagePurl { get; init; }

    /// <summary>Linux distribution release.</summary>
    public required string DistroRelease { get; init; }

    /// <summary>Evidence tier (quality level; lower numbered tiers are higher quality).</summary>
    public BackportEvidenceTier Tier { get; init; }

    /// <summary>Confidence score (0.0-1.0).</summary>
    public double Confidence { get; init; }

    /// <summary>Upstream commit SHA or patch identifier.</summary>
    public string? PatchId { get; init; }

    /// <summary>Distro's backported version.</summary>
    public string? BackportVersion { get; init; }

    /// <summary>Origin of the patch.</summary>
    public PatchOrigin PatchOrigin { get; init; }

    /// <summary>Reference to the proof blob ID for traceability.</summary>
    public string? ProofId { get; init; }

    /// <summary>When the evidence was collected.</summary>
    public DateTimeOffset EvidenceDate { get; init; }
}
/// <summary>
/// Tiers of backport evidence quality.
/// </summary>
// NOTE: numeric ordering is tier number, not quality ranking — tier 1 (DistroAdvisory)
// is the strongest evidence and tier 4 (BinaryFingerprint) the weakest.
public enum BackportEvidenceTier
{
    /// <summary>No evidence found.</summary>
    None = 0,

    /// <summary>Tier 1: Direct distro advisory confirms fix.</summary>
    DistroAdvisory = 1,

    /// <summary>Tier 2: Changelog mentions CVE.</summary>
    ChangelogMention = 2,

    /// <summary>Tier 3: Patch header or HunkSig match.</summary>
    PatchHeader = 3,

    /// <summary>Tier 4: Binary fingerprint match.</summary>
    BinaryFingerprint = 4
}

View File

@@ -0,0 +1,338 @@
// -----------------------------------------------------------------------------
// ProvenanceScopeService.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Tasks: BACKPORT-8200-014, BACKPORT-8200-015, BACKPORT-8200-016
// Description: Service for managing provenance scope lifecycle
// -----------------------------------------------------------------------------
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Merge.Backport;
/// <summary>
/// Service for managing provenance scope during canonical advisory lifecycle.
/// </summary>
/// <summary>
/// Service for managing provenance scope during canonical advisory lifecycle.
/// </summary>
public sealed partial class ProvenanceScopeService : IProvenanceScopeService
{
    private readonly IProvenanceScopeStore _store;
    private readonly IBackportEvidenceResolver? _evidenceResolver;
    private readonly ILogger<ProvenanceScopeService> _logger;

    public ProvenanceScopeService(
        IProvenanceScopeStore store,
        ILogger<ProvenanceScopeService> logger,
        IBackportEvidenceResolver? evidenceResolver = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _evidenceResolver = evidenceResolver; // Optional - if not provided, uses advisory data only
    }

    /// <inheritdoc />
    public async Task<ProvenanceScopeResult> CreateOrUpdateAsync(
        ProvenanceScopeRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        _logger.LogDebug(
            "Creating/updating provenance scope for canonical {CanonicalId}, source {Source}",
            request.CanonicalId, request.Source);

        // 1. Extract distro release from package PURL
        var distroRelease = ExtractDistroRelease(request.PackagePurl, request.Source);

        // 2. Resolve backport evidence if resolver is available
        BackportEvidence? evidence = null;
        if (_evidenceResolver is not null && request.ResolveEvidence)
        {
            try
            {
                evidence = await _evidenceResolver.ResolveAsync(
                    request.CveId,
                    request.PackagePurl,
                    ct).ConfigureAwait(false);
                if (evidence is not null)
                {
                    _logger.LogDebug(
                        "Resolved backport evidence for {CveId}/{Package}: tier={Tier}, confidence={Confidence:P0}",
                        request.CveId, request.PackagePurl, evidence.Tier, evidence.Confidence);
                }
            }
            catch (Exception ex)
            {
                // Evidence resolution is best-effort; fall back to advisory-only data.
                _logger.LogWarning(
                    ex,
                    "Failed to resolve backport evidence for {CveId}/{Package}",
                    request.CveId, request.PackagePurl);
            }
        }

        // 3. Check for existing scope
        var existing = await _store.GetByCanonicalAndDistroAsync(
            request.CanonicalId,
            distroRelease,
            ct).ConfigureAwait(false);

        // 4. Prepare scope data.
        // Fix: capture "now" once so a freshly created scope gets CreatedAt == UpdatedAt
        // instead of two slightly different DateTimeOffset.UtcNow samples.
        var now = DateTimeOffset.UtcNow;
        var scope = new ProvenanceScope
        {
            Id = existing?.Id ?? Guid.NewGuid(),
            CanonicalId = request.CanonicalId,
            DistroRelease = distroRelease,
            BackportSemver = evidence?.BackportVersion ?? request.FixedVersion,
            PatchId = evidence?.PatchId ?? ExtractPatchId(request.PatchLineage),
            PatchOrigin = evidence?.PatchOrigin ?? DeterminePatchOrigin(request.Source),
            EvidenceRef = null, // Will be linked separately
            Confidence = evidence?.Confidence ?? DetermineDefaultConfidence(request.Source),
            CreatedAt = existing?.CreatedAt ?? now,
            UpdatedAt = now
        };

        // 5. Upsert scope
        var scopeId = await _store.UpsertAsync(scope, ct).ConfigureAwait(false);
        _logger.LogInformation(
            "{Action} provenance scope {ScopeId} for canonical {CanonicalId} ({Distro})",
            existing is null ? "Created" : "Updated",
            scopeId, request.CanonicalId, distroRelease);

        return existing is null
            ? ProvenanceScopeResult.Created(scopeId)
            : ProvenanceScopeResult.Updated(scopeId);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<ProvenanceScope>> GetByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default)
    {
        return await _store.GetByCanonicalIdAsync(canonicalId, ct).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async Task<ProvenanceScopeResult> UpdateFromEvidenceAsync(
        Guid canonicalId,
        BackportEvidence evidence,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(evidence);

        _logger.LogDebug(
            "Updating provenance scope for canonical {CanonicalId} from evidence (tier={Tier})",
            canonicalId, evidence.Tier);

        // Check for existing scope
        var existing = await _store.GetByCanonicalAndDistroAsync(
            canonicalId,
            evidence.DistroRelease,
            ct).ConfigureAwait(false);

        // Only update if evidence is better (higher tier or confidence).
        // NOTE: the skip path reports Updated(existing.Id) even though nothing was
        // written — callers only rely on Success + scope ID here.
        if (existing is not null &&
            existing.Confidence >= evidence.Confidence &&
            !string.IsNullOrEmpty(existing.PatchId))
        {
            _logger.LogDebug(
                "Skipping update - existing scope has equal/better confidence ({Existing:P0} >= {New:P0})",
                existing.Confidence, evidence.Confidence);
            return ProvenanceScopeResult.Updated(existing.Id);
        }

        // Capture one timestamp for the same CreatedAt/UpdatedAt consistency as above.
        var now = DateTimeOffset.UtcNow;
        var scope = new ProvenanceScope
        {
            Id = existing?.Id ?? Guid.NewGuid(),
            CanonicalId = canonicalId,
            DistroRelease = evidence.DistroRelease,
            BackportSemver = evidence.BackportVersion,
            PatchId = evidence.PatchId,
            PatchOrigin = evidence.PatchOrigin,
            EvidenceRef = null,
            Confidence = evidence.Confidence,
            CreatedAt = existing?.CreatedAt ?? now,
            UpdatedAt = now
        };

        var scopeId = await _store.UpsertAsync(scope, ct).ConfigureAwait(false);
        _logger.LogInformation(
            "Updated provenance scope {ScopeId} from evidence (tier={Tier}, confidence={Confidence:P0})",
            scopeId, evidence.Tier, evidence.Confidence);

        return existing is null
            ? ProvenanceScopeResult.Created(scopeId)
            : ProvenanceScopeResult.Updated(scopeId);
    }

    /// <inheritdoc />
    public async Task LinkEvidenceRefAsync(
        Guid provenanceScopeId,
        Guid evidenceRef,
        CancellationToken ct = default)
    {
        _logger.LogDebug(
            "Linking evidence ref {EvidenceRef} to provenance scope {ScopeId}",
            evidenceRef, provenanceScopeId);
        await _store.LinkEvidenceRefAsync(provenanceScopeId, evidenceRef, ct).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async Task DeleteByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default)
    {
        await _store.DeleteByCanonicalIdAsync(canonicalId, ct).ConfigureAwait(false);
        _logger.LogDebug(
            "Deleted provenance scopes for canonical {CanonicalId}",
            canonicalId);
    }

    #region Helper Methods

    /// <summary>
    /// Derives "distro:release" from the PURL, falling back to the lowercased
    /// source name when the PURL carries no distro information.
    /// </summary>
    private static string ExtractDistroRelease(string packagePurl, string source)
    {
        // Try to extract from PURL first
        var match = PurlDistroRegex().Match(packagePurl);
        if (match.Success)
        {
            // Group 2 is the distro name (debian, ubuntu, etc.), Group 1 is package type (deb, rpm, apk)
            var purlDistro = match.Groups[2].Value.ToLowerInvariant();

            // Try to get release from version
            var versionMatch = PurlVersionRegex().Match(packagePurl);
            if (versionMatch.Success)
            {
                var version = versionMatch.Groups[1].Value;

                // Debian: ~deb11, ~deb12
                var debMatch = DebianReleaseRegex().Match(version);
                if (debMatch.Success)
                {
                    return $"{purlDistro}:{MapDebianCodename(debMatch.Groups[1].Value)}";
                }

                // RHEL: .el7, .el8, .el9
                var rhelMatch = RhelReleaseRegex().Match(version);
                if (rhelMatch.Success)
                {
                    return $"{purlDistro}:{rhelMatch.Groups[1].Value}";
                }

                // Ubuntu: ~22.04
                var ubuntuMatch = UbuntuReleaseRegex().Match(version);
                if (ubuntuMatch.Success)
                {
                    return $"{purlDistro}:{ubuntuMatch.Groups[1].Value}";
                }
            }
            return purlDistro;
        }

        // Fall back to source name
        return source.ToLowerInvariant();
    }

    /// <summary>Maps a numeric Debian release to its codename (unknown numbers pass through).</summary>
    private static string MapDebianCodename(string version)
    {
        return version switch
        {
            "10" => "buster",
            "11" => "bullseye",
            "12" => "bookworm",
            "13" => "trixie",
            _ => version
        };
    }

    /// <summary>Extracts a normalized commit SHA from a patch lineage string, or the trimmed lineage itself.</summary>
    private static string? ExtractPatchId(string? patchLineage)
    {
        if (string.IsNullOrWhiteSpace(patchLineage))
        {
            return null;
        }

        // Try to extract commit SHA
        var shaMatch = CommitShaRegex().Match(patchLineage);
        if (shaMatch.Success)
        {
            return shaMatch.Value.ToLowerInvariant();
        }
        return patchLineage.Trim();
    }

    /// <summary>Classifies the patch origin from the advisory source name.</summary>
    private static PatchOrigin DeterminePatchOrigin(string source)
    {
        return source.ToLowerInvariant() switch
        {
            "debian" or "redhat" or "suse" or "ubuntu" or "alpine" or "astra" => PatchOrigin.Distro,
            "vendor" or "cisco" or "oracle" or "microsoft" or "adobe" => PatchOrigin.Vendor,
            _ => PatchOrigin.Upstream
        };
    }

    /// <summary>Default confidence used when no proof-service evidence is available.</summary>
    private static double DetermineDefaultConfidence(string source)
    {
        // Distro sources have higher default confidence
        return source.ToLowerInvariant() switch
        {
            "debian" or "redhat" or "suse" or "ubuntu" or "alpine" => 0.7,
            "vendor" or "cisco" or "oracle" => 0.8,
            _ => 0.5
        };
    }

    // Captures package type (group 1) and distro namespace (group 2) from a PURL.
    [GeneratedRegex(@"pkg:(deb|rpm|apk)/([^/]+)/")]
    private static partial Regex PurlDistroRegex();

    // Captures the version segment after the last '@' in a PURL.
    [GeneratedRegex(@"@([^@]+)$")]
    private static partial Regex PurlVersionRegex();

    // Captures the Debian release number from "~debNN" / "+debNN" version suffixes.
    [GeneratedRegex(@"[+~]deb(\d+)")]
    private static partial Regex DebianReleaseRegex();

    // Captures the RHEL major release from ".elN" version suffixes.
    [GeneratedRegex(@"\.el(\d+)")]
    private static partial Regex RhelReleaseRegex();

    // Captures an Ubuntu release like "22.04" from "~NN.NN" / "+NN.NN" suffixes.
    [GeneratedRegex(@"[+~](\d+\.\d+)")]
    private static partial Regex UbuntuReleaseRegex();

    // Matches a full 40-hex-character git commit SHA (case-insensitive).
    [GeneratedRegex(@"[0-9a-f]{40}", RegexOptions.IgnoreCase)]
    private static partial Regex CommitShaRegex();

    #endregion
}
/// <summary>
/// Store interface for provenance scope persistence.
/// </summary>
public interface IProvenanceScopeStore
{
    /// <summary>Gets the scope for a (canonical, distro release) pair, or null when none exists.</summary>
    Task<ProvenanceScope?> GetByCanonicalAndDistroAsync(
        Guid canonicalId,
        string distroRelease,
        CancellationToken ct = default);

    /// <summary>Gets all scopes associated with a canonical advisory.</summary>
    Task<IReadOnlyList<ProvenanceScope>> GetByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default);

    /// <summary>Inserts or updates a scope; returns the persisted scope ID.</summary>
    Task<Guid> UpsertAsync(
        ProvenanceScope scope,
        CancellationToken ct = default);

    /// <summary>Sets the proofchain evidence reference on an existing scope.</summary>
    Task LinkEvidenceRefAsync(
        Guid provenanceScopeId,
        Guid evidenceRef,
        CancellationToken ct = default);

    /// <summary>Deletes all scopes for a canonical advisory.</summary>
    Task DeleteByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,82 @@
// -----------------------------------------------------------------------------
// BackportServiceCollectionExtensions.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-023
// Description: DI registration for backport-related services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Concelier.Merge.Backport;
using StellaOps.Concelier.Merge.Precedence;
namespace StellaOps.Concelier.Merge;
/// <summary>
/// Extensions for registering backport-related services.
/// </summary>
/// <summary>
/// Extensions for registering backport-related services.
/// </summary>
public static class BackportServiceCollectionExtensions
{
    /// <summary>
    /// Adds backport-related services including provenance scope management and source precedence,
    /// reading precedence tuning from the "concelier:merge:precedence" configuration section.
    /// </summary>
    public static IServiceCollection AddBackportServices(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        var section = configuration.GetSection("concelier:merge:precedence");

        // IOptions<PrecedenceConfig> is built lazily from the captured section.
        services.AddSingleton(_ =>
            Microsoft.Extensions.Options.Options.Create(BuildPrecedenceConfig(section)));

        RegisterBackportCore(services);
        return services;
    }

    /// <summary>
    /// Adds backport services with default configuration.
    /// </summary>
    public static IServiceCollection AddBackportServices(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.AddSingleton(_ => Microsoft.Extensions.Options.Options.Create(new PrecedenceConfig()));
        RegisterBackportCore(services);
        return services;
    }

    /// <summary>Builds a PrecedenceConfig from the configuration section, falling back to defaults per key.</summary>
    private static PrecedenceConfig BuildPrecedenceConfig(IConfigurationSection section)
    {
        var defaults = new PrecedenceConfig();
        if (!section.Exists())
        {
            return defaults;
        }

        return new PrecedenceConfig
        {
            BackportBoostThreshold = section.GetValue<double?>("backportBoostThreshold") ?? defaults.BackportBoostThreshold,
            BackportBoostAmount = section.GetValue<int?>("backportBoostAmount") ?? defaults.BackportBoostAmount,
            EnableBackportBoost = section.GetValue<bool?>("enableBackportBoost") ?? defaults.EnableBackportBoost
        };
    }

    /// <summary>Registers the lattice, provenance scope service, and evidence resolver (TryAdd — caller overrides win).</summary>
    private static void RegisterBackportCore(IServiceCollection services)
    {
        services.TryAddSingleton<ISourcePrecedenceLattice, ConfigurableSourcePrecedenceLattice>();
        services.TryAddScoped<IProvenanceScopeService, ProvenanceScopeService>();
        services.TryAddScoped<IBackportEvidenceResolver, BackportEvidenceResolver>();
    }
}

View File

@@ -34,9 +34,11 @@ public sealed partial class PatchLineageNormalizer : IPatchLineageNormalizer
/// <summary>
/// Pattern for GitHub/GitLab commit URLs.
/// GitHub: /owner/repo/commit/sha
/// GitLab: /owner/repo/-/commit/sha
/// </summary>
[GeneratedRegex(
@"(?:github\.com|gitlab\.com)/[^/]+/[^/]+/commit/([0-9a-f]{7,40})",
@"(?:github\.com|gitlab\.com)/[^/]+/[^/]+(?:/-)?/commit/([0-9a-f]{7,40})",
RegexOptions.IgnoreCase | RegexOptions.Compiled)]
private static partial Regex CommitUrlPattern();

View File

@@ -0,0 +1,284 @@
// -----------------------------------------------------------------------------
// ConfigurableSourcePrecedenceLattice.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Tasks: BACKPORT-8200-019, BACKPORT-8200-020, BACKPORT-8200-021
// Description: Configurable source precedence with backport-aware overrides
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Merge.Backport;
namespace StellaOps.Concelier.Merge.Precedence;
/// <summary>
/// Configurable source precedence lattice with backport-aware dynamic overrides.
/// Distro sources with high-confidence backport evidence can take precedence
/// over upstream/vendor sources for affected CVE contexts.
/// </summary>
/// <summary>
/// Configurable source precedence lattice with backport-aware dynamic overrides.
/// Distro sources with high-confidence backport evidence can take precedence
/// over upstream/vendor sources for affected CVE contexts.
/// Lower precedence values mean higher priority throughout.
/// </summary>
public sealed class ConfigurableSourcePrecedenceLattice : ISourcePrecedenceLattice
{
    /// <summary>
    /// Minimum confidence required for the boost when evidence is tier 3-4
    /// (patch header / binary fingerprint). Named to replace the magic 0.9.
    /// </summary>
    private const double HighTierMinimumConfidence = 0.9;

    /// <summary>Precedence assigned to sources missing from the configured table.</summary>
    private const int UnknownSourcePrecedence = 1000;

    private readonly PrecedenceConfig _config;
    private readonly ILogger<ConfigurableSourcePrecedenceLattice> _logger;

    /// <summary>
    /// Sources that are considered distro sources for backport boost eligibility.
    /// </summary>
    private static readonly HashSet<string> DistroSources = new(StringComparer.OrdinalIgnoreCase)
    {
        "debian",
        "redhat",
        "suse",
        "ubuntu",
        "alpine",
        "astra",
        "centos",
        "fedora",
        "rocky",
        "alma",
        "oracle-linux"
    };

    public ConfigurableSourcePrecedenceLattice(
        IOptions<PrecedenceConfig> options,
        ILogger<ConfigurableSourcePrecedenceLattice> logger)
    {
        _config = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Creates a lattice with default configuration.
    /// </summary>
    public ConfigurableSourcePrecedenceLattice(ILogger<ConfigurableSourcePrecedenceLattice> logger)
        : this(Microsoft.Extensions.Options.Options.Create(new PrecedenceConfig()), logger)
    {
    }

    /// <inheritdoc />
    public int BackportBoostAmount => _config.BackportBoostAmount;

    /// <inheritdoc />
    public double BackportBoostThreshold => _config.BackportBoostThreshold;

    /// <inheritdoc />
    public int GetPrecedence(string source, BackportContext? context = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(source);

        var normalizedSource = source.ToLowerInvariant();

        // 1. Check for CVE-specific override first
        if (context is not null)
        {
            var overrideKey = $"{context.CveId}:{normalizedSource}";
            if (_config.Overrides.TryGetValue(overrideKey, out var cveOverride))
            {
                _logger.LogDebug(
                    "Using CVE-specific override for {Source} on {CveId}: {Precedence}",
                    source, context.CveId, cveOverride);
                return cveOverride;
            }
        }

        // 2. Get base precedence
        var basePrecedence = GetBasePrecedence(normalizedSource);

        // 3. Apply backport boost if eligible (boost lowers the value, raising priority)
        if (context is not null && ShouldApplyBackportBoost(normalizedSource, context))
        {
            var boostedPrecedence = basePrecedence - _config.BackportBoostAmount;
            _logger.LogDebug(
                "Applied backport boost to {Source}: {Base} -> {Boosted} (evidence tier={Tier}, confidence={Confidence:P0})",
                source, basePrecedence, boostedPrecedence, context.EvidenceTier, context.EvidenceConfidence);
            return boostedPrecedence;
        }

        return basePrecedence;
    }

    /// <inheritdoc />
    public SourceComparison Compare(
        string source1,
        string source2,
        BackportContext? context = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(source1);
        ArgumentException.ThrowIfNullOrWhiteSpace(source2);

        var precedence1 = GetPrecedence(source1, context);
        var precedence2 = GetPrecedence(source2, context);

        // Lower precedence value = higher priority
        if (precedence1 < precedence2)
        {
            return SourceComparison.Source1Higher;
        }
        if (precedence2 < precedence1)
        {
            return SourceComparison.Source2Higher;
        }
        return SourceComparison.Equal;
    }

    /// <inheritdoc />
    public bool IsDistroSource(string source)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(source);
        return DistroSources.Contains(source);
    }

    /// <summary>
    /// Gets the base precedence for a source without any context-dependent boosts.
    /// </summary>
    private int GetBasePrecedence(string normalizedSource)
    {
        if (_config.DefaultPrecedence.TryGetValue(normalizedSource, out var configured))
        {
            return configured;
        }

        // Unknown sources get lowest priority
        _logger.LogDebug(
            "Unknown source '{Source}' - assigning default precedence 1000",
            normalizedSource);
        return UnknownSourcePrecedence;
    }

    /// <summary>
    /// Determines if backport boost should be applied to a source in the given context.
    /// </summary>
    private bool ShouldApplyBackportBoost(string normalizedSource, BackportContext context)
    {
        // Only distro sources are eligible for backport boost
        if (!IsDistroSource(normalizedSource))
        {
            return false;
        }

        // Boost must be enabled in config
        if (!_config.EnableBackportBoost)
        {
            return false;
        }

        // Must have backport evidence
        if (!context.HasBackportEvidence)
        {
            return false;
        }

        // Confidence must meet threshold
        if (context.EvidenceConfidence < _config.BackportBoostThreshold)
        {
            _logger.LogDebug(
                "Backport evidence confidence {Confidence:P0} below threshold {Threshold:P0} for {Source}",
                context.EvidenceConfidence, _config.BackportBoostThreshold, normalizedSource);
            return false;
        }

        // Evidence tier 1-2 gets boost (direct advisory or changelog mention).
        // Tier 3-4 (patch header, binary fingerprint) are lower quality and require
        // confidence of at least HighTierMinimumConfidence.
        if (context.EvidenceTier >= BackportEvidenceTier.PatchHeader &&
            context.EvidenceConfidence < HighTierMinimumConfidence)
        {
            _logger.LogDebug(
                "Lower tier evidence (tier={Tier}) requires 90% confidence, got {Confidence:P0}",
                context.EvidenceTier, context.EvidenceConfidence);
            return false;
        }

        return true;
    }
}
/// <summary>
/// Exception rule for source precedence that can override defaults for specific CVE patterns.
/// </summary>
public sealed record PrecedenceExceptionRule
{
    /// <summary>
    /// CVE pattern to match; a trailing '*' makes it a prefix pattern
    /// (CVE-2024-*), anything else is an exact match (CVE-2024-1234).
    /// </summary>
    public required string CvePattern { get; init; }

    /// <summary>
    /// Source identifier this rule applies to.
    /// </summary>
    public required string Source { get; init; }

    /// <summary>
    /// Precedence value to use when the rule matches.
    /// </summary>
    public required int Precedence { get; init; }

    /// <summary>
    /// Optional comment explaining why this exception exists.
    /// </summary>
    public string? Reason { get; init; }

    /// <summary>
    /// Whether this rule is currently active.
    /// </summary>
    public bool IsActive { get; init; } = true;

    /// <summary>
    /// Checks if this rule matches the given CVE ID.
    /// Matching is case-insensitive; null/blank CVE IDs never match.
    /// </summary>
    public bool Matches(string cveId)
    {
        if (string.IsNullOrWhiteSpace(cveId))
        {
            return false;
        }

        // Trailing wildcard -> prefix match; otherwise exact match.
        return CvePattern.EndsWith('*')
            ? cveId.StartsWith(CvePattern[..^1], StringComparison.OrdinalIgnoreCase)
            : string.Equals(cveId, CvePattern, StringComparison.OrdinalIgnoreCase);
    }
}
/// <summary>
/// Extended precedence configuration with exception rules.
/// Uses composition to extend PrecedenceConfig.
/// </summary>
public sealed record ExtendedPrecedenceConfig
{
    /// <summary>
    /// Base precedence configuration.
    /// </summary>
    public PrecedenceConfig BaseConfig { get; init; } = new();

    /// <summary>
    /// Exception rules that override default precedence for matching CVEs.
    /// </summary>
    public List<PrecedenceExceptionRule> ExceptionRules { get; init; } = [];

    /// <summary>
    /// Gets all active exception rules.
    /// </summary>
    public IEnumerable<PrecedenceExceptionRule> GetActiveRules() =>
        ExceptionRules.Where(r => r.IsActive);

    /// <summary>
    /// Finds the first matching exception rule for a CVE/source combination.
    /// </summary>
    /// <param name="cveId">CVE identifier to match against rule patterns.</param>
    /// <param name="source">Source identifier; compared case-insensitively.</param>
    /// <returns>The first active rule matching both source and CVE, or null.</returns>
    public PrecedenceExceptionRule? FindMatchingRule(string cveId, string source)
    {
        // Fix: the comparison below already uses OrdinalIgnoreCase, so the
        // previous source.ToLowerInvariant() call was a redundant allocation
        // and threw NullReferenceException for a null source. string.Equals
        // now treats a null source as matching nothing.
        return GetActiveRules()
            .FirstOrDefault(r =>
                string.Equals(r.Source, source, StringComparison.OrdinalIgnoreCase) &&
                r.Matches(cveId));
    }
}

View File

@@ -0,0 +1,184 @@
// -----------------------------------------------------------------------------
// ISourcePrecedenceLattice.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-018
// Description: Interface for configurable source precedence with backport awareness
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Merge.Backport;
namespace StellaOps.Concelier.Merge.Precedence;
/// <summary>
/// Lattice for determining source precedence in merge decisions.
/// Supports backport-aware overrides where distro sources with backport
/// evidence can take precedence over upstream/vendor sources.
/// </summary>
/// <remarks>
/// Precedence is expressed as an integer rank where a LOWER value means
/// a HIGHER priority; the backport boost subtracts from a distro's rank.
/// </remarks>
public interface ISourcePrecedenceLattice
{
    /// <summary>
    /// Gets the precedence rank for a source (lower = higher priority).
    /// </summary>
    /// <param name="source">Source identifier (debian, redhat, nvd, etc.)</param>
    /// <param name="context">Optional backport context for dynamic precedence</param>
    /// <returns>Precedence rank (lower values = higher priority)</returns>
    int GetPrecedence(string source, BackportContext? context = null);
    /// <summary>
    /// Compares two sources to determine which takes precedence.
    /// </summary>
    /// <param name="source1">First source identifier</param>
    /// <param name="source2">Second source identifier</param>
    /// <param name="context">Optional backport context for dynamic precedence</param>
    /// <returns>Comparison result indicating which source has higher precedence</returns>
    SourceComparison Compare(
        string source1,
        string source2,
        BackportContext? context = null);
    /// <summary>
    /// Checks if a source is a distro source that benefits from backport boost.
    /// </summary>
    bool IsDistroSource(string source);
    /// <summary>
    /// Gets the backport boost amount applied to distro sources with evidence.
    /// Subtracted from the rank, making the source more preferred.
    /// </summary>
    int BackportBoostAmount { get; }
    /// <summary>
    /// Gets the minimum confidence threshold for backport boost to apply.
    /// </summary>
    double BackportBoostThreshold { get; }
}
/// <summary>
/// Context for backport-aware precedence decisions.
/// </summary>
public sealed record BackportContext
{
    /// <summary>
    /// CVE identifier being evaluated.
    /// </summary>
    public required string CveId { get; init; }
    /// <summary>
    /// Distro release context (e.g., debian:bookworm).
    /// </summary>
    public string? DistroRelease { get; init; }
    /// <summary>
    /// Whether backport evidence exists for this CVE/distro.
    /// </summary>
    public bool HasBackportEvidence { get; init; }
    /// <summary>
    /// Confidence score from backport evidence (0.0-1.0).
    /// </summary>
    public double EvidenceConfidence { get; init; }
    /// <summary>
    /// Evidence tier (1-4).
    /// </summary>
    public BackportEvidenceTier EvidenceTier { get; init; }
    /// <summary>
    /// Creates context indicating no backport evidence.
    /// Confidence and tier are left at their default values.
    /// </summary>
    /// <param name="cveId">CVE identifier being evaluated.</param>
    public static BackportContext NoEvidence(string cveId) => new()
    {
        CveId = cveId,
        HasBackportEvidence = false
    };
    /// <summary>
    /// Creates context from backport evidence, copying its confidence and tier.
    /// </summary>
    /// <param name="evidence">Evidence record to project into a context.</param>
    public static BackportContext FromEvidence(BackportEvidence evidence) => new()
    {
        CveId = evidence.CveId,
        DistroRelease = evidence.DistroRelease,
        HasBackportEvidence = true,
        EvidenceConfidence = evidence.Confidence,
        EvidenceTier = evidence.Tier
    };
}
/// <summary>
/// Result of source precedence comparison.
/// Produced by <see cref="ISourcePrecedenceLattice.Compare"/>, where the
/// source with the lower precedence rank is the "higher" (preferred) one.
/// </summary>
public enum SourceComparison
{
    /// <summary>Source1 has higher precedence (should be preferred).</summary>
    Source1Higher,
    /// <summary>Source2 has higher precedence (should be preferred).</summary>
    Source2Higher,
    /// <summary>Both sources have equal precedence.</summary>
    Equal
}
/// <summary>
/// Configuration for source precedence rules.
/// </summary>
public sealed record PrecedenceConfig
{
    /// <summary>
    /// Default precedence ranks by source (lower = higher priority).
    /// Lookup is case-insensitive (OrdinalIgnoreCase).
    /// </summary>
    public Dictionary<string, int> DefaultPrecedence { get; init; } = new(StringComparer.OrdinalIgnoreCase)
    {
        // Vendor PSIRT sources (highest priority)
        ["vendor-psirt"] = 10,
        ["cisco"] = 10,
        ["oracle"] = 10,
        ["microsoft"] = 10,
        ["adobe"] = 10,
        // Distro sources
        ["debian"] = 20,
        ["redhat"] = 20,
        ["suse"] = 20,
        ["ubuntu"] = 20,
        ["alpine"] = 20,
        ["astra"] = 20,
        // Aggregated sources
        ["osv"] = 30,
        ["ghsa"] = 35,
        // NVD (baseline)
        ["nvd"] = 40,
        // CERT sources
        ["cert-cc"] = 50,
        ["cert-bund"] = 50,
        ["cert-fr"] = 50,
        // Community/fallback
        ["community"] = 100
    };
    /// <summary>
    /// Specific CVE/source pair overrides.
    /// Format: "CVE-2024-1234:debian" -> precedence value.
    /// </summary>
    public Dictionary<string, int> Overrides { get; init; } = new(StringComparer.OrdinalIgnoreCase);
    /// <summary>
    /// Minimum confidence for backport boost to apply.
    /// </summary>
    public double BackportBoostThreshold { get; init; } = 0.7;
    /// <summary>
    /// Precedence points subtracted for distro with backport evidence.
    /// Lower = higher priority, so subtracting makes the source more preferred.
    /// Default of 15 lifts a distro (20) above "vendor-psirt" (10) territory? No:
    /// 20 - 15 = 5, which outranks vendors at 10 — this is the intended override.
    /// </summary>
    public int BackportBoostAmount { get; init; } = 15;
    /// <summary>
    /// Whether to enable backport-aware precedence boost.
    /// </summary>
    public bool EnableBackportBoost { get; init; } = true;
}

View File

@@ -13,6 +13,8 @@ using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage.Aliases;
using StellaOps.Concelier.Storage.MergeEvents;
using StellaOps.Messaging.Abstractions;
using StellaOps.Provcache.Events;
using System.Text.Json;
using StellaOps.Provenance;
@@ -43,6 +45,7 @@ public sealed class AdvisoryMergeService
private readonly TimeProvider _timeProvider;
private readonly CanonicalMerger _canonicalMerger;
private readonly IMergeHashCalculator? _mergeHashCalculator;
private readonly IEventStream<FeedEpochAdvancedEvent>? _feedEpochEventStream;
private readonly ILogger<AdvisoryMergeService> _logger;
public AdvisoryMergeService(
@@ -54,7 +57,8 @@ public sealed class AdvisoryMergeService
IAdvisoryEventLog eventLog,
TimeProvider timeProvider,
ILogger<AdvisoryMergeService> logger,
IMergeHashCalculator? mergeHashCalculator = null)
IMergeHashCalculator? mergeHashCalculator = null,
IEventStream<FeedEpochAdvancedEvent>? feedEpochEventStream = null)
{
_aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
@@ -65,6 +69,7 @@ public sealed class AdvisoryMergeService
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_mergeHashCalculator = mergeHashCalculator; // Optional during migration
_feedEpochEventStream = feedEpochEventStream; // Optional for feed epoch invalidation
}
public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken)
@@ -141,9 +146,93 @@ public sealed class AdvisoryMergeService
var conflictSummaries = await AppendEventLogAsync(canonicalKey, normalizedInputs, merged, conflictDetails, cancellationToken).ConfigureAwait(false);
// Publish FeedEpochAdvancedEvent if merge produced changes
await PublishFeedEpochAdvancedAsync(before, merged, inputs, cancellationToken).ConfigureAwait(false);
return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged, conflictSummaries);
}
/// <summary>
/// Publishes a FeedEpochAdvancedEvent when merge produces a new or modified canonical advisory.
/// This triggers Provcache invalidation for cached decisions based on older feed data.
/// No-ops when the optional event stream is not configured or when the merge
/// produced no hash change; publish failures are logged and swallowed.
/// </summary>
/// <param name="before">Canonical advisory prior to the merge, or null when newly created.</param>
/// <param name="merged">Canonical advisory produced by the merge.</param>
/// <param name="inputs">Source advisories that fed the merge; used to derive the feed id.</param>
/// <param name="cancellationToken">Token used to cancel the publish.</param>
private async Task PublishFeedEpochAdvancedAsync(
    Advisory? before,
    Advisory merged,
    IReadOnlyList<Advisory> inputs,
    CancellationToken cancellationToken)
{
    if (_feedEpochEventStream is null)
    {
        return;
    }
    // Determine if this is a new or modified canonical
    // NOTE(review): "!=" assumes MergeHash has value-equality semantics
    // (e.g., a string); confirm it is not a reference-compared byte[].
    var isNew = before is null;
    var isModified = before is not null && before.MergeHash != merged.MergeHash;
    if (!isNew && !isModified)
    {
        return; // No change, no need to publish
    }
    // Extract primary source from inputs for feedId
    var feedId = ExtractPrimaryFeedId(inputs) ?? "canonical";
    // Compute epochs based on modification timestamps; "initial" marks a
    // canonical that had no prior state.
    var previousEpoch = before?.Modified?.ToString("O") ?? "initial";
    var newEpoch = merged.Modified?.ToString("O") ?? _timeProvider.GetUtcNow().ToString("O");
    var effectiveAt = _timeProvider.GetUtcNow();
    var @event = FeedEpochAdvancedEvent.Create(
        feedId: feedId,
        previousEpoch: previousEpoch,
        newEpoch: newEpoch,
        effectiveAt: effectiveAt,
        advisoriesAdded: isNew ? 1 : 0,
        advisoriesModified: isModified ? 1 : 0);
    try
    {
        await _feedEpochEventStream.PublishAsync(@event, options: null, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug(
            "Published FeedEpochAdvancedEvent for feed {FeedId}: {PreviousEpoch} -> {NewEpoch}",
            feedId, previousEpoch, newEpoch);
    }
    catch (Exception ex)
    {
        // Log but don't fail the merge operation for event publishing failures
        _logger.LogWarning(
            ex,
            "Failed to publish FeedEpochAdvancedEvent for feed {FeedId}",
            feedId);
    }
}
/// <summary>
/// Extracts the primary feed identifier from merged advisory inputs:
/// the first non-blank, non-"merge" provenance source, lower-cased.
/// </summary>
private static string? ExtractPrimaryFeedId(IReadOnlyList<Advisory> inputs)
{
    foreach (var advisory in inputs)
    {
        foreach (var provenance in advisory.Provenance)
        {
            // Skip synthetic merge provenance and blank sources in one pass.
            var skip = string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase)
                || string.IsNullOrWhiteSpace(provenance.Source);
            if (skip)
            {
                continue;
            }

            return provenance.Source.ToLowerInvariant();
        }
    }

    return null;
}
private async Task<IReadOnlyList<MergeConflictSummary>> AppendEventLogAsync(
string vulnerabilityKey,
IReadOnlyList<Advisory> inputs,

View File

@@ -3,6 +3,7 @@ namespace StellaOps.Concelier.Merge.Services;
using System.Security.Cryptography;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Merge.Backport;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.MergeEvents;
@@ -35,6 +36,28 @@ public sealed class MergeEventWriter
IReadOnlyList<Guid> inputDocumentIds,
IReadOnlyList<MergeFieldDecision>? fieldDecisions,
CancellationToken cancellationToken)
{
return await AppendAsync(
advisoryKey,
before,
after,
inputDocumentIds,
fieldDecisions,
backportEvidence: null,
cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Appends a merge event with optional backport evidence for audit.
/// </summary>
public async Task<MergeEventRecord> AppendAsync(
string advisoryKey,
Advisory? before,
Advisory after,
IReadOnlyList<Guid> inputDocumentIds,
IReadOnlyList<MergeFieldDecision>? fieldDecisions,
IReadOnlyList<BackportEvidence>? backportEvidence,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);
ArgumentNullException.ThrowIfNull(after);
@@ -44,6 +67,9 @@ public sealed class MergeEventWriter
var timestamp = _timeProvider.GetUtcNow();
var documentIds = inputDocumentIds?.ToArray() ?? Array.Empty<Guid>();
// Convert backport evidence to audit decisions
var evidenceDecisions = ConvertToAuditDecisions(backportEvidence);
var record = new MergeEventRecord(
Guid.NewGuid(),
advisoryKey,
@@ -51,7 +77,8 @@ public sealed class MergeEventWriter
afterHash,
timestamp,
documentIds,
fieldDecisions ?? Array.Empty<MergeFieldDecision>());
fieldDecisions ?? Array.Empty<MergeFieldDecision>(),
evidenceDecisions);
if (!CryptographicOperations.FixedTimeEquals(beforeHash, afterHash))
{
@@ -66,7 +93,34 @@ public sealed class MergeEventWriter
_logger.LogInformation("Merge event for {AdvisoryKey} recorded without hash change", advisoryKey);
}
if (evidenceDecisions is { Count: > 0 })
{
_logger.LogDebug(
"Merge event for {AdvisoryKey} includes {Count} backport evidence decision(s)",
advisoryKey,
evidenceDecisions.Count);
}
await _mergeEventStore.AppendAsync(record, cancellationToken).ConfigureAwait(false);
return record;
}
/// <summary>
/// Projects backport evidence records into audit decision records for the
/// merge event; returns null when there is no evidence to record.
/// </summary>
private static IReadOnlyList<BackportEvidenceDecision>? ConvertToAuditDecisions(
    IReadOnlyList<BackportEvidence>? evidence)
{
    if (evidence is not { Count: > 0 })
    {
        return null;
    }

    var decisions = new BackportEvidenceDecision[evidence.Count];
    for (var i = 0; i < evidence.Count; i++)
    {
        var item = evidence[i];
        decisions[i] = new BackportEvidenceDecision(
            item.CveId,
            item.DistroRelease,
            item.Tier.ToString(),
            item.Confidence,
            item.PatchId,
            item.PatchOrigin.ToString(),
            item.ProofId,
            item.EvidenceDate);
    }

    return decisions;
}
}

View File

@@ -13,6 +13,10 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.ProofService/StellaOps.Concelier.ProofService.csproj" />
<ProjectReference Include="../../../Attestor/__Libraries/StellaOps.Attestor.ProofChain/StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Provcache/StellaOps.Provcache.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.VersionComparison/StellaOps.VersionComparison.csproj" />
</ItemGroup>
</Project>

View File

@@ -667,7 +667,8 @@ namespace StellaOps.Concelier.Storage.MergeEvents
byte[] AfterHash,
DateTimeOffset MergedAt,
IReadOnlyList<Guid> InputDocumentIds,
IReadOnlyList<MergeFieldDecision> FieldDecisions);
IReadOnlyList<MergeFieldDecision> FieldDecisions,
IReadOnlyList<BackportEvidenceDecision>? BackportEvidence = null);
public sealed record MergeFieldDecision(
string Field,
@@ -676,6 +677,19 @@ namespace StellaOps.Concelier.Storage.MergeEvents
DateTimeOffset? SelectedModified,
IReadOnlyList<string> ConsideredSources);
/// <summary>
/// Records backport evidence used in a merge decision for audit purposes.
/// </summary>
/// <param name="CveId">CVE identifier the evidence applies to.</param>
/// <param name="DistroRelease">Distro release the backport targets (e.g., debian:bookworm).</param>
/// <param name="EvidenceTier">String form of the evidence tier enum value.</param>
/// <param name="Confidence">Confidence score attached to the evidence.</param>
/// <param name="PatchId">Upstream patch/commit identifier, when known.</param>
/// <param name="PatchOrigin">String form of the patch origin enum value.</param>
/// <param name="ProofId">Reference to the proof-chain entry, when available.</param>
/// <param name="EvidenceDate">Timestamp associated with the evidence.</param>
public sealed record BackportEvidenceDecision(
    string CveId,
    string DistroRelease,
    string EvidenceTier,
    double Confidence,
    string? PatchId,
    string? PatchOrigin,
    string? ProofId,
    DateTimeOffset EvidenceDate);
public interface IMergeEventStore
{
Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken);

View File

@@ -0,0 +1,225 @@
// -----------------------------------------------------------------------------
// ScanCompletedEventHandler.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-025
// Description: Hosted service that subscribes to Scanner ScanCompleted events
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Concelier.SbomIntegration.Events;
/// <summary>
/// Background service that subscribes to Scanner ScanCompleted events
/// and triggers automatic SBOM learning.
/// Runs for the host lifetime; exits immediately when no event stream is
/// registered or the handler is disabled via <see cref="ScanCompletedHandlerOptions"/>.
/// </summary>
public sealed class ScanCompletedEventHandler : BackgroundService
{
    // Optional: when null, ExecuteAsync logs a warning and the handler is a no-op.
    private readonly IEventStream<ScanCompletedEvent>? _eventStream;
    private readonly ISbomRegistryService _sbomService;
    private readonly ILogger<ScanCompletedEventHandler> _logger;
    private readonly ScanCompletedHandlerOptions _options;

    /// <summary>
    /// Creates the handler.
    /// </summary>
    /// <param name="eventStream">Stream of ScanCompleted events; null disables the handler.</param>
    /// <param name="sbomService">Registry service used to learn SBOMs from scan results.</param>
    /// <param name="options">Handler options; a null wrapper falls back to defaults.</param>
    /// <param name="logger">Logger.</param>
    public ScanCompletedEventHandler(
        IEventStream<ScanCompletedEvent>? eventStream,
        ISbomRegistryService sbomService,
        IOptions<ScanCompletedHandlerOptions> options,
        ILogger<ScanCompletedEventHandler> logger)
    {
        _eventStream = eventStream;
        _sbomService = sbomService ?? throw new ArgumentNullException(nameof(sbomService));
        _options = options?.Value ?? new ScanCompletedHandlerOptions();
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Subscribes to the event stream from the latest position and processes
    /// events sequentially until cancellation. Cancellation is treated as a
    /// normal shutdown; any other failure of the loop is logged and rethrown
    /// so the host observes the fault.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (_eventStream is null)
        {
            _logger.LogWarning("Event stream not configured, ScanCompleted event handler disabled");
            return;
        }
        if (!_options.Enabled)
        {
            _logger.LogInformation("ScanCompleted event handler disabled by configuration");
            return;
        }
        _logger.LogInformation(
            "Starting ScanCompleted event handler, subscribing to stream {StreamName}",
            _eventStream.StreamName);
        try
        {
            await foreach (var streamEvent in _eventStream.SubscribeAsync(
                StreamPosition.End, // Start from latest events
                stoppingToken))
            {
                await ProcessEventAsync(streamEvent.Event, stoppingToken).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
        {
            _logger.LogInformation("ScanCompleted event handler stopped");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "ScanCompleted event handler failed");
            throw;
        }
    }

    /// <summary>
    /// Learns an SBOM from a single ScanCompleted event. Events without an
    /// SBOM digest or without PURLs are skipped; per-event failures are logged
    /// and swallowed so the subscription loop keeps running.
    /// </summary>
    private async Task ProcessEventAsync(ScanCompletedEvent @event, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(@event.SbomDigest))
        {
            _logger.LogDebug(
                "Scan {ScanId} completed without SBOM digest, skipping SBOM learning",
                @event.ScanId);
            return;
        }
        _logger.LogInformation(
            "Processing ScanCompleted event: ScanId={ScanId}, Image={ImageDigest}, SBOM={SbomDigest}",
            @event.ScanId, @event.ImageDigest, @event.SbomDigest);
        try
        {
            // Build PURL list from scan findings
            var purls = @event.Purls ?? [];
            if (purls.Count == 0)
            {
                _logger.LogDebug(
                    "Scan {ScanId} has no PURLs, skipping SBOM learning",
                    @event.ScanId);
                return;
            }
            // Build reachability map from findings
            var reachabilityMap = BuildReachabilityMap(@event);
            var input = new SbomRegistrationInput
            {
                Digest = @event.SbomDigest,
                Format = ParseSbomFormat(@event.SbomFormat),
                SpecVersion = @event.SbomSpecVersion ?? "1.6",
                PrimaryName = @event.ImageName,
                PrimaryVersion = @event.ImageTag,
                Purls = purls,
                Source = "scanner",
                TenantId = @event.TenantId,
                ReachabilityMap = reachabilityMap
            };
            var result = await _sbomService.LearnSbomAsync(input, cancellationToken)
                .ConfigureAwait(false);
            _logger.LogInformation(
                "Auto-learned SBOM from scan {ScanId}: {MatchCount} matches, {ScoresUpdated} scores updated",
                @event.ScanId, result.Matches.Count, result.ScoresUpdated);
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "Failed to process ScanCompleted event for scan {ScanId}",
                @event.ScanId);
            // Don't rethrow - continue processing other events
        }
    }

    /// <summary>
    /// Copies the event's per-PURL reachability data into a mutable map;
    /// returns null when the event carries none.
    /// </summary>
    private static Dictionary<string, bool>? BuildReachabilityMap(ScanCompletedEvent @event)
    {
        if (@event.ReachabilityData is null || @event.ReachabilityData.Count == 0)
        {
            return null;
        }
        return @event.ReachabilityData.ToDictionary(
            kvp => kvp.Key,
            kvp => kvp.Value);
    }

    /// <summary>
    /// Maps a format string to the SbomFormat enum; unknown or null formats
    /// default to CycloneDX.
    /// </summary>
    private static SbomFormat ParseSbomFormat(string? format)
    {
        return format?.ToLowerInvariant() switch
        {
            "cyclonedx" => SbomFormat.CycloneDX,
            "spdx" => SbomFormat.SPDX,
            _ => SbomFormat.CycloneDX
        };
    }
}
/// <summary>
/// Event published when a scan completes.
/// Consumed by <see cref="ScanCompletedEventHandler"/> to auto-learn SBOMs.
/// </summary>
public sealed record ScanCompletedEvent
{
    /// <summary>Unique scan identifier.</summary>
    public required string ScanId { get; init; }
    /// <summary>Report identifier.</summary>
    public string? ReportId { get; init; }
    /// <summary>Scanned image digest.</summary>
    public string? ImageDigest { get; init; }
    /// <summary>Image name (repository).</summary>
    public string? ImageName { get; init; }
    /// <summary>Image tag.</summary>
    public string? ImageTag { get; init; }
    /// <summary>SBOM content digest. When absent the event is skipped by the handler.</summary>
    public string? SbomDigest { get; init; }
    /// <summary>SBOM format.</summary>
    public string? SbomFormat { get; init; }
    /// <summary>SBOM specification version.</summary>
    public string? SbomSpecVersion { get; init; }
    /// <summary>Extracted PURLs from SBOM.</summary>
    public IReadOnlyList<string>? Purls { get; init; }
    /// <summary>Reachability data per PURL.</summary>
    public IReadOnlyDictionary<string, bool>? ReachabilityData { get; init; }
    /// <summary>Deployment data per PURL.</summary>
    public IReadOnlyDictionary<string, bool>? DeploymentData { get; init; }
    /// <summary>Tenant identifier.</summary>
    public string? TenantId { get; init; }
    /// <summary>Scan verdict (pass/fail).</summary>
    public string? Verdict { get; init; }
    /// <summary>When the scan completed. Defaults to the record's creation time (UtcNow).</summary>
    public DateTimeOffset CompletedAt { get; init; } = DateTimeOffset.UtcNow;
}
/// <summary>
/// Configuration options for ScanCompleted event handler.
/// </summary>
public sealed class ScanCompletedHandlerOptions
{
    /// <summary>Whether the handler is enabled.</summary>
    public bool Enabled { get; set; } = true;
    /// <summary>Stream name to subscribe to.
    /// NOTE(review): ScanCompletedEventHandler subscribes via the injected
    /// IEventStream and does not read this value — confirm intended wiring.</summary>
    public string StreamName { get; set; } = "scanner:events:scan-completed";
    /// <summary>Maximum concurrent event processing.
    /// NOTE(review): not currently consumed by the handler, which processes
    /// events sequentially.</summary>
    public int MaxConcurrency { get; set; } = 4;
    /// <summary>Retry count for failed processing.
    /// NOTE(review): not currently consumed by the handler; failures are
    /// logged and skipped without retry.</summary>
    public int RetryCount { get; set; } = 3;
}

View File

@@ -0,0 +1,306 @@
// -----------------------------------------------------------------------------
// ScannerEventHandler.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-025
// Description: Subscribes to Scanner events for auto-learning SBOMs
// -----------------------------------------------------------------------------
using System.Text.Json;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Concelier.SbomIntegration.Events;
/// <summary>
/// Hosted service that subscribes to Scanner SBOM events for auto-learning.
/// Dispatches orchestrator envelopes by kind and learns SBOMs from
/// "sbom.generated" events via an optional <see cref="IScannerSbomFetcher"/>.
/// </summary>
public sealed class ScannerEventHandler : BackgroundService
{
    /// <summary>
    /// Stream name for orchestrator events.
    /// </summary>
    public const string OrchestratorStreamName = "orchestrator:events";
    /// <summary>
    /// Event kind for SBOM generated.
    /// </summary>
    public const string SbomGeneratedKind = "scanner.event.sbom.generated";
    /// <summary>
    /// Event kind for scan completed.
    /// </summary>
    public const string ScanCompletedKind = "scanner.event.scan.completed";
    // Optional dependencies: a null stream disables the handler; a null
    // fetcher makes sbom.generated events unprocessable (logged and skipped).
    private readonly IEventStream<OrchestratorEventEnvelope>? _eventStream;
    private readonly ISbomRegistryService _registryService;
    private readonly IScannerSbomFetcher? _sbomFetcher;
    private readonly ILogger<ScannerEventHandler> _logger;
    // Diagnostic counters, updated via Interlocked for cross-thread visibility.
    private long _eventsProcessed;
    private long _sbomsLearned;
    private long _errors;

    /// <summary>
    /// Creates the handler.
    /// </summary>
    /// <param name="registryService">Registry service used to learn SBOMs.</param>
    /// <param name="logger">Logger.</param>
    /// <param name="eventStream">Orchestrator event stream; null disables the handler.</param>
    /// <param name="sbomFetcher">Fetcher used to resolve SBOM PURLs; optional.</param>
    public ScannerEventHandler(
        ISbomRegistryService registryService,
        ILogger<ScannerEventHandler> logger,
        IEventStream<OrchestratorEventEnvelope>? eventStream = null,
        IScannerSbomFetcher? sbomFetcher = null)
    {
        _registryService = registryService ?? throw new ArgumentNullException(nameof(registryService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _eventStream = eventStream;
        _sbomFetcher = sbomFetcher;
    }

    /// <summary>
    /// Gets the number of events processed.
    /// </summary>
    public long EventsProcessed => Interlocked.Read(ref _eventsProcessed);
    /// <summary>
    /// Gets the number of SBOMs learned.
    /// </summary>
    public long SbomsLearned => Interlocked.Read(ref _sbomsLearned);
    /// <summary>
    /// Gets the number of errors.
    /// </summary>
    public long Errors => Interlocked.Read(ref _errors);

    /// <summary>
    /// Subscribes to the orchestrator stream from the latest position and
    /// handles events until cancellation. Per-event failures are counted and
    /// logged without stopping the loop; a failure of the loop itself is
    /// logged and rethrown so the host observes the fault.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (_eventStream is null)
        {
            _logger.LogWarning(
                "ScannerEventHandler disabled: no IEventStream<OrchestratorEventEnvelope> configured");
            return;
        }
        _logger.LogInformation(
            "ScannerEventHandler started, subscribing to {StreamName}",
            _eventStream.StreamName);
        try
        {
            await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, stoppingToken))
            {
                try
                {
                    await HandleEventAsync(streamEvent.Event, stoppingToken).ConfigureAwait(false);
                    Interlocked.Increment(ref _eventsProcessed);
                }
                catch (Exception ex)
                {
                    Interlocked.Increment(ref _errors);
                    _logger.LogError(ex,
                        "Error processing orchestrator event {EventId} kind {Kind}",
                        streamEvent.Event.EventId,
                        streamEvent.Event.Kind);
                }
            }
        }
        catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
        {
            // Normal shutdown
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Fatal error in ScannerEventHandler event processing loop");
            throw;
        }
    }

    /// <summary>
    /// Routes an envelope by its Kind: sbom.generated triggers learning,
    /// scan.completed is only logged, everything else is ignored.
    /// </summary>
    private async Task HandleEventAsync(OrchestratorEventEnvelope envelope, CancellationToken cancellationToken)
    {
        switch (envelope.Kind)
        {
            case SbomGeneratedKind:
                await HandleSbomGeneratedAsync(envelope, cancellationToken).ConfigureAwait(false);
                break;
            case ScanCompletedKind:
                // ScanCompleted events contain findings but not the full SBOM
                // We could use this to enrich reachability data
                _logger.LogDebug(
                    "Received ScanCompleted event {EventId} for digest {Digest}",
                    envelope.EventId,
                    envelope.Scope?.Digest);
                break;
            default:
                // Ignore other event types
                break;
        }
    }

    /// <summary>
    /// Parses the sbom.generated payload, fetches its PURLs through the
    /// configured fetcher, and registers the SBOM with the registry service.
    /// Invalid payloads, missing fetcher/SbomRef, or empty PURL lists are
    /// logged and skipped; learning failures increment the error counter.
    /// </summary>
    private async Task HandleSbomGeneratedAsync(
        OrchestratorEventEnvelope envelope,
        CancellationToken cancellationToken)
    {
        if (envelope.Payload is null)
        {
            _logger.LogWarning("SbomGenerated event {EventId} has no payload", envelope.EventId);
            return;
        }
        // Parse the SBOM generated payload
        var payload = ParseSbomGeneratedPayload(envelope.Payload.Value);
        if (payload is null || string.IsNullOrEmpty(payload.Digest))
        {
            _logger.LogWarning(
                "SbomGenerated event {EventId} has invalid payload",
                envelope.EventId);
            return;
        }
        _logger.LogInformation(
            "Processing SbomGenerated event {EventId}: SBOM {SbomId} with {ComponentCount} components",
            envelope.EventId,
            payload.SbomId,
            payload.ComponentCount);
        // Fetch SBOM content if we have a fetcher
        IReadOnlyList<string> purls;
        if (_sbomFetcher is not null && !string.IsNullOrEmpty(payload.SbomRef))
        {
            purls = await _sbomFetcher.FetchPurlsAsync(payload.SbomRef, cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            _logger.LogWarning(
                "Cannot fetch SBOM content for {SbomId}: no fetcher configured or no SbomRef",
                payload.SbomId);
            return;
        }
        if (purls.Count == 0)
        {
            _logger.LogWarning("SBOM {SbomId} has no PURLs", payload.SbomId);
            return;
        }
        // Create registration input
        var input = new SbomRegistrationInput
        {
            Digest = payload.Digest,
            Format = ParseSbomFormat(payload.Format),
            SpecVersion = payload.SpecVersion ?? "1.6",
            PrimaryName = envelope.Scope?.Repo,
            PrimaryVersion = envelope.Scope?.Digest,
            Purls = purls,
            Source = "scanner-event",
            TenantId = envelope.Tenant
        };
        // Learn the SBOM
        try
        {
            var result = await _registryService.LearnSbomAsync(input, cancellationToken)
                .ConfigureAwait(false);
            Interlocked.Increment(ref _sbomsLearned);
            _logger.LogInformation(
                "Auto-learned SBOM {Digest} from scanner event: {MatchCount} advisories matched, {ScoresUpdated} scores updated",
                payload.Digest,
                result.Matches.Count,
                result.ScoresUpdated);
        }
        catch (Exception ex)
        {
            Interlocked.Increment(ref _errors);
            _logger.LogError(ex,
                "Failed to auto-learn SBOM {Digest} from scanner event",
                payload.Digest);
        }
    }

    /// <summary>
    /// Best-effort deserialization of the sbom.generated payload; returns
    /// null for missing/undefined JSON or any deserialization failure.
    /// </summary>
    private static SbomGeneratedPayload? ParseSbomGeneratedPayload(JsonElement? payload)
    {
        if (payload is null || payload.Value.ValueKind == JsonValueKind.Undefined)
        {
            return null;
        }
        try
        {
            return payload.Value.Deserialize<SbomGeneratedPayload>();
        }
        catch
        {
            return null;
        }
    }

    /// <summary>
    /// Maps a format string to the SbomFormat enum; anything but "spdx"
    /// (including null) defaults to CycloneDX.
    /// </summary>
    private static SbomFormat ParseSbomFormat(string? format)
    {
        return format?.ToLowerInvariant() switch
        {
            "spdx" => SbomFormat.SPDX,
            _ => SbomFormat.CycloneDX
        };
    }
}
/// <summary>
/// Envelope for orchestrator events received from the event stream.
/// </summary>
public sealed record OrchestratorEventEnvelope
{
    /// <summary>Unique event identifier.</summary>
    public Guid EventId { get; init; }
    /// <summary>Event kind discriminator (e.g., scanner.event.sbom.generated).</summary>
    public string Kind { get; init; } = string.Empty;
    /// <summary>Envelope schema version.</summary>
    public int Version { get; init; } = 1;
    /// <summary>Tenant identifier, when the event is tenant-scoped.</summary>
    public string? Tenant { get; init; }
    /// <summary>When the event occurred at the source.</summary>
    public DateTimeOffset OccurredAt { get; init; }
    /// <summary>When the event was recorded on the stream, if known.</summary>
    public DateTimeOffset? RecordedAt { get; init; }
    /// <summary>Originating component/service.</summary>
    public string? Source { get; init; }
    /// <summary>Key used for idempotent/duplicate-safe processing.</summary>
    public string? IdempotencyKey { get; init; }
    /// <summary>Correlation identifier for tracing across services.</summary>
    public string? CorrelationId { get; init; }
    /// <summary>Namespace/repo/digest scope the event applies to.</summary>
    public OrchestratorEventScope? Scope { get; init; }
    /// <summary>Kind-specific payload, left as raw JSON for per-kind parsing.</summary>
    public JsonElement? Payload { get; init; }
}
/// <summary>
/// Scope for orchestrator events.
/// </summary>
public sealed record OrchestratorEventScope
{
    /// <summary>Registry namespace/organization.</summary>
    public string? Namespace { get; init; }
    /// <summary>Repository name.</summary>
    public string? Repo { get; init; }
    /// <summary>Image digest the event refers to.</summary>
    public string? Digest { get; init; }
}
/// <summary>
/// Payload for SBOM generated events.
/// Deserialized from the envelope's raw JSON payload.
/// </summary>
internal sealed record SbomGeneratedPayload
{
    /// <summary>Identifier of the scan that produced the SBOM.</summary>
    public string ScanId { get; init; } = string.Empty;
    /// <summary>Identifier of the generated SBOM.</summary>
    public string SbomId { get; init; } = string.Empty;
    /// <summary>When the SBOM was generated.</summary>
    public DateTimeOffset GeneratedAt { get; init; }
    /// <summary>SBOM format name; defaults to "cyclonedx".</summary>
    public string Format { get; init; } = "cyclonedx";
    /// <summary>SBOM specification version, if provided.</summary>
    public string? SpecVersion { get; init; }
    /// <summary>Number of components in the SBOM.</summary>
    public int ComponentCount { get; init; }
    /// <summary>Reference (URL or ID) used to fetch the SBOM content.</summary>
    public string? SbomRef { get; init; }
    /// <summary>SBOM content digest; required downstream for registration.</summary>
    public string? Digest { get; init; }
}
/// <summary>
/// Interface for fetching SBOM content from Scanner service.
/// </summary>
/// <remarks>
/// NOTE(review): the error contract (throw vs. empty list on a missing SBOM)
/// is not visible here — confirm with implementations; ScannerEventHandler
/// treats an empty result as "no PURLs" and skips learning.
/// </remarks>
public interface IScannerSbomFetcher
{
    /// <summary>
    /// Fetches PURLs from an SBOM by reference.
    /// </summary>
    /// <param name="sbomRef">Reference to the SBOM (URL or ID).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of PURLs extracted from the SBOM.</returns>
    Task<IReadOnlyList<string>> FetchPurlsAsync(
        string sbomRef,
        CancellationToken cancellationToken = default);
}

View File

@@ -108,5 +108,13 @@ public interface ISbomRegistryRepository
DateTimeOffset lastMatched,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates the PURL list for an SBOM.
/// </summary>
Task UpdatePurlsAsync(
string digest,
IReadOnlyList<string> purls,
CancellationToken cancellationToken = default);
#endregion
}

View File

@@ -1,12 +1,13 @@
// -----------------------------------------------------------------------------
// ServiceCollectionExtensions.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-000
// Tasks: SBOM-8200-000, SBOM-8200-025
// Description: DI registration for SBOM integration services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Concelier.SbomIntegration.Events;
using StellaOps.Concelier.SbomIntegration.Index;
using StellaOps.Concelier.SbomIntegration.Matching;
using StellaOps.Concelier.SbomIntegration.Parsing;
@@ -61,4 +62,30 @@ public static class ServiceCollectionExtensions
return services;
}
/// <summary>
/// Adds the Scanner event handler for auto-learning SBOMs.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="services"/> is null.</exception>
public static IServiceCollection AddConcelierSbomAutoLearning(this IServiceCollection services)
{
    // Guard early rather than failing later inside the DI container.
    ArgumentNullException.ThrowIfNull(services);

    services.AddHostedService<ScanCompletedEventHandler>();
    return services;
}
/// <summary>
/// Adds the Scanner event handler with custom options.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configureOptions">Options configuration action.</param>
/// <returns>The service collection for chaining.</returns>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="services"/> or <paramref name="configureOptions"/> is null.
/// </exception>
public static IServiceCollection AddConcelierSbomAutoLearning(
    this IServiceCollection services,
    Action<ScanCompletedHandlerOptions> configureOptions)
{
    // Guard early; Configure(null) would otherwise fail deep inside Options plumbing.
    ArgumentNullException.ThrowIfNull(services);
    ArgumentNullException.ThrowIfNull(configureOptions);

    services.Configure(configureOptions);
    services.AddHostedService<ScanCompletedEventHandler>();
    return services;
}
}

View File

@@ -0,0 +1,56 @@
-- Concelier Migration 017: Provenance Scope Table
-- Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
-- Task: BACKPORT-8200-000
-- Creates distro-specific backport and patch provenance per canonical

-- Distro-specific provenance for canonical advisories
CREATE TABLE IF NOT EXISTS vuln.provenance_scope (
    -- Identity
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id) ON DELETE CASCADE,
    -- Distro context
    distro_release TEXT NOT NULL, -- e.g., 'debian:bookworm', 'rhel:9.2', 'ubuntu:22.04'
    -- Patch provenance
    backport_semver TEXT, -- distro's backported version if different from upstream
    patch_id TEXT, -- upstream commit SHA or patch identifier
    patch_origin TEXT CHECK (patch_origin IN ('upstream', 'distro', 'vendor')),
    -- Evidence linkage
    evidence_ref UUID, -- FK to proofchain.proof_entries (if available)
    confidence NUMERIC(3,2) NOT NULL DEFAULT 0.5 CHECK (confidence >= 0 AND confidence <= 1),
    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Constraints
    CONSTRAINT uq_provenance_scope_canonical_distro UNIQUE (canonical_id, distro_release)
);

-- Primary lookup indexes
CREATE INDEX IF NOT EXISTS idx_provenance_scope_canonical ON vuln.provenance_scope(canonical_id);
CREATE INDEX IF NOT EXISTS idx_provenance_scope_distro ON vuln.provenance_scope(distro_release);
CREATE INDEX IF NOT EXISTS idx_provenance_scope_patch ON vuln.provenance_scope(patch_id) WHERE patch_id IS NOT NULL;

-- Filtered indexes for common queries
CREATE INDEX IF NOT EXISTS idx_provenance_scope_high_confidence ON vuln.provenance_scope(confidence DESC) WHERE confidence >= 0.7;
CREATE INDEX IF NOT EXISTS idx_provenance_scope_origin ON vuln.provenance_scope(patch_origin) WHERE patch_origin IS NOT NULL;

-- Time-based index for incremental queries
CREATE INDEX IF NOT EXISTS idx_provenance_scope_updated ON vuln.provenance_scope(updated_at DESC);

-- Trigger for automatic updated_at.
-- PostgreSQL's CREATE TRIGGER has no IF NOT EXISTS clause, so drop any prior
-- instance first; this keeps the migration idempotent like the CREATE TABLE /
-- CREATE INDEX statements above (re-running previously failed with 42710).
DROP TRIGGER IF EXISTS trg_provenance_scope_updated ON vuln.provenance_scope;
CREATE TRIGGER trg_provenance_scope_updated
BEFORE UPDATE ON vuln.provenance_scope
FOR EACH ROW EXECUTE FUNCTION vuln.update_timestamp();

-- Comments
COMMENT ON TABLE vuln.provenance_scope IS 'Distro-specific backport and patch provenance per canonical advisory';
COMMENT ON COLUMN vuln.provenance_scope.distro_release IS 'Linux distribution release identifier (e.g., debian:bookworm, rhel:9.2)';
COMMENT ON COLUMN vuln.provenance_scope.backport_semver IS 'Distro version containing backport (may differ from upstream fixed version)';
COMMENT ON COLUMN vuln.provenance_scope.patch_id IS 'Upstream commit SHA or patch identifier for lineage tracking';
COMMENT ON COLUMN vuln.provenance_scope.patch_origin IS 'Source of the patch: upstream project, distro maintainer, or vendor';
COMMENT ON COLUMN vuln.provenance_scope.evidence_ref IS 'Reference to BackportProofService evidence in proofchain';
COMMENT ON COLUMN vuln.provenance_scope.confidence IS 'Confidence score from BackportProofService (0.0-1.0)';

View File

@@ -0,0 +1,64 @@
// -----------------------------------------------------------------------------
// ProvenanceScopeEntity.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-001
// Description: Entity for distro-specific backport and patch provenance
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Storage.Postgres.Models;
/// <summary>
/// Represents distro-specific backport and patch provenance per canonical advisory.
/// Maps 1:1 to the vuln.provenance_scope table; rows are unique per
/// (canonical_id, distro_release).
/// </summary>
public sealed class ProvenanceScopeEntity
{
/// <summary>
/// Unique provenance scope identifier.
/// </summary>
public required Guid Id { get; init; }
/// <summary>
/// Reference to the canonical advisory.
/// </summary>
public required Guid CanonicalId { get; init; }
/// <summary>
/// Linux distribution release identifier (e.g., debian:bookworm, rhel:9.2, ubuntu:22.04).
/// </summary>
public required string DistroRelease { get; init; }
/// <summary>
/// Distro version containing backport (may differ from upstream fixed version).
/// </summary>
public string? BackportSemver { get; init; }
/// <summary>
/// Upstream commit SHA or patch identifier for lineage tracking.
/// </summary>
public string? PatchId { get; init; }
/// <summary>
/// Source of the patch: upstream, distro, or vendor.
/// Stored lowercase; the database CHECK constraint restricts the allowed values.
/// </summary>
public string? PatchOrigin { get; init; }
/// <summary>
/// Reference to BackportProofService evidence in proofchain.
/// </summary>
public Guid? EvidenceRef { get; init; }
/// <summary>
/// Confidence score from BackportProofService (0.0-1.0).
/// Defaults to 0.5, mirroring the column default in the database.
/// </summary>
public decimal Confidence { get; init; } = 0.5m;
/// <summary>
/// When the provenance scope record was created.
/// </summary>
public DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// When the provenance scope record was last updated.
/// </summary>
public DateTimeOffset UpdatedAt { get; init; }
}

View File

@@ -0,0 +1,169 @@
// -----------------------------------------------------------------------------
// IProvenanceScopeRepository.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-002
// Description: Repository interface for provenance scope operations
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Repository interface for distro-specific provenance scope operations.
/// </summary>
public interface IProvenanceScopeRepository
{
#region CRUD Operations
/// <summary>
/// Gets a provenance scope by ID.
/// </summary>
Task<ProvenanceScopeEntity?> GetByIdAsync(Guid id, CancellationToken ct = default);
/// <summary>
/// Gets a provenance scope by canonical ID and distro release.
/// </summary>
Task<ProvenanceScopeEntity?> GetByCanonicalAndDistroAsync(
Guid canonicalId,
string distroRelease,
CancellationToken ct = default);
/// <summary>
/// Gets all provenance scopes for a canonical advisory.
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetByCanonicalIdAsync(
Guid canonicalId,
CancellationToken ct = default);
/// <summary>
/// Gets all provenance scopes for a distro release.
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetByDistroReleaseAsync(
string distroRelease,
CancellationToken ct = default);
/// <summary>
/// Gets provenance scopes by patch ID (for lineage tracking).
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetByPatchIdAsync(
string patchId,
CancellationToken ct = default);
/// <summary>
/// Upserts a provenance scope (insert or update by canonical_id + distro_release).
/// </summary>
/// <returns>The ID of the inserted or updated row.</returns>
Task<Guid> UpsertAsync(ProvenanceScopeEntity entity, CancellationToken ct = default);
/// <summary>
/// Updates an existing provenance scope.
/// </summary>
Task UpdateAsync(ProvenanceScopeEntity entity, CancellationToken ct = default);
/// <summary>
/// Deletes a provenance scope.
/// </summary>
Task DeleteAsync(Guid id, CancellationToken ct = default);
/// <summary>
/// Deletes all provenance scopes for a canonical advisory.
/// </summary>
Task DeleteByCanonicalIdAsync(Guid canonicalId, CancellationToken ct = default);
#endregion
#region Query Operations
/// <summary>
/// Gets provenance scopes with high confidence (>= threshold).
/// </summary>
/// <param name="threshold">Minimum confidence score (inclusive).</param>
/// <param name="limit">Maximum number of rows to return.</param>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetHighConfidenceAsync(
decimal threshold = 0.7m,
int limit = 1000,
CancellationToken ct = default);
/// <summary>
/// Gets provenance scopes updated since a given time.
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetUpdatedSinceAsync(
DateTimeOffset since,
int limit = 1000,
CancellationToken ct = default);
/// <summary>
/// Gets provenance scopes by patch origin (upstream, distro, vendor).
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetByPatchOriginAsync(
string patchOrigin,
int limit = 1000,
CancellationToken ct = default);
/// <summary>
/// Gets provenance scopes with linked evidence.
/// </summary>
Task<IReadOnlyList<ProvenanceScopeEntity>> GetWithEvidenceAsync(
int limit = 1000,
CancellationToken ct = default);
/// <summary>
/// Streams all provenance scopes for batch processing.
/// </summary>
IAsyncEnumerable<ProvenanceScopeEntity> StreamAllAsync(CancellationToken ct = default);
#endregion
#region Statistics
/// <summary>
/// Gets provenance scope statistics.
/// </summary>
Task<ProvenanceScopeStatistics> GetStatisticsAsync(CancellationToken ct = default);
/// <summary>
/// Counts provenance scopes by distro release.
/// </summary>
Task<IReadOnlyDictionary<string, long>> CountByDistroAsync(CancellationToken ct = default);
#endregion
}
/// <summary>
/// Statistics about provenance scope records.
/// </summary>
public sealed record ProvenanceScopeStatistics
{
/// <summary>
/// Total provenance scope count.
/// </summary>
public long TotalScopes { get; init; }
/// <summary>
/// Count of scopes with high confidence (>= 0.7).
/// The 0.7 threshold matches the partial index defined in the migration.
/// </summary>
public long HighConfidenceScopes { get; init; }
/// <summary>
/// Count of scopes with linked evidence.
/// </summary>
public long ScopesWithEvidence { get; init; }
/// <summary>
/// Average confidence score.
/// </summary>
public decimal AvgConfidence { get; init; }
/// <summary>
/// Count of unique canonical advisories with provenance.
/// </summary>
public long UniqueCanonicals { get; init; }
/// <summary>
/// Count of unique distro releases tracked.
/// </summary>
public long UniqueDistros { get; init; }
/// <summary>
/// Most recent provenance scope update time; null when no rows exist.
/// </summary>
public DateTimeOffset? LastUpdatedAt { get; init; }
}

View File

@@ -0,0 +1,155 @@
// -----------------------------------------------------------------------------
// PostgresProvenanceScopeStore.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Tasks: BACKPORT-8200-014, BACKPORT-8200-015, BACKPORT-8200-016
// Description: PostgreSQL store implementation for provenance scope
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Merge.Backport;
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL implementation of <see cref="IProvenanceScopeStore"/>.
/// Bridges the domain ProvenanceScope model to the persistence layer,
/// converting between the domain's double confidence / enum patch origin and
/// the entity's decimal confidence / string patch origin.
/// </summary>
public sealed class PostgresProvenanceScopeStore : IProvenanceScopeStore
{
    private readonly IProvenanceScopeRepository _repository;

    public PostgresProvenanceScopeStore(IProvenanceScopeRepository repository)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
    }

    /// <inheritdoc />
    public async Task<ProvenanceScope?> GetByCanonicalAndDistroAsync(
        Guid canonicalId,
        string distroRelease,
        CancellationToken ct = default)
    {
        // Guard before hitting the database; a null key would only fail later as a SQL error.
        ArgumentNullException.ThrowIfNull(distroRelease);

        var entity = await _repository.GetByCanonicalAndDistroAsync(canonicalId, distroRelease, ct)
            .ConfigureAwait(false);
        return entity is null ? null : MapToDomain(entity);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<ProvenanceScope>> GetByCanonicalIdAsync(
        Guid canonicalId,
        CancellationToken ct = default)
    {
        var entities = await _repository.GetByCanonicalIdAsync(canonicalId, ct)
            .ConfigureAwait(false);
        return entities.Select(MapToDomain).ToList();
    }

    /// <inheritdoc />
    public async Task<Guid> UpsertAsync(ProvenanceScope scope, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(scope);

        var entity = MapToEntity(scope);
        return await _repository.UpsertAsync(entity, ct).ConfigureAwait(false);
    }

    /// <inheritdoc />
    /// <remarks>
    /// Best-effort: silently no-ops when <paramref name="provenanceScopeId"/> does not
    /// resolve to an existing row. Callers that must distinguish "linked" from
    /// "not found" should check existence separately.
    /// </remarks>
    public async Task LinkEvidenceRefAsync(
        Guid provenanceScopeId,
        Guid evidenceRef,
        CancellationToken ct = default)
    {
        var existing = await _repository.GetByIdAsync(provenanceScopeId, ct).ConfigureAwait(false);
        if (existing is null)
        {
            return;
        }

        // Entity properties are init-only, so build a copy carrying the new evidence ref.
        var updated = new ProvenanceScopeEntity
        {
            Id = existing.Id,
            CanonicalId = existing.CanonicalId,
            DistroRelease = existing.DistroRelease,
            BackportSemver = existing.BackportSemver,
            PatchId = existing.PatchId,
            PatchOrigin = existing.PatchOrigin,
            EvidenceRef = evidenceRef,
            Confidence = existing.Confidence,
            CreatedAt = existing.CreatedAt,
            UpdatedAt = DateTimeOffset.UtcNow
        };

        await _repository.UpdateAsync(updated, ct).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public Task DeleteByCanonicalIdAsync(Guid canonicalId, CancellationToken ct = default)
    {
        return _repository.DeleteByCanonicalIdAsync(canonicalId, ct);
    }

    #region Mapping

    /// <summary>Maps a persistence entity to the domain model (decimal → double confidence).</summary>
    private static ProvenanceScope MapToDomain(ProvenanceScopeEntity entity)
    {
        return new ProvenanceScope
        {
            Id = entity.Id,
            CanonicalId = entity.CanonicalId,
            DistroRelease = entity.DistroRelease,
            BackportSemver = entity.BackportSemver,
            PatchId = entity.PatchId,
            PatchOrigin = ParsePatchOrigin(entity.PatchOrigin),
            EvidenceRef = entity.EvidenceRef,
            Confidence = (double)entity.Confidence,
            CreatedAt = entity.CreatedAt,
            UpdatedAt = entity.UpdatedAt
        };
    }

    /// <summary>Maps the domain model to a persistence entity (double → decimal confidence).</summary>
    private static ProvenanceScopeEntity MapToEntity(ProvenanceScope scope)
    {
        return new ProvenanceScopeEntity
        {
            Id = scope.Id,
            CanonicalId = scope.CanonicalId,
            DistroRelease = scope.DistroRelease,
            BackportSemver = scope.BackportSemver,
            PatchId = scope.PatchId,
            PatchOrigin = MapPatchOriginToString(scope.PatchOrigin),
            EvidenceRef = scope.EvidenceRef,
            Confidence = (decimal)scope.Confidence,
            CreatedAt = scope.CreatedAt,
            UpdatedAt = scope.UpdatedAt
        };
    }

    /// <summary>
    /// Parses the stored patch-origin string into the domain enum.
    /// Unknown or null values map to null (the DB CHECK constraint only
    /// permits 'upstream', 'distro', 'vendor').
    /// </summary>
    private static Merge.Backport.PatchOrigin? ParsePatchOrigin(string? origin)
    {
        return origin?.ToLowerInvariant() switch
        {
            "upstream" => Merge.Backport.PatchOrigin.Upstream,
            "distro" => Merge.Backport.PatchOrigin.Distro,
            "vendor" => Merge.Backport.PatchOrigin.Vendor,
            _ => null
        };
    }

    /// <summary>
    /// Maps the domain enum back to its lowercase DB string.
    /// Unknown, null, and any future enum value collapse to null so the
    /// database CHECK constraint is never violated.
    /// </summary>
    private static string? MapPatchOriginToString(Merge.Backport.PatchOrigin? origin)
    {
        return origin switch
        {
            Merge.Backport.PatchOrigin.Upstream => "upstream",
            Merge.Backport.PatchOrigin.Distro => "distro",
            Merge.Backport.PatchOrigin.Vendor => "vendor",
            _ => null
        };
    }

    #endregion
}

View File

@@ -0,0 +1,427 @@
// -----------------------------------------------------------------------------
// ProvenanceScopeRepository.cs
// Sprint: SPRINT_8200_0015_0001_CONCEL_backport_integration
// Task: BACKPORT-8200-003
// Description: PostgreSQL repository for provenance scope operations
// -----------------------------------------------------------------------------
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for provenance scope operations.
/// </summary>
/// <remarks>
/// All queries run under the synthetic "_system" tenant identifier —
/// NOTE(review): this suggests provenance scopes are not tenant-scoped; confirm
/// against the RepositoryBase tenancy model.
/// </remarks>
public sealed class ProvenanceScopeRepository : RepositoryBase<ConcelierDataSource>, IProvenanceScopeRepository
{
// Tenant id passed to the RepositoryBase helpers for these global queries.
private const string SystemTenantId = "_system";
public ProvenanceScopeRepository(ConcelierDataSource dataSource, ILogger<ProvenanceScopeRepository> logger)
: base(dataSource, logger)
{
}
#region CRUD Operations
/// <inheritdoc />
public Task<ProvenanceScopeEntity?> GetByIdAsync(Guid id, CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE id = @id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "id", id),
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<ProvenanceScopeEntity?> GetByCanonicalAndDistroAsync(
Guid canonicalId,
string distroRelease,
CancellationToken ct = default)
{
// (canonical_id, distro_release) is the table's natural key (unique constraint).
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE canonical_id = @canonical_id AND distro_release = @distro_release
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "canonical_id", canonicalId);
AddParameter(cmd, "distro_release", distroRelease);
},
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetByCanonicalIdAsync(
Guid canonicalId,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE canonical_id = @canonical_id
ORDER BY confidence DESC, distro_release
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "canonical_id", canonicalId),
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetByDistroReleaseAsync(
string distroRelease,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE distro_release = @distro_release
ORDER BY confidence DESC, updated_at DESC
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "distro_release", distroRelease),
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetByPatchIdAsync(
string patchId,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE patch_id = @patch_id
ORDER BY confidence DESC, updated_at DESC
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "patch_id", patchId),
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public async Task<Guid> UpsertAsync(ProvenanceScopeEntity entity, CancellationToken ct = default)
{
// ON CONFLICT targets the natural key; created_at is preserved on update,
// and RETURNING id yields the existing row's id when a conflict occurs.
const string sql = """
INSERT INTO vuln.provenance_scope (
id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
)
VALUES (
@id, @canonical_id, @distro_release, @backport_semver, @patch_id,
@patch_origin, @evidence_ref, @confidence, NOW(), NOW()
)
ON CONFLICT (canonical_id, distro_release)
DO UPDATE SET
backport_semver = EXCLUDED.backport_semver,
patch_id = EXCLUDED.patch_id,
patch_origin = EXCLUDED.patch_origin,
evidence_ref = EXCLUDED.evidence_ref,
confidence = EXCLUDED.confidence,
updated_at = NOW()
RETURNING id
""";
// Generate an id client-side when the caller did not supply one.
var id = entity.Id == Guid.Empty ? Guid.NewGuid() : entity.Id;
var result = await ExecuteScalarAsync<Guid>(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", id);
AddParameter(cmd, "canonical_id", entity.CanonicalId);
AddParameter(cmd, "distro_release", entity.DistroRelease);
AddParameter(cmd, "backport_semver", entity.BackportSemver);
AddParameter(cmd, "patch_id", entity.PatchId);
AddParameter(cmd, "patch_origin", entity.PatchOrigin);
AddParameter(cmd, "evidence_ref", entity.EvidenceRef);
AddParameter(cmd, "confidence", entity.Confidence);
},
ct);
return result;
}
/// <inheritdoc />
public Task UpdateAsync(ProvenanceScopeEntity entity, CancellationToken ct = default)
{
// canonical_id / distro_release / created_at are intentionally not updatable.
const string sql = """
UPDATE vuln.provenance_scope
SET backport_semver = @backport_semver,
patch_id = @patch_id,
patch_origin = @patch_origin,
evidence_ref = @evidence_ref,
confidence = @confidence,
updated_at = NOW()
WHERE id = @id
""";
return ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", entity.Id);
AddParameter(cmd, "backport_semver", entity.BackportSemver);
AddParameter(cmd, "patch_id", entity.PatchId);
AddParameter(cmd, "patch_origin", entity.PatchOrigin);
AddParameter(cmd, "evidence_ref", entity.EvidenceRef);
AddParameter(cmd, "confidence", entity.Confidence);
},
ct);
}
/// <inheritdoc />
public Task DeleteAsync(Guid id, CancellationToken ct = default)
{
const string sql = "DELETE FROM vuln.provenance_scope WHERE id = @id";
return ExecuteAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "id", id),
ct);
}
/// <inheritdoc />
public Task DeleteByCanonicalIdAsync(Guid canonicalId, CancellationToken ct = default)
{
const string sql = "DELETE FROM vuln.provenance_scope WHERE canonical_id = @canonical_id";
return ExecuteAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "canonical_id", canonicalId),
ct);
}
#endregion
#region Query Operations
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetHighConfidenceAsync(
decimal threshold = 0.7m,
int limit = 1000,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE confidence >= @threshold
ORDER BY confidence DESC, updated_at DESC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "threshold", threshold);
AddParameter(cmd, "limit", limit);
},
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetUpdatedSinceAsync(
DateTimeOffset since,
int limit = 1000,
CancellationToken ct = default)
{
// Ascending order so callers can page forward using the last updated_at seen.
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE updated_at > @since
ORDER BY updated_at ASC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "since", since);
AddParameter(cmd, "limit", limit);
},
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetByPatchOriginAsync(
string patchOrigin,
int limit = 1000,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE patch_origin = @patch_origin
ORDER BY confidence DESC, updated_at DESC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "patch_origin", patchOrigin);
AddParameter(cmd, "limit", limit);
},
MapProvenanceScope,
ct);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProvenanceScopeEntity>> GetWithEvidenceAsync(
int limit = 1000,
CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
WHERE evidence_ref IS NOT NULL
ORDER BY confidence DESC, updated_at DESC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "limit", limit),
MapProvenanceScope,
ct);
}
/// <inheritdoc />
/// <remarks>Unbounded read over the whole table; intended for batch processing.</remarks>
public async IAsyncEnumerable<ProvenanceScopeEntity> StreamAllAsync(
[EnumeratorCancellation] CancellationToken ct = default)
{
const string sql = """
SELECT id, canonical_id, distro_release, backport_semver, patch_id,
patch_origin, evidence_ref, confidence, created_at, updated_at
FROM vuln.provenance_scope
ORDER BY canonical_id, distro_release
""";
await using var connection = await DataSource.OpenSystemConnectionAsync(ct).ConfigureAwait(false);
await using var command = CreateCommand(sql, connection);
await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false);
while (await reader.ReadAsync(ct).ConfigureAwait(false))
{
yield return MapProvenanceScope(reader);
}
}
#endregion
#region Statistics
/// <inheritdoc />
public async Task<ProvenanceScopeStatistics> GetStatisticsAsync(CancellationToken ct = default)
{
// The 0.7 threshold matches the idx_provenance_scope_high_confidence partial index.
const string sql = """
SELECT
COUNT(*) AS total_scopes,
COUNT(*) FILTER (WHERE confidence >= 0.7) AS high_confidence_scopes,
COUNT(*) FILTER (WHERE evidence_ref IS NOT NULL) AS scopes_with_evidence,
COALESCE(AVG(confidence), 0) AS avg_confidence,
COUNT(DISTINCT canonical_id) AS unique_canonicals,
COUNT(DISTINCT distro_release) AS unique_distros,
MAX(updated_at) AS last_updated_at
FROM vuln.provenance_scope
""";
var result = await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
_ => { },
reader => new ProvenanceScopeStatistics
{
TotalScopes = reader.GetInt64(0),
HighConfidenceScopes = reader.GetInt64(1),
ScopesWithEvidence = reader.GetInt64(2),
AvgConfidence = reader.GetDecimal(3),
UniqueCanonicals = reader.GetInt64(4),
UniqueDistros = reader.GetInt64(5),
LastUpdatedAt = reader.IsDBNull(6) ? null : reader.GetFieldValue<DateTimeOffset>(6)
},
ct);
// Defensive fallback; the aggregate query always yields exactly one row.
return result ?? new ProvenanceScopeStatistics();
}
/// <inheritdoc />
public async Task<IReadOnlyDictionary<string, long>> CountByDistroAsync(CancellationToken ct = default)
{
const string sql = """
SELECT distro_release, COUNT(*) AS count
FROM vuln.provenance_scope
GROUP BY distro_release
ORDER BY count DESC
""";
var results = await QueryAsync(
SystemTenantId,
sql,
_ => { },
reader => new KeyValuePair<string, long>(
reader.GetString(0),
reader.GetInt64(1)),
ct);
return results.ToDictionary(kv => kv.Key, kv => kv.Value);
}
#endregion
#region Mapping
// Ordinal positions must match the SELECT column order used by every query in
// this repository (id, canonical_id, distro_release, backport_semver, patch_id,
// patch_origin, evidence_ref, confidence, created_at, updated_at).
private static ProvenanceScopeEntity MapProvenanceScope(NpgsqlDataReader reader)
{
return new ProvenanceScopeEntity
{
Id = reader.GetGuid(0),
CanonicalId = reader.GetGuid(1),
DistroRelease = reader.GetString(2),
BackportSemver = reader.IsDBNull(3) ? null : reader.GetString(3),
PatchId = reader.IsDBNull(4) ? null : reader.GetString(4),
PatchOrigin = reader.IsDBNull(5) ? null : reader.GetString(5),
EvidenceRef = reader.IsDBNull(6) ? null : reader.GetGuid(6),
Confidence = reader.GetDecimal(7),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(8),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(9)
};
}
#endregion
}

View File

@@ -376,6 +376,37 @@ public sealed class SbomRegistryRepository : RepositoryBase<ConcelierDataSource>
cancellationToken);
}
/// <inheritdoc />
/// <remarks>
/// NOTE(review): despite the name, only <c>component_count</c> is derived from
/// <paramref name="purls"/> and persisted here — the individual PURLs are not
/// stored. Confirm whether a separate purl index is updated elsewhere.
/// Best-effort: silently no-ops when the digest is not registered.
/// </remarks>
public async Task UpdatePurlsAsync(
    string digest,
    IReadOnlyList<string> purls,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(digest);
    ArgumentNullException.ThrowIfNull(purls);

    // The SBOM must already be registered; skip the write otherwise.
    if (await GetByDigestAsync(digest, cancellationToken).ConfigureAwait(false) is null)
    {
        return;
    }

    const string sql = """
        UPDATE vuln.sbom_registry
        SET component_count = @component_count
        WHERE digest = @digest
        """;

    await ExecuteAsync(
        SystemTenantId,
        sql,
        cmd =>
        {
            AddParameter(cmd, "digest", digest);
            AddParameter(cmd, "component_count", purls.Count);
        },
        cancellationToken).ConfigureAwait(false);
}
#endregion
#region Private Helpers

View File

@@ -11,6 +11,7 @@ using ExportingContracts = StellaOps.Concelier.Storage.Exporting;
using JpFlagsContracts = StellaOps.Concelier.Storage.JpFlags;
using PsirtContracts = StellaOps.Concelier.Storage.PsirtFlags;
using HistoryContracts = StellaOps.Concelier.Storage.ChangeHistory;
using StellaOps.Concelier.Merge.Backport;
namespace StellaOps.Concelier.Storage.Postgres;
@@ -61,6 +62,10 @@ public static class ServiceCollectionExtensions
services.AddScoped<JpFlagsContracts.IJpFlagStore, PostgresJpFlagStore>();
services.AddScoped<HistoryContracts.IChangeHistoryStore, PostgresChangeHistoryStore>();
// Provenance scope services (backport integration)
services.AddScoped<Repositories.IProvenanceScopeRepository, ProvenanceScopeRepository>();
services.AddScoped<IProvenanceScopeStore, PostgresProvenanceScopeStore>();
return services;
}
@@ -104,6 +109,10 @@ public static class ServiceCollectionExtensions
services.AddScoped<JpFlagsContracts.IJpFlagStore, PostgresJpFlagStore>();
services.AddScoped<HistoryContracts.IChangeHistoryStore, PostgresChangeHistoryStore>();
// Provenance scope services (backport integration)
services.AddScoped<Repositories.IProvenanceScopeRepository, ProvenanceScopeRepository>();
services.AddScoped<IProvenanceScopeStore, PostgresProvenanceScopeStore>();
return services;
}
}

View File

@@ -33,6 +33,7 @@
<ProjectReference Include="..\StellaOps.Concelier.Interest\StellaOps.Concelier.Interest.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.SbomIntegration\StellaOps.Concelier.SbomIntegration.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
</ItemGroup>