save development progress

This commit is contained in:
StellaOps Bot
2025-12-25 23:09:58 +02:00
parent d71853ad7e
commit aa70af062e
351 changed files with 37683 additions and 150156 deletions

View File

@@ -2364,6 +2364,124 @@ components:
type: array
items:
type: object
cacheHit:
type: boolean
description: Whether the decision was served from cache.
cacheSource:
type: string
enum:
- none
- inMemory
- redis
description: Source of cached data (none for fresh computation, inMemory for L1 cache, redis for Provcache L2).
executionTimeMs:
type: integer
description: Time taken to evaluate the policy in milliseconds.
provcache.TrustScoreComponent:
type: object
required:
- score
- weight
properties:
score:
type: integer
minimum: 0
maximum: 100
description: Component score (0-100).
weight:
type: number
format: float
minimum: 0
maximum: 1
description: Weight of this component in the total score (0.0-1.0).
provcache.TrustScoreBreakdown:
type: object
required:
- reachability
- sbomCompleteness
- vexCoverage
- policyFreshness
- signerTrust
properties:
reachability:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Reachability evidence contribution (weight 25%).
sbomCompleteness:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: SBOM completeness contribution (weight 20%).
vexCoverage:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: VEX statement coverage contribution (weight 20%).
policyFreshness:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Policy freshness contribution (weight 15%).
signerTrust:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Signer trust contribution (weight 20%).
provcache.ReplaySeed:
type: object
required:
- feedIds
- ruleIds
properties:
feedIds:
type: array
items:
type: string
description: Advisory feed identifiers used in evaluation.
ruleIds:
type: array
items:
type: string
description: Policy rule identifiers used in evaluation.
frozenEpoch:
type: string
format: date-time
description: Optional frozen epoch timestamp for deterministic replay.
provcache.DecisionDigest:
type: object
required:
- digestVersion
- veriKey
- verdictHash
- proofRoot
- replaySeed
- createdAt
- expiresAt
- trustScore
properties:
digestVersion:
type: string
description: Schema version of this digest format.
example: v1
veriKey:
type: string
description: Composite cache key that uniquely identifies the provenance decision context.
example: sha256:abc123...
verdictHash:
type: string
description: Hash of sorted dispositions from the evaluation result.
proofRoot:
type: string
description: Merkle root of all evidence chunks used in this decision.
replaySeed:
$ref: '#/components/schemas/provcache.ReplaySeed'
createdAt:
type: string
format: date-time
description: UTC timestamp when this digest was created.
expiresAt:
type: string
format: date-time
description: UTC timestamp when this digest expires.
trustScore:
type: integer
minimum: 0
maximum: 100
description: Composite trust score (0-100) indicating decision confidence.
trustScoreBreakdown:
$ref: '#/components/schemas/provcache.TrustScoreBreakdown'
description: Breakdown of trust score by component.
policy.PolicyListResponse:
type: object
required:

View File

@@ -99,6 +99,9 @@ internal static class CommandFactory
root.Add(DeltaCommandGroup.BuildDeltaCommand(verboseOption, cancellationToken));
root.Add(ReachabilityCommandGroup.BuildReachabilityCommand(services, verboseOption, cancellationToken));
// Sprint: SPRINT_8200_0014_0002 - Federation bundle export
root.Add(FederationCommandGroup.BuildFeedserCommand(services, verboseOption, cancellationToken));
// Add scan graph subcommand to existing scan command
var scanCommand = root.Children.OfType<Command>().FirstOrDefault(c => c.Name == "scan");
if (scanCommand is not null)

View File

@@ -0,0 +1,256 @@
// -----------------------------------------------------------------------------
// CommandHandlers.Federation.cs
// Sprint: SPRINT_8200_0014_0002 (Delta Bundle Export)
// Description: Command handlers for federation bundle operations.
// -----------------------------------------------------------------------------
using System.Net.Http.Headers;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Spectre.Console;
namespace StellaOps.Cli.Commands;
internal static partial class CommandHandlers
{
internal static async Task<int> HandleFederationBundleExportAsync(
IServiceProvider services,
string? sinceCursor,
string? output,
bool sign,
int compressLevel,
int maxItems,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
if (verbose)
{
AnsiConsole.MarkupLine("[blue]Exporting federation bundle...[/]");
AnsiConsole.MarkupLine($" Since Cursor: [bold]{Markup.Escape(sinceCursor ?? "(none - full export)")}[/]");
AnsiConsole.MarkupLine($" Sign: {sign}");
AnsiConsole.MarkupLine($" Compression Level: {compressLevel}");
AnsiConsole.MarkupLine($" Max Items: {maxItems}");
}
try
{
var httpClientFactory = services.GetService<IHttpClientFactory>();
if (httpClientFactory == null)
{
AnsiConsole.MarkupLine("[red]Error: HTTP client factory not available.[/]");
return 1;
}
var client = httpClientFactory.CreateClient("Concelier");
// Build query string
var queryParams = new List<string>();
if (!string.IsNullOrEmpty(sinceCursor))
queryParams.Add($"since_cursor={Uri.EscapeDataString(sinceCursor)}");
queryParams.Add($"sign={sign.ToString().ToLowerInvariant()}");
queryParams.Add($"max_items={maxItems}");
queryParams.Add($"compress_level={compressLevel}");
var url = $"/api/v1/federation/export?{string.Join("&", queryParams)}";
using var response = await client.GetAsync(url, HttpCompletionOption.ResponseHeadersRead, cancellationToken);
if (!response.IsSuccessStatusCode)
{
var error = await response.Content.ReadAsStringAsync(cancellationToken);
AnsiConsole.MarkupLine($"[red]Error: {response.StatusCode}[/]");
if (verbose)
{
AnsiConsole.MarkupLine($"[grey]{Markup.Escape(error)}[/]");
}
return 1;
}
// Extract metadata from headers
var bundleHash = response.Headers.TryGetValues("X-Bundle-Hash", out var hashValues)
? hashValues.FirstOrDefault()
: null;
var exportCursor = response.Headers.TryGetValues("X-Export-Cursor", out var cursorValues)
? cursorValues.FirstOrDefault()
: null;
var itemsCount = response.Headers.TryGetValues("X-Items-Count", out var countValues)
? countValues.FirstOrDefault()
: null;
// Determine output stream
Stream outputStream;
bool disposeStream;
if (string.IsNullOrEmpty(output))
{
outputStream = Console.OpenStandardOutput();
disposeStream = false;
}
else
{
outputStream = File.Create(output);
disposeStream = true;
}
try
{
await using var contentStream = await response.Content.ReadAsStreamAsync(cancellationToken);
await contentStream.CopyToAsync(outputStream, cancellationToken);
}
finally
{
if (disposeStream)
{
await outputStream.DisposeAsync();
}
}
// Output metadata
if (!string.IsNullOrEmpty(output))
{
if (json)
{
var metadata = new
{
bundle_hash = bundleHash,
export_cursor = exportCursor,
since_cursor = sinceCursor,
items_count = int.TryParse(itemsCount, out var count) ? count : 0,
output_path = output
};
AnsiConsole.WriteLine(JsonSerializer.Serialize(metadata, new JsonSerializerOptions
{
WriteIndented = true
}));
}
else
{
AnsiConsole.MarkupLine("[green]Bundle exported successfully.[/]");
AnsiConsole.MarkupLine($" Output: [bold]{Markup.Escape(output)}[/]");
if (bundleHash != null)
AnsiConsole.MarkupLine($" Hash: [dim]{bundleHash}[/]");
if (exportCursor != null)
AnsiConsole.MarkupLine($" New Cursor: [bold]{exportCursor}[/]");
if (itemsCount != null)
AnsiConsole.MarkupLine($" Items: {itemsCount}");
}
}
return 0;
}
catch (HttpRequestException ex)
{
AnsiConsole.MarkupLine($"[red]Connection error: {Markup.Escape(ex.Message)}[/]");
return 1;
}
catch (Exception ex)
{
AnsiConsole.MarkupLine($"[red]Error: {Markup.Escape(ex.Message)}[/]");
if (verbose)
{
AnsiConsole.WriteException(ex);
}
return 1;
}
}
internal static async Task<int> HandleFederationBundlePreviewAsync(
IServiceProvider services,
string? sinceCursor,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
if (verbose)
{
AnsiConsole.MarkupLine("[blue]Previewing federation export...[/]");
AnsiConsole.MarkupLine($" Since Cursor: [bold]{Markup.Escape(sinceCursor ?? "(none - full export)")}[/]");
}
try
{
var httpClientFactory = services.GetService<IHttpClientFactory>();
if (httpClientFactory == null)
{
AnsiConsole.MarkupLine("[red]Error: HTTP client factory not available.[/]");
return 1;
}
var client = httpClientFactory.CreateClient("Concelier");
var url = "/api/v1/federation/export/preview";
if (!string.IsNullOrEmpty(sinceCursor))
{
url += $"?since_cursor={Uri.EscapeDataString(sinceCursor)}";
}
using var response = await client.GetAsync(url, cancellationToken);
if (!response.IsSuccessStatusCode)
{
var error = await response.Content.ReadAsStringAsync(cancellationToken);
AnsiConsole.MarkupLine($"[red]Error: {response.StatusCode}[/]");
if (verbose)
{
AnsiConsole.MarkupLine($"[grey]{Markup.Escape(error)}[/]");
}
return 1;
}
var content = await response.Content.ReadAsStringAsync(cancellationToken);
if (json)
{
AnsiConsole.WriteLine(content);
}
else
{
var preview = JsonSerializer.Deserialize<PreviewResponse>(content, new JsonSerializerOptions
{
PropertyNameCaseInsensitive = true
});
if (preview != null)
{
AnsiConsole.MarkupLine("[green]Export Preview[/]");
AnsiConsole.MarkupLine($" Since Cursor: [dim]{sinceCursor ?? "(full export)"}[/]");
AnsiConsole.MarkupLine($" Estimated Canonicals: [bold]{preview.EstimatedCanonicals:N0}[/]");
AnsiConsole.MarkupLine($" Estimated Edges: [bold]{preview.EstimatedEdges:N0}[/]");
AnsiConsole.MarkupLine($" Estimated Deletions: [bold]{preview.EstimatedDeletions:N0}[/]");
AnsiConsole.MarkupLine($" Estimated Size: [bold]{preview.EstimatedSizeMb:F2} MB[/]");
}
else
{
AnsiConsole.WriteLine(content);
}
}
return 0;
}
catch (HttpRequestException ex)
{
AnsiConsole.MarkupLine($"[red]Connection error: {Markup.Escape(ex.Message)}[/]");
return 1;
}
catch (Exception ex)
{
AnsiConsole.MarkupLine($"[red]Error: {Markup.Escape(ex.Message)}[/]");
if (verbose)
{
AnsiConsole.WriteException(ex);
}
return 1;
}
}
private sealed class PreviewResponse
{
public string? SinceCursor { get; set; }
public int EstimatedCanonicals { get; set; }
public int EstimatedEdges { get; set; }
public int EstimatedDeletions { get; set; }
public long EstimatedSizeBytes { get; set; }
public double EstimatedSizeMb { get; set; }
}
}

View File

@@ -0,0 +1,152 @@
// -----------------------------------------------------------------------------
// FederationCommandGroup.cs
// Sprint: SPRINT_8200_0014_0002 (Delta Bundle Export)
// Tasks: EXPORT-8200-025, EXPORT-8200-026 - CLI commands for federation bundle export.
// Description: CLI commands for federation bundle export to support air-gapped sync.
// -----------------------------------------------------------------------------
using System.CommandLine;
using StellaOps.Cli.Extensions;
namespace StellaOps.Cli.Commands;
internal static class FederationCommandGroup
{
internal static Command BuildFeedserCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var feedser = new Command("feedser", "Federation bundle operations for multi-site sync.");
feedser.Add(BuildBundleCommand(services, verboseOption, cancellationToken));
return feedser;
}
private static Command BuildBundleCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var bundle = new Command("bundle", "Federation bundle operations.");
bundle.Add(BuildExportCommand(services, verboseOption, cancellationToken));
bundle.Add(BuildPreviewCommand(services, verboseOption, cancellationToken));
return bundle;
}
private static Command BuildExportCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var sinceCursorOption = new Option<string?>("--since-cursor", new[] { "-c" })
{
Description = "Export changes since this cursor position."
};
var outputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Output file path (default: stdout)."
};
var signOption = new Option<bool>("--sign", new[] { "-s" })
{
Description = "Sign the bundle with Authority key."
};
signOption.SetDefaultValue(true);
var compressLevelOption = new Option<int>("--compress-level", new[] { "-l" })
{
Description = "ZST compression level (1-19)."
};
compressLevelOption.SetDefaultValue(3);
var maxItemsOption = new Option<int>("--max-items", new[] { "-m" })
{
Description = "Maximum items per bundle (default: 10000)."
};
maxItemsOption.SetDefaultValue(10000);
var jsonOption = new Option<bool>("--json")
{
Description = "Output metadata as JSON."
};
var command = new Command("export", "Export federation bundle for air-gapped transfer.")
{
sinceCursorOption,
outputOption,
signOption,
compressLevelOption,
maxItemsOption,
jsonOption,
verboseOption
};
command.SetAction(parseResult =>
{
var sinceCursor = parseResult.GetValue(sinceCursorOption);
var output = parseResult.GetValue(outputOption);
var sign = parseResult.GetValue(signOption);
var compressLevel = parseResult.GetValue(compressLevelOption);
var maxItems = parseResult.GetValue(maxItemsOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleFederationBundleExportAsync(
services,
sinceCursor,
output,
sign,
compressLevel,
maxItems,
json,
verbose,
cancellationToken);
});
return command;
}
private static Command BuildPreviewCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var sinceCursorOption = new Option<string?>("--since-cursor", new[] { "-c" })
{
Description = "Preview changes since this cursor position."
};
var jsonOption = new Option<bool>("--json")
{
Description = "Output as JSON."
};
var command = new Command("preview", "Preview export statistics without creating bundle.")
{
sinceCursorOption,
jsonOption,
verboseOption
};
command.SetAction(parseResult =>
{
var sinceCursor = parseResult.GetValue(sinceCursorOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleFederationBundlePreviewAsync(
services,
sinceCursor,
json,
verbose,
cancellationToken);
});
return command;
}
}

View File

@@ -17,6 +17,7 @@ using StellaOps.Configuration;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.ExportCenter.Client;
using StellaOps.ExportCenter.Core.EvidenceCache;
using StellaOps.Cryptography.Plugin.SimRemote.DependencyInjection;
namespace StellaOps.Cli;

View File

@@ -114,6 +114,9 @@ public static class ErrorCodes
/// <summary>AirGap mode is disabled.</summary>
public const string AirGapDisabled = "AIRGAP_DISABLED";
/// <summary>Federation sync is disabled.</summary>
public const string FederationDisabled = "FEDERATION_DISABLED";
/// <summary>Sealed mode violation.</summary>
public const string SealedModeViolation = "SEALED_MODE_VIOLATION";

View File

@@ -1,12 +1,13 @@
// -----------------------------------------------------------------------------
// CanonicalAdvisoryEndpointExtensions.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Tasks: CANSVC-8200-016 through CANSVC-8200-019
// Tasks: CANSVC-8200-016 through CANSVC-8200-019, ISCORE-8200-030
// Description: API endpoints for canonical advisory service
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Mvc;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.Interest;
using StellaOps.Concelier.WebService.Results;
using HttpResults = Microsoft.AspNetCore.Http.Results;
@@ -29,14 +30,25 @@ internal static class CanonicalAdvisoryEndpointExtensions
group.MapGet("/{id:guid}", async (
Guid id,
ICanonicalAdvisoryService service,
IInterestScoringService? scoringService,
HttpContext context,
CancellationToken ct) =>
{
var canonical = await service.GetByIdAsync(id, ct).ConfigureAwait(false);
return canonical is null
? HttpResults.NotFound(new { error = "Canonical advisory not found", id })
: HttpResults.Ok(MapToResponse(canonical));
if (canonical is null)
{
return HttpResults.NotFound(new { error = "Canonical advisory not found", id });
}
// Fetch interest score if scoring service is available
Interest.Models.InterestScore? score = null;
if (scoringService is not null)
{
score = await scoringService.GetScoreAsync(id, ct).ConfigureAwait(false);
}
return HttpResults.Ok(MapToResponse(canonical, score));
})
.WithName("GetCanonicalById")
.WithSummary("Get canonical advisory by ID")
@@ -73,7 +85,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
var byCve = await service.GetByCveAsync(cve, ct).ConfigureAwait(false);
return HttpResults.Ok(new CanonicalAdvisoryListResponse
{
Items = byCve.Select(MapToResponse).ToList(),
Items = byCve.Select(c => MapToResponse(c)).ToList(),
TotalCount = byCve.Count
});
}
@@ -84,7 +96,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
var byArtifact = await service.GetByArtifactAsync(artifact, ct).ConfigureAwait(false);
return HttpResults.Ok(new CanonicalAdvisoryListResponse
{
Items = byArtifact.Select(MapToResponse).ToList(),
Items = byArtifact.Select(c => MapToResponse(c)).ToList(),
TotalCount = byArtifact.Count
});
}
@@ -99,7 +111,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
var result = await service.QueryAsync(options, ct).ConfigureAwait(false);
return HttpResults.Ok(new CanonicalAdvisoryListResponse
{
Items = result.Items.Select(MapToResponse).ToList(),
Items = result.Items.Select(c => MapToResponse(c)).ToList(),
TotalCount = result.TotalCount,
Offset = result.Offset,
Limit = result.Limit
@@ -252,7 +264,9 @@ internal static class CanonicalAdvisoryEndpointExtensions
.Produces(StatusCodes.Status400BadRequest);
}
private static CanonicalAdvisoryResponse MapToResponse(CanonicalAdvisory canonical) => new()
private static CanonicalAdvisoryResponse MapToResponse(
CanonicalAdvisory canonical,
Interest.Models.InterestScore? score = null) => new()
{
Id = canonical.Id,
Cve = canonical.Cve,
@@ -268,6 +282,13 @@ internal static class CanonicalAdvisoryEndpointExtensions
Weaknesses = canonical.Weaknesses,
CreatedAt = canonical.CreatedAt,
UpdatedAt = canonical.UpdatedAt,
InterestScore = score is not null ? new InterestScoreInfo
{
Score = score.Score,
Tier = score.Tier.ToString(),
Reasons = score.Reasons,
ComputedAt = score.ComputedAt
} : null,
SourceEdges = canonical.SourceEdges.Select(e => new SourceEdgeResponse
{
Id = e.Id,
@@ -303,9 +324,21 @@ public sealed record CanonicalAdvisoryResponse
public IReadOnlyList<string> Weaknesses { get; init; } = [];
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset UpdatedAt { get; init; }
public InterestScoreInfo? InterestScore { get; init; }
public IReadOnlyList<SourceEdgeResponse> SourceEdges { get; init; } = [];
}
/// <summary>
/// Interest score information embedded in advisory response.
/// </summary>
public sealed record InterestScoreInfo
{
public double Score { get; init; }
public required string Tier { get; init; }
public IReadOnlyList<string> Reasons { get; init; } = [];
public DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// Response for a source edge.
/// </summary>

View File

@@ -0,0 +1,132 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Federation.Export;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.WebService.Options;
using StellaOps.Concelier.WebService.Results;
using HttpResults = Microsoft.AspNetCore.Http.Results;
namespace StellaOps.Concelier.WebService.Extensions;
/// <summary>
/// Endpoint extensions for Federation functionality.
/// Per SPRINT_8200_0014_0002_CONCEL_delta_bundle_export.
/// </summary>
internal static class FederationEndpointExtensions
{
public static void MapConcelierFederationEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1/federation")
.WithTags("Federation");
// GET /api/v1/federation/export - Export delta bundle
group.MapGet("/export", async (
HttpContext context,
IBundleExportService exportService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "since_cursor")] string? sinceCursor = null,
[FromQuery] bool sign = true,
[FromQuery(Name = "max_items")] int maxItems = 10000,
[FromQuery(Name = "compress_level")] int compressLevel = 3) =>
{
var options = optionsMonitor.CurrentValue;
if (!options.Federation.Enabled)
{
return ConcelierProblemResultFactory.FederationDisabled(context);
}
// Validate parameters
if (maxItems < 1 || maxItems > 100_000)
{
return HttpResults.BadRequest(new { error = "max_items must be between 1 and 100000" });
}
if (compressLevel < 1 || compressLevel > 19)
{
return HttpResults.BadRequest(new { error = "compress_level must be between 1 and 19" });
}
var exportOptions = new BundleExportOptions
{
Sign = sign,
MaxItems = maxItems,
CompressionLevel = compressLevel
};
// Set response headers for streaming
context.Response.ContentType = "application/zstd";
context.Response.Headers.ContentDisposition =
$"attachment; filename=\"feedser-bundle-{DateTime.UtcNow:yyyyMMdd-HHmmss}.zst\"";
// Export directly to response stream
var result = await exportService.ExportToStreamAsync(
context.Response.Body,
sinceCursor,
exportOptions,
cancellationToken);
// Add metadata headers
context.Response.Headers.Append("X-Bundle-Hash", result.BundleHash);
context.Response.Headers.Append("X-Export-Cursor", result.ExportCursor);
context.Response.Headers.Append("X-Items-Count", result.Counts.Total.ToString());
return HttpResults.Empty;
})
.WithName("ExportFederationBundle")
.WithSummary("Export delta bundle for federation sync")
.Produces(200, contentType: "application/zstd")
.ProducesProblem(400)
.ProducesProblem(503);
// GET /api/v1/federation/export/preview - Preview export statistics
group.MapGet("/export/preview", async (
HttpContext context,
IBundleExportService exportService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "since_cursor")] string? sinceCursor = null) =>
{
var options = optionsMonitor.CurrentValue;
if (!options.Federation.Enabled)
{
return ConcelierProblemResultFactory.FederationDisabled(context);
}
var preview = await exportService.PreviewAsync(sinceCursor, cancellationToken);
return HttpResults.Ok(new
{
since_cursor = sinceCursor,
estimated_canonicals = preview.EstimatedCanonicals,
estimated_edges = preview.EstimatedEdges,
estimated_deletions = preview.EstimatedDeletions,
estimated_size_bytes = preview.EstimatedSizeBytes,
estimated_size_mb = Math.Round(preview.EstimatedSizeBytes / 1024.0 / 1024.0, 2)
});
})
.WithName("PreviewFederationExport")
.WithSummary("Preview export statistics without creating bundle")
.Produces<object>(200)
.ProducesProblem(503);
// GET /api/v1/federation/status - Federation status
group.MapGet("/status", (
HttpContext context,
IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
{
var options = optionsMonitor.CurrentValue;
return HttpResults.Ok(new
{
enabled = options.Federation.Enabled,
site_id = options.Federation.SiteId,
default_compression_level = options.Federation.DefaultCompressionLevel,
default_max_items = options.Federation.DefaultMaxItems
});
})
.WithName("GetFederationStatus")
.WithSummary("Get federation configuration status")
.Produces<object>(200);
}
}

View File

@@ -0,0 +1,311 @@
// -----------------------------------------------------------------------------
// InterestScoreEndpointExtensions.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Tasks: ISCORE-8200-029 through ISCORE-8200-031
// Description: API endpoints for interest scoring service
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Mvc;
using StellaOps.Concelier.Interest;
using StellaOps.Concelier.Interest.Models;
using HttpResults = Microsoft.AspNetCore.Http.Results;
namespace StellaOps.Concelier.WebService.Extensions;
/// <summary>
/// Endpoint extensions for interest score operations.
/// </summary>
internal static class InterestScoreEndpointExtensions
{
private const string ScoreReadPolicy = "Concelier.Interest.Read";
private const string ScoreAdminPolicy = "Concelier.Interest.Admin";
public static void MapInterestScoreEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1")
.WithTags("Interest Scores");
// GET /api/v1/canonical/{id}/score - Get interest score for a canonical advisory
group.MapGet("/canonical/{id:guid}/score", async (
Guid id,
IInterestScoringService scoringService,
CancellationToken ct) =>
{
var score = await scoringService.GetScoreAsync(id, ct).ConfigureAwait(false);
return score is null
? HttpResults.NotFound(new { error = "Interest score not found", canonicalId = id })
: HttpResults.Ok(MapToResponse(score));
})
.WithName("GetInterestScore")
.WithSummary("Get interest score for a canonical advisory")
.Produces<InterestScoreResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /api/v1/scores - Query interest scores
group.MapGet("/scores", async (
[FromQuery] double? minScore,
[FromQuery] double? maxScore,
[FromQuery] int? offset,
[FromQuery] int? limit,
IInterestScoreRepository repository,
CancellationToken ct) =>
{
var scores = await repository.GetAllAsync(offset ?? 0, limit ?? 50, ct).ConfigureAwait(false);
// Filter by score range if specified
var filtered = scores.AsEnumerable();
if (minScore.HasValue)
{
filtered = filtered.Where(s => s.Score >= minScore.Value);
}
if (maxScore.HasValue)
{
filtered = filtered.Where(s => s.Score <= maxScore.Value);
}
var items = filtered.Select(MapToResponse).ToList();
return HttpResults.Ok(new InterestScoreListResponse
{
Items = items,
TotalCount = items.Count,
Offset = offset ?? 0,
Limit = limit ?? 50
});
})
.WithName("QueryInterestScores")
.WithSummary("Query interest scores with optional filtering")
.Produces<InterestScoreListResponse>(StatusCodes.Status200OK);
// GET /api/v1/scores/distribution - Get score distribution statistics
group.MapGet("/scores/distribution", async (
IInterestScoreRepository repository,
CancellationToken ct) =>
{
var distribution = await repository.GetScoreDistributionAsync(ct).ConfigureAwait(false);
return HttpResults.Ok(new ScoreDistributionResponse
{
HighCount = distribution.HighCount,
MediumCount = distribution.MediumCount,
LowCount = distribution.LowCount,
NoneCount = distribution.NoneCount,
TotalCount = distribution.TotalCount,
AverageScore = distribution.AverageScore,
MedianScore = distribution.MedianScore
});
})
.WithName("GetScoreDistribution")
.WithSummary("Get score distribution statistics")
.Produces<ScoreDistributionResponse>(StatusCodes.Status200OK);
// POST /api/v1/canonical/{id}/score/compute - Compute score for a canonical
group.MapPost("/canonical/{id:guid}/score/compute", async (
Guid id,
IInterestScoringService scoringService,
CancellationToken ct) =>
{
var score = await scoringService.ComputeScoreAsync(id, ct).ConfigureAwait(false);
await scoringService.UpdateScoreAsync(score, ct).ConfigureAwait(false);
return HttpResults.Ok(MapToResponse(score));
})
.WithName("ComputeInterestScore")
.WithSummary("Compute and update interest score for a canonical advisory")
.Produces<InterestScoreResponse>(StatusCodes.Status200OK);
// POST /api/v1/scores/recalculate - Admin endpoint to trigger full recalculation
group.MapPost("/scores/recalculate", async (
[FromBody] RecalculateRequest? request,
IInterestScoringService scoringService,
CancellationToken ct) =>
{
int updated;
if (request?.CanonicalIds?.Count > 0)
{
// Batch recalculation for specific IDs
updated = await scoringService.BatchUpdateAsync(request.CanonicalIds, ct).ConfigureAwait(false);
}
else
{
// Full recalculation
updated = await scoringService.RecalculateAllAsync(ct).ConfigureAwait(false);
}
return HttpResults.Accepted((string?)null, new RecalculateResponse
{
Updated = updated,
Mode = request?.CanonicalIds?.Count > 0 ? "batch" : "full",
StartedAt = DateTimeOffset.UtcNow
});
})
.WithName("RecalculateScores")
.WithSummary("Trigger interest score recalculation (full or batch)")
.Produces<RecalculateResponse>(StatusCodes.Status202Accepted);
// POST /api/v1/scores/degrade - Admin endpoint to run stub degradation
group.MapPost("/scores/degrade", async (
[FromBody] DegradeRequest? request,
IInterestScoringService scoringService,
Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
CancellationToken ct) =>
{
var threshold = request?.Threshold ?? options.Value.DegradationPolicy.DegradationThreshold;
var degraded = await scoringService.DegradeToStubsAsync(threshold, ct).ConfigureAwait(false);
return HttpResults.Ok(new DegradeResponse
{
Degraded = degraded,
Threshold = threshold,
ExecutedAt = DateTimeOffset.UtcNow
});
})
.WithName("DegradeToStubs")
.WithSummary("Degrade low-interest advisories to stubs")
.Produces<DegradeResponse>(StatusCodes.Status200OK);
// POST /api/v1/scores/restore - Admin endpoint to restore stubs
group.MapPost("/scores/restore", async (
[FromBody] RestoreRequest? request,
IInterestScoringService scoringService,
Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
CancellationToken ct) =>
{
var threshold = request?.Threshold ?? options.Value.DegradationPolicy.RestorationThreshold;
var restored = await scoringService.RestoreFromStubsAsync(threshold, ct).ConfigureAwait(false);
return HttpResults.Ok(new RestoreResponse
{
Restored = restored,
Threshold = threshold,
ExecutedAt = DateTimeOffset.UtcNow
});
})
.WithName("RestoreFromStubs")
.WithSummary("Restore stubs with increased interest scores")
.Produces<RestoreResponse>(StatusCodes.Status200OK);
}
private static InterestScoreResponse MapToResponse(InterestScore score) => new()
{
CanonicalId = score.CanonicalId,
Score = score.Score,
Tier = score.Tier.ToString(),
Reasons = score.Reasons,
LastSeenInBuild = score.LastSeenInBuild,
ComputedAt = score.ComputedAt
};
}
#region Response DTOs
/// <summary>
/// Response for an interest score.
/// </summary>
public sealed record InterestScoreResponse
{
public Guid CanonicalId { get; init; }
public double Score { get; init; }
public required string Tier { get; init; }
public IReadOnlyList<string> Reasons { get; init; } = [];
public Guid? LastSeenInBuild { get; init; }
public DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// Response for a list of interest scores.
/// </summary>
public sealed record InterestScoreListResponse
{
public IReadOnlyList<InterestScoreResponse> Items { get; init; } = [];
public long TotalCount { get; init; }
public int Offset { get; init; }
public int Limit { get; init; }
}
/// <summary>
/// Response for score distribution.
/// </summary>
public sealed record ScoreDistributionResponse
{
public long HighCount { get; init; }
public long MediumCount { get; init; }
public long LowCount { get; init; }
public long NoneCount { get; init; }
public long TotalCount { get; init; }
public double AverageScore { get; init; }
public double MedianScore { get; init; }
}
/// <summary>
/// Response for recalculation operation.
/// </summary>
public sealed record RecalculateResponse
{
public int Updated { get; init; }
public required string Mode { get; init; }
public DateTimeOffset StartedAt { get; init; }
}
/// <summary>
/// Response for degradation operation.
/// </summary>
public sealed record DegradeResponse
{
public int Degraded { get; init; }
public double Threshold { get; init; }
public DateTimeOffset ExecutedAt { get; init; }
}
/// <summary>
/// Response for restoration operation.
/// </summary>
public sealed record RestoreResponse
{
public int Restored { get; init; }
public double Threshold { get; init; }
public DateTimeOffset ExecutedAt { get; init; }
}
#endregion
#region Request DTOs
/// <summary>
/// Request for recalculation operation.
/// </summary>
public sealed record RecalculateRequest
{
/// <summary>
/// Optional list of canonical IDs to recalculate.
/// If empty or null, full recalculation is performed.
/// </summary>
public IReadOnlyList<Guid>? CanonicalIds { get; init; }
}
/// <summary>
/// Request for degradation operation.
/// </summary>
public sealed record DegradeRequest
{
/// <summary>
/// Optional threshold override. If not specified, uses configured default.
/// </summary>
public double? Threshold { get; init; }
}
/// <summary>
/// Request for restoration operation.
/// </summary>
public sealed record RestoreRequest
{
/// <summary>
/// Optional threshold override. If not specified, uses configured default.
/// </summary>
public double? Threshold { get; init; }
}
#endregion

View File

@@ -0,0 +1,350 @@
// -----------------------------------------------------------------------------
// SbomEndpointExtensions.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Tasks: SBOM-8200-022 through SBOM-8200-024
// Description: API endpoints for SBOM registration and learning
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Mvc;
using StellaOps.Concelier.SbomIntegration;
using StellaOps.Concelier.SbomIntegration.Models;
using HttpResults = Microsoft.AspNetCore.Http.Results;
namespace StellaOps.Concelier.WebService.Extensions;
/// <summary>
/// Endpoint extensions for SBOM operations.
/// </summary>
internal static class SbomEndpointExtensions
{
public static void MapSbomEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1")
.WithTags("SBOM Learning");
// POST /api/v1/learn/sbom - Register and learn from an SBOM
group.MapPost("/learn/sbom", async (
[FromBody] LearnSbomRequest request,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
var input = new SbomRegistrationInput
{
Digest = request.SbomDigest,
Format = ParseSbomFormat(request.Format),
SpecVersion = request.SpecVersion ?? "1.6",
PrimaryName = request.PrimaryName,
PrimaryVersion = request.PrimaryVersion,
Purls = request.Purls,
Source = request.Source ?? "api",
TenantId = request.TenantId,
ReachabilityMap = request.ReachabilityMap,
DeploymentMap = request.DeploymentMap
};
var result = await registryService.LearnSbomAsync(input, ct).ConfigureAwait(false);
return HttpResults.Ok(new SbomLearnResponse
{
SbomDigest = result.Registration.Digest,
SbomId = result.Registration.Id,
ComponentsProcessed = result.Registration.ComponentCount,
AdvisoriesMatched = result.Matches.Count,
ScoresUpdated = result.ScoresUpdated,
ProcessingTimeMs = result.ProcessingTimeMs
});
})
.WithName("LearnSbom")
.WithSummary("Register SBOM and update interest scores for affected advisories")
.Produces<SbomLearnResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest);
// GET /api/v1/sboms/{digest}/affected - Get advisories affecting an SBOM
group.MapGet("/sboms/{digest}/affected", async (
string digest,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registration = await registryService.GetByDigestAsync(digest, ct).ConfigureAwait(false);
if (registration is null)
{
return HttpResults.NotFound(new { error = "SBOM not found", digest });
}
var matches = await registryService.GetMatchesAsync(digest, ct).ConfigureAwait(false);
return HttpResults.Ok(new SbomAffectedResponse
{
SbomDigest = digest,
SbomId = registration.Id,
PrimaryName = registration.PrimaryName,
PrimaryVersion = registration.PrimaryVersion,
ComponentCount = registration.ComponentCount,
AffectedCount = matches.Count,
Matches = matches.Select(m => new SbomMatchInfo
{
CanonicalId = m.CanonicalId,
Purl = m.Purl,
IsReachable = m.IsReachable,
IsDeployed = m.IsDeployed,
Confidence = m.Confidence,
Method = m.Method.ToString(),
MatchedAt = m.MatchedAt
}).ToList()
});
})
.WithName("GetSbomAffected")
.WithSummary("Get advisories affecting an SBOM")
.Produces<SbomAffectedResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /api/v1/sboms - List registered SBOMs
group.MapGet("/sboms", async (
[FromQuery] int? offset,
[FromQuery] int? limit,
[FromQuery] string? tenantId,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registrations = await registryService.ListAsync(
offset ?? 0,
limit ?? 50,
tenantId,
ct).ConfigureAwait(false);
var count = await registryService.CountAsync(tenantId, ct).ConfigureAwait(false);
return HttpResults.Ok(new SbomListResponse
{
Items = registrations.Select(r => new SbomSummary
{
Id = r.Id,
Digest = r.Digest,
Format = r.Format.ToString(),
PrimaryName = r.PrimaryName,
PrimaryVersion = r.PrimaryVersion,
ComponentCount = r.ComponentCount,
AffectedCount = r.AffectedCount,
RegisteredAt = r.RegisteredAt,
LastMatchedAt = r.LastMatchedAt
}).ToList(),
TotalCount = count,
Offset = offset ?? 0,
Limit = limit ?? 50
});
})
.WithName("ListSboms")
.WithSummary("List registered SBOMs with pagination")
.Produces<SbomListResponse>(StatusCodes.Status200OK);
// GET /api/v1/sboms/{digest} - Get SBOM registration details
group.MapGet("/sboms/{digest}", async (
string digest,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registration = await registryService.GetByDigestAsync(digest, ct).ConfigureAwait(false);
if (registration is null)
{
return HttpResults.NotFound(new { error = "SBOM not found", digest });
}
return HttpResults.Ok(new SbomDetailResponse
{
Id = registration.Id,
Digest = registration.Digest,
Format = registration.Format.ToString(),
SpecVersion = registration.SpecVersion,
PrimaryName = registration.PrimaryName,
PrimaryVersion = registration.PrimaryVersion,
ComponentCount = registration.ComponentCount,
AffectedCount = registration.AffectedCount,
Source = registration.Source,
TenantId = registration.TenantId,
RegisteredAt = registration.RegisteredAt,
LastMatchedAt = registration.LastMatchedAt
});
})
.WithName("GetSbom")
.WithSummary("Get SBOM registration details")
.Produces<SbomDetailResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// DELETE /api/v1/sboms/{digest} - Unregister an SBOM
group.MapDelete("/sboms/{digest}", async (
string digest,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
await registryService.UnregisterAsync(digest, ct).ConfigureAwait(false);
return HttpResults.NoContent();
})
.WithName("UnregisterSbom")
.WithSummary("Unregister an SBOM")
.Produces(StatusCodes.Status204NoContent);
// POST /api/v1/sboms/{digest}/rematch - Rematch SBOM against current advisories
group.MapPost("/sboms/{digest}/rematch", async (
string digest,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
try
{
var result = await registryService.RematchSbomAsync(digest, ct).ConfigureAwait(false);
return HttpResults.Ok(new SbomRematchResponse
{
SbomDigest = digest,
PreviousAffectedCount = result.Registration.AffectedCount,
NewAffectedCount = result.Matches.Count,
ProcessingTimeMs = result.ProcessingTimeMs
});
}
catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
{
return HttpResults.NotFound(new { error = ex.Message });
}
})
.WithName("RematchSbom")
.WithSummary("Re-match SBOM against current advisory database")
.Produces<SbomRematchResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /api/v1/sboms/stats - Get SBOM registry statistics
group.MapGet("/sboms/stats", async (
[FromQuery] string? tenantId,
ISbomRegistryService registryService,
CancellationToken ct) =>
{
var stats = await registryService.GetStatsAsync(tenantId, ct).ConfigureAwait(false);
return HttpResults.Ok(new SbomStatsResponse
{
TotalSboms = stats.TotalSboms,
TotalPurls = stats.TotalPurls,
TotalMatches = stats.TotalMatches,
AffectedSboms = stats.AffectedSboms,
AverageMatchesPerSbom = stats.AverageMatchesPerSbom
});
})
.WithName("GetSbomStats")
.WithSummary("Get SBOM registry statistics")
.Produces<SbomStatsResponse>(StatusCodes.Status200OK);
}
private static SbomFormat ParseSbomFormat(string? format)
{
return format?.ToLowerInvariant() switch
{
"cyclonedx" => SbomFormat.CycloneDX,
"spdx" => SbomFormat.SPDX,
_ => SbomFormat.CycloneDX
};
}
}
#region Request/Response DTOs
public sealed record LearnSbomRequest
{
public required string SbomDigest { get; init; }
public string? Format { get; init; }
public string? SpecVersion { get; init; }
public string? PrimaryName { get; init; }
public string? PrimaryVersion { get; init; }
public required IReadOnlyList<string> Purls { get; init; }
public string? Source { get; init; }
public string? TenantId { get; init; }
public IReadOnlyDictionary<string, bool>? ReachabilityMap { get; init; }
public IReadOnlyDictionary<string, bool>? DeploymentMap { get; init; }
}
public sealed record SbomLearnResponse
{
public required string SbomDigest { get; init; }
public Guid SbomId { get; init; }
public int ComponentsProcessed { get; init; }
public int AdvisoriesMatched { get; init; }
public int ScoresUpdated { get; init; }
public double ProcessingTimeMs { get; init; }
}
public sealed record SbomAffectedResponse
{
public required string SbomDigest { get; init; }
public Guid SbomId { get; init; }
public string? PrimaryName { get; init; }
public string? PrimaryVersion { get; init; }
public int ComponentCount { get; init; }
public int AffectedCount { get; init; }
public required IReadOnlyList<SbomMatchInfo> Matches { get; init; }
}
public sealed record SbomMatchInfo
{
public Guid CanonicalId { get; init; }
public required string Purl { get; init; }
public bool IsReachable { get; init; }
public bool IsDeployed { get; init; }
public double Confidence { get; init; }
public required string Method { get; init; }
public DateTimeOffset MatchedAt { get; init; }
}
public sealed record SbomListResponse
{
public required IReadOnlyList<SbomSummary> Items { get; init; }
public long TotalCount { get; init; }
public int Offset { get; init; }
public int Limit { get; init; }
}
public sealed record SbomSummary
{
public Guid Id { get; init; }
public required string Digest { get; init; }
public required string Format { get; init; }
public string? PrimaryName { get; init; }
public string? PrimaryVersion { get; init; }
public int ComponentCount { get; init; }
public int AffectedCount { get; init; }
public DateTimeOffset RegisteredAt { get; init; }
public DateTimeOffset? LastMatchedAt { get; init; }
}
public sealed record SbomDetailResponse
{
public Guid Id { get; init; }
public required string Digest { get; init; }
public required string Format { get; init; }
public required string SpecVersion { get; init; }
public string? PrimaryName { get; init; }
public string? PrimaryVersion { get; init; }
public int ComponentCount { get; init; }
public int AffectedCount { get; init; }
public required string Source { get; init; }
public string? TenantId { get; init; }
public DateTimeOffset RegisteredAt { get; init; }
public DateTimeOffset? LastMatchedAt { get; init; }
}
public sealed record SbomRematchResponse
{
public required string SbomDigest { get; init; }
public int PreviousAffectedCount { get; init; }
public int NewAffectedCount { get; init; }
public double ProcessingTimeMs { get; init; }
}
public sealed record SbomStatsResponse
{
public long TotalSboms { get; init; }
public long TotalPurls { get; init; }
public long TotalMatches { get; init; }
public long AffectedSboms { get; init; }
public double AverageMatchesPerSbom { get; init; }
}
#endregion

View File

@@ -38,6 +38,12 @@ public sealed class ConcelierOptions
/// </summary>
public AirGapOptions AirGap { get; set; } = new();
/// <summary>
/// Federation sync configuration.
/// Per SPRINT_8200_0014_0002_CONCEL_delta_bundle_export.
/// </summary>
public FederationOptions Federation { get; set; } = new();
/// <summary>
/// Stella Router integration configuration (disabled by default).
/// When enabled, ASP.NET endpoints are automatically registered with the Router.
@@ -266,4 +272,35 @@ public sealed class ConcelierOptions
[JsonIgnore]
public string RootAbsolute { get; internal set; } = string.Empty;
}
/// <summary>
/// Federation sync options for multi-site deployment.
/// </summary>
public sealed class FederationOptions
{
/// <summary>
/// Enable federation endpoints.
/// </summary>
public bool Enabled { get; set; }
/// <summary>
/// Site identifier for this instance.
/// </summary>
public string SiteId { get; set; } = "default";
/// <summary>
/// Default ZST compression level (1-19).
/// </summary>
public int DefaultCompressionLevel { get; set; } = 3;
/// <summary>
/// Default maximum items per export bundle.
/// </summary>
public int DefaultMaxItems { get; set; } = 10_000;
/// <summary>
/// Require bundle signatures.
/// </summary>
public bool RequireSignature { get; set; } = true;
}
}

View File

@@ -305,6 +305,21 @@ public static class ConcelierProblemResultFactory
"AirGap mode is not enabled on this instance.");
}
/// <summary>
/// Creates a 503 Service Unavailable response for Federation disabled.
/// Per SPRINT_8200_0014_0002_CONCEL_delta_bundle_export.
/// </summary>
public static IResult FederationDisabled(HttpContext context)
{
return Problem(
context,
"https://stellaops.org/problems/federation-disabled",
"Federation disabled",
StatusCodes.Status503ServiceUnavailable,
ErrorCodes.FederationDisabled,
"Federation sync is not enabled on this instance.");
}
/// <summary>
/// Creates a 403 Forbidden response for sealed mode violation.
/// </summary>

View File

@@ -23,7 +23,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Interest/StellaOps.Concelier.Interest.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.SbomIntegration/StellaOps.Concelier.SbomIntegration.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Storage.Postgres/StellaOps.Concelier.Storage.Postgres.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Federation/StellaOps.Concelier.Federation.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />

View File

@@ -0,0 +1,218 @@
// -----------------------------------------------------------------------------
// AdvisoryCacheKeys.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-004, VCACHE-8200-005, VCACHE-8200-006, VCACHE-8200-007, VCACHE-8200-008
// Description: Key schema for Concelier Valkey cache
// -----------------------------------------------------------------------------
using System.Text;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Static class for generating Valkey cache keys for canonical advisories.
/// </summary>
/// <remarks>
/// Key Schema:
/// <code>
/// advisory:{merge_hash} → JSON(CanonicalAdvisory) - TTL based on interest_score
/// rank:hot → ZSET { merge_hash: interest_score } - max 10,000 entries
/// by:purl:{normalized_purl} → SET { merge_hash, ... } - TTL 24h
/// by:cve:{cve_id} → STRING merge_hash - TTL 24h
/// cache:stats:hits → INCR counter
/// cache:stats:misses → INCR counter
/// cache:warmup:last → STRING ISO8601 timestamp
/// </code>
/// </remarks>
public static class AdvisoryCacheKeys
{
/// <summary>
/// Default key prefix for all cache keys.
/// </summary>
public const string DefaultPrefix = "concelier:";
/// <summary>
/// Key for advisory by merge hash.
/// Format: {prefix}advisory:{mergeHash}
/// </summary>
public static string Advisory(string mergeHash, string prefix = DefaultPrefix)
=> $"{prefix}advisory:{mergeHash}";
/// <summary>
/// Key for the hot advisory sorted set.
/// Format: {prefix}rank:hot
/// </summary>
public static string HotSet(string prefix = DefaultPrefix)
=> $"{prefix}rank:hot";
/// <summary>
/// Key for PURL index set.
/// Format: {prefix}by:purl:{normalizedPurl}
/// </summary>
/// <param name="purl">The PURL (will be normalized).</param>
/// <param name="prefix">Key prefix.</param>
public static string ByPurl(string purl, string prefix = DefaultPrefix)
=> $"{prefix}by:purl:{NormalizePurl(purl)}";
/// <summary>
/// Key for CVE mapping.
/// Format: {prefix}by:cve:{cveId}
/// </summary>
/// <param name="cve">The CVE identifier (case-insensitive).</param>
/// <param name="prefix">Key prefix.</param>
public static string ByCve(string cve, string prefix = DefaultPrefix)
=> $"{prefix}by:cve:{cve.ToUpperInvariant()}";
/// <summary>
/// Key for cache hit counter.
/// Format: {prefix}cache:stats:hits
/// </summary>
public static string StatsHits(string prefix = DefaultPrefix)
=> $"{prefix}cache:stats:hits";
/// <summary>
/// Key for cache miss counter.
/// Format: {prefix}cache:stats:misses
/// </summary>
public static string StatsMisses(string prefix = DefaultPrefix)
=> $"{prefix}cache:stats:misses";
/// <summary>
/// Key for last warmup timestamp.
/// Format: {prefix}cache:warmup:last
/// </summary>
public static string WarmupLast(string prefix = DefaultPrefix)
=> $"{prefix}cache:warmup:last";
/// <summary>
/// Key for warmup lock (for distributed coordination).
/// Format: {prefix}cache:warmup:lock
/// </summary>
public static string WarmupLock(string prefix = DefaultPrefix)
=> $"{prefix}cache:warmup:lock";
/// <summary>
/// Key for total cached advisories gauge.
/// Format: {prefix}cache:stats:count
/// </summary>
public static string StatsCount(string prefix = DefaultPrefix)
=> $"{prefix}cache:stats:count";
/// <summary>
/// Pattern to match all advisory keys (for scanning/cleanup).
/// Format: {prefix}advisory:*
/// </summary>
public static string AdvisoryPattern(string prefix = DefaultPrefix)
=> $"{prefix}advisory:*";
/// <summary>
/// Pattern to match all PURL index keys (for scanning/cleanup).
/// Format: {prefix}by:purl:*
/// </summary>
public static string PurlIndexPattern(string prefix = DefaultPrefix)
=> $"{prefix}by:purl:*";
/// <summary>
/// Pattern to match all CVE mapping keys (for scanning/cleanup).
/// Format: {prefix}by:cve:*
/// </summary>
public static string CveMappingPattern(string prefix = DefaultPrefix)
=> $"{prefix}by:cve:*";
/// <summary>
/// Normalizes a PURL for use as a cache key.
/// </summary>
/// <param name="purl">The PURL to normalize.</param>
/// <returns>Normalized PURL safe for use in cache keys.</returns>
/// <remarks>
/// Normalization:
/// 1. Lowercase the entire PURL
/// 2. Replace special characters that may cause issues in keys
/// 3. Truncate very long PURLs to prevent oversized keys
/// </remarks>
public static string NormalizePurl(string purl)
{
if (string.IsNullOrWhiteSpace(purl))
{
return string.Empty;
}
// Normalize to lowercase
var normalized = purl.ToLowerInvariant();
// Replace characters that could cause issues in Redis keys
// Redis keys should avoid spaces and some special chars for simplicity
var sb = new StringBuilder(normalized.Length);
foreach (var c in normalized)
{
// Allow alphanumeric, standard PURL chars: : / @ . - _ %
if (char.IsLetterOrDigit(c) ||
c is ':' or '/' or '@' or '.' or '-' or '_' or '%')
{
sb.Append(c);
}
else
{
// Replace other chars with underscore
sb.Append('_');
}
}
// Truncate if too long (Redis keys can be up to 512MB, but we want reasonable sizes)
const int MaxKeyLength = 500;
if (sb.Length > MaxKeyLength)
{
return sb.ToString(0, MaxKeyLength);
}
return sb.ToString();
}
/// <summary>
/// Extracts the merge hash from an advisory key.
/// </summary>
/// <param name="key">The full advisory key.</param>
/// <param name="prefix">The key prefix used.</param>
/// <returns>The merge hash, or null if key doesn't match expected format.</returns>
public static string? ExtractMergeHash(string key, string prefix = DefaultPrefix)
{
var expectedStart = $"{prefix}advisory:";
if (key.StartsWith(expectedStart, StringComparison.Ordinal))
{
return key[expectedStart.Length..];
}
return null;
}
/// <summary>
/// Extracts the PURL from a PURL index key.
/// </summary>
/// <param name="key">The full PURL index key.</param>
/// <param name="prefix">The key prefix used.</param>
/// <returns>The normalized PURL, or null if key doesn't match expected format.</returns>
public static string? ExtractPurl(string key, string prefix = DefaultPrefix)
{
var expectedStart = $"{prefix}by:purl:";
if (key.StartsWith(expectedStart, StringComparison.Ordinal))
{
return key[expectedStart.Length..];
}
return null;
}
/// <summary>
/// Extracts the CVE from a CVE mapping key.
/// </summary>
/// <param name="key">The full CVE mapping key.</param>
/// <param name="prefix">The key prefix used.</param>
/// <returns>The CVE identifier, or null if key doesn't match expected format.</returns>
public static string? ExtractCve(string key, string prefix = DefaultPrefix)
{
var expectedStart = $"{prefix}by:cve:";
if (key.StartsWith(expectedStart, StringComparison.Ordinal))
{
return key[expectedStart.Length..];
}
return null;
}
}

View File

@@ -0,0 +1,64 @@
// -----------------------------------------------------------------------------
// CacheWarmupHostedService.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-024
// Description: Background service for cache warmup on startup
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Background hosted service that warms the advisory cache on application startup.
/// </summary>
public sealed class CacheWarmupHostedService : BackgroundService
{
private readonly IAdvisoryCacheService _cacheService;
private readonly ConcelierCacheOptions _options;
private readonly ILogger<CacheWarmupHostedService>? _logger;
/// <summary>
/// Initializes a new instance of <see cref="CacheWarmupHostedService"/>.
/// </summary>
public CacheWarmupHostedService(
IAdvisoryCacheService cacheService,
IOptions<ConcelierCacheOptions> options,
ILogger<CacheWarmupHostedService>? logger = null)
{
_cacheService = cacheService;
_options = options.Value;
_logger = logger;
}
/// <inheritdoc />
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
if (!_options.Enabled || !_options.EnableWarmup)
{
_logger?.LogInformation("Cache warmup is disabled");
return;
}
// Wait a short time for the application to fully start
await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
_logger?.LogInformation("Starting cache warmup with limit {Limit}", _options.WarmupLimit);
try
{
await _cacheService.WarmupAsync(_options.WarmupLimit, stoppingToken).ConfigureAwait(false);
_logger?.LogInformation("Cache warmup completed successfully");
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
_logger?.LogInformation("Cache warmup cancelled");
}
catch (Exception ex)
{
_logger?.LogError(ex, "Cache warmup failed");
}
}
}

View File

@@ -0,0 +1,173 @@
// -----------------------------------------------------------------------------
// ConcelierCacheConnectionFactory.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-003
// Description: Connection factory for Concelier Valkey cache
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StackExchange.Redis;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Factory for creating and managing Valkey/Redis connections for the Concelier cache.
/// Thread-safe with lazy connection initialization and automatic reconnection.
/// </summary>
public sealed class ConcelierCacheConnectionFactory : IAsyncDisposable
{
private readonly ConcelierCacheOptions _options;
private readonly ILogger<ConcelierCacheConnectionFactory>? _logger;
private readonly SemaphoreSlim _connectionLock = new(1, 1);
private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory;
private IConnectionMultiplexer? _connection;
private bool _disposed;
/// <summary>
/// Initializes a new instance of <see cref="ConcelierCacheConnectionFactory"/>.
/// </summary>
/// <param name="options">Cache configuration options.</param>
/// <param name="logger">Optional logger.</param>
/// <param name="connectionFactory">Optional connection factory for testing.</param>
public ConcelierCacheConnectionFactory(
IOptions<ConcelierCacheOptions> options,
ILogger<ConcelierCacheConnectionFactory>? logger = null,
Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null)
{
_options = options.Value;
_logger = logger;
_connectionFactory = connectionFactory ??
(config => Task.FromResult<IConnectionMultiplexer>(ConnectionMultiplexer.Connect(config)));
}
/// <summary>
/// Gets whether caching is enabled.
/// </summary>
public bool IsEnabled => _options.Enabled;
/// <summary>
/// Gets a database connection for cache operations.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The Valkey database.</returns>
public async ValueTask<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken = default)
{
var connection = await GetConnectionAsync(cancellationToken).ConfigureAwait(false);
return connection.GetDatabase(_options.Database);
}
/// <summary>
/// Gets the underlying connection multiplexer.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The connection multiplexer.</returns>
public async ValueTask<IConnectionMultiplexer> GetConnectionAsync(CancellationToken cancellationToken = default)
{
ObjectDisposedException.ThrowIf(_disposed, this);
if (_connection is not null && _connection.IsConnected)
{
return _connection;
}
await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_connection is null || !_connection.IsConnected)
{
if (_connection is not null)
{
_logger?.LogDebug("Reconnecting to Valkey (previous connection lost)");
await _connection.CloseAsync().ConfigureAwait(false);
_connection.Dispose();
}
var config = ConfigurationOptions.Parse(_options.ConnectionString);
config.AbortOnConnectFail = _options.AbortOnConnectFail;
config.ConnectTimeout = (int)_options.ConnectTimeout.TotalMilliseconds;
config.SyncTimeout = (int)_options.SyncTimeout.TotalMilliseconds;
config.AsyncTimeout = (int)_options.AsyncTimeout.TotalMilliseconds;
config.ConnectRetry = _options.ConnectRetry;
config.DefaultDatabase = _options.Database;
_logger?.LogDebug("Connecting to Valkey at {Endpoint} (database {Database})",
_options.ConnectionString, _options.Database);
_connection = await _connectionFactory(config).ConfigureAwait(false);
_logger?.LogInformation("Connected to Valkey for Concelier cache");
}
}
finally
{
_connectionLock.Release();
}
return _connection;
}
/// <summary>
/// Tests the connection by sending a PING command.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if connection is healthy.</returns>
public async ValueTask<bool> PingAsync(CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return false;
}
try
{
var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var pong = await db.PingAsync().ConfigureAwait(false);
_logger?.LogDebug("Valkey PING response: {Latency}ms", pong.TotalMilliseconds);
return true;
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Valkey PING failed");
return false;
}
}
/// <summary>
/// Gets the key prefix for cache keys.
/// </summary>
public string KeyPrefix => _options.KeyPrefix;
/// <summary>
/// Gets the maximum hot set size.
/// </summary>
public int MaxHotSetSize => _options.MaxHotSetSize;
/// <summary>
/// Gets the TTL policy.
/// </summary>
public CacheTtlPolicy TtlPolicy => _options.TtlPolicy;
/// <summary>
/// Disposes the connection factory and releases the connection.
/// </summary>
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (_connection is not null)
{
_logger?.LogDebug("Closing Valkey connection");
await _connection.CloseAsync().ConfigureAwait(false);
_connection.Dispose();
}
_connectionLock.Dispose();
}
}

View File

@@ -0,0 +1,195 @@
// -----------------------------------------------------------------------------
// ConcelierCacheMetrics.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-027, VCACHE-8200-028
// Description: OpenTelemetry metrics for cache operations
// -----------------------------------------------------------------------------
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Metrics instrumentation for the Concelier advisory cache.
/// </summary>
public sealed class ConcelierCacheMetrics : IDisposable
{
/// <summary>
/// Activity source name for cache operations.
/// </summary>
public const string ActivitySourceName = "StellaOps.Concelier.Cache";
/// <summary>
/// Meter name for cache metrics.
/// </summary>
public const string MeterName = "StellaOps.Concelier.Cache";
private readonly Meter _meter;
private readonly Counter<long> _hitsCounter;
private readonly Counter<long> _missesCounter;
private readonly Counter<long> _evictionsCounter;
private readonly Histogram<double> _latencyHistogram;
private readonly ObservableGauge<long> _hotSetSizeGauge;
private long _lastKnownHotSetSize;
/// <summary>
/// Activity source for tracing cache operations.
/// </summary>
public static ActivitySource ActivitySource { get; } = new(ActivitySourceName, "1.0.0");
/// <summary>
/// Initializes a new instance of <see cref="ConcelierCacheMetrics"/>.
/// </summary>
public ConcelierCacheMetrics()
{
_meter = new Meter(MeterName, "1.0.0");
_hitsCounter = _meter.CreateCounter<long>(
"concelier_cache_hits_total",
unit: "{hits}",
description: "Total number of cache hits");
_missesCounter = _meter.CreateCounter<long>(
"concelier_cache_misses_total",
unit: "{misses}",
description: "Total number of cache misses");
_evictionsCounter = _meter.CreateCounter<long>(
"concelier_cache_evictions_total",
unit: "{evictions}",
description: "Total number of cache evictions");
_latencyHistogram = _meter.CreateHistogram<double>(
"concelier_cache_latency_ms",
unit: "ms",
description: "Cache operation latency in milliseconds");
_hotSetSizeGauge = _meter.CreateObservableGauge(
"concelier_cache_hot_set_size",
() => _lastKnownHotSetSize,
unit: "{entries}",
description: "Current number of entries in the hot advisory set");
}
/// <summary>
/// Records a cache hit.
/// </summary>
public void RecordHit() => _hitsCounter.Add(1);
/// <summary>
/// Records a cache miss.
/// </summary>
public void RecordMiss() => _missesCounter.Add(1);
/// <summary>
/// Records a cache eviction.
/// </summary>
/// <param name="reason">The reason for eviction.</param>
public void RecordEviction(string reason = "ttl")
{
_evictionsCounter.Add(1, new KeyValuePair<string, object?>("reason", reason));
}
/// <summary>
/// Records operation latency.
/// </summary>
/// <param name="milliseconds">Latency in milliseconds.</param>
/// <param name="operation">The operation type (get, set, invalidate).</param>
public void RecordLatency(double milliseconds, string operation)
{
_latencyHistogram.Record(milliseconds, new KeyValuePair<string, object?>("operation", operation));
}
/// <summary>
/// Updates the hot set size gauge.
/// </summary>
/// <param name="size">Current hot set size.</param>
public void UpdateHotSetSize(long size)
{
_lastKnownHotSetSize = size;
}
/// <summary>
/// Starts an activity for tracing a cache operation.
/// </summary>
/// <param name="operationName">Name of the operation.</param>
/// <returns>The activity, or null if tracing is disabled.</returns>
public static Activity? StartActivity(string operationName)
{
return ActivitySource.StartActivity(operationName, ActivityKind.Internal);
}
/// <summary>
/// Starts an activity with tags.
/// </summary>
/// <param name="operationName">Name of the operation.</param>
/// <param name="tags">Tags to add to the activity.</param>
/// <returns>The activity, or null if tracing is disabled.</returns>
public static Activity? StartActivity(string operationName, params (string Key, object? Value)[] tags)
{
var activity = ActivitySource.StartActivity(operationName, ActivityKind.Internal);
if (activity is not null)
{
foreach (var (key, value) in tags)
{
activity.SetTag(key, value);
}
}
return activity;
}
/// <inheritdoc />
public void Dispose()
{
_meter.Dispose();
ActivitySource.Dispose();
}
}
/// <summary>
/// Extension methods for timing cache operations.
/// </summary>
public static class CacheMetricsExtensions
{
/// <summary>
/// Times an async operation and records the latency.
/// </summary>
public static async Task<T> TimeAsync<T>(
this ConcelierCacheMetrics metrics,
string operation,
Func<Task<T>> action)
{
var sw = Stopwatch.StartNew();
try
{
return await action().ConfigureAwait(false);
}
finally
{
sw.Stop();
metrics.RecordLatency(sw.Elapsed.TotalMilliseconds, operation);
}
}
/// <summary>
/// Times an async operation and records the latency.
/// </summary>
public static async Task TimeAsync(
this ConcelierCacheMetrics metrics,
string operation,
Func<Task> action)
{
var sw = Stopwatch.StartNew();
try
{
await action().ConfigureAwait(false);
}
finally
{
sw.Stop();
metrics.RecordLatency(sw.Elapsed.TotalMilliseconds, operation);
}
}
}

View File

@@ -0,0 +1,145 @@
// -----------------------------------------------------------------------------
// ConcelierCacheOptions.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-002
// Description: Configuration options for Concelier Valkey cache
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Configuration options for the Concelier Valkey advisory cache.
/// </summary>
public sealed class ConcelierCacheOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "Concelier:Cache";
/// <summary>
/// Whether Valkey caching is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Valkey connection string (e.g., "localhost:6379" or "valkey:6379,password=secret").
/// </summary>
public string ConnectionString { get; set; } = "localhost:6379";
/// <summary>
/// Valkey database number (0-15).
/// </summary>
public int Database { get; set; } = 1;
/// <summary>
/// Key prefix for all cache keys.
/// </summary>
public string KeyPrefix { get; set; } = "concelier:";
/// <summary>
/// Maximum hot set size.
/// </summary>
public int MaxHotSetSize { get; set; } = 10_000;
/// <summary>
/// Connection timeout.
/// </summary>
public TimeSpan ConnectTimeout { get; set; } = TimeSpan.FromSeconds(5);
/// <summary>
/// Synchronous operation timeout.
/// </summary>
public TimeSpan SyncTimeout { get; set; } = TimeSpan.FromMilliseconds(100);
/// <summary>
/// Async operation timeout.
/// </summary>
public TimeSpan AsyncTimeout { get; set; } = TimeSpan.FromMilliseconds(200);
/// <summary>
/// Number of connection retries.
/// </summary>
public int ConnectRetry { get; set; } = 3;
/// <summary>
/// Whether to abort on connect fail.
/// </summary>
public bool AbortOnConnectFail { get; set; } = false;
/// <summary>
/// TTL policy configuration.
/// </summary>
public CacheTtlPolicy TtlPolicy { get; set; } = new();
/// <summary>
/// Whether to enable cache warmup on startup.
/// </summary>
public bool EnableWarmup { get; set; } = true;
/// <summary>
/// Number of advisories to preload during warmup.
/// </summary>
public int WarmupLimit { get; set; } = 1000;
}
/// <summary>
/// TTL policy for cached advisories based on interest score.
/// </summary>
public sealed class CacheTtlPolicy
{
/// <summary>
/// TTL for high interest advisories (score >= 0.7).
/// </summary>
public TimeSpan HighScoreTtl { get; set; } = TimeSpan.FromHours(24);
/// <summary>
/// TTL for medium interest advisories (score >= 0.4).
/// </summary>
public TimeSpan MediumScoreTtl { get; set; } = TimeSpan.FromHours(4);
/// <summary>
/// TTL for low interest advisories (score < 0.4).
/// </summary>
public TimeSpan LowScoreTtl { get; set; } = TimeSpan.FromHours(1);
/// <summary>
/// Threshold for high interest score.
/// </summary>
public double HighScoreThreshold { get; set; } = 0.7;
/// <summary>
/// Threshold for medium interest score.
/// </summary>
public double MediumScoreThreshold { get; set; } = 0.4;
/// <summary>
/// TTL for PURL index entries.
/// </summary>
public TimeSpan PurlIndexTtl { get; set; } = TimeSpan.FromHours(24);
/// <summary>
/// TTL for CVE mapping entries.
/// </summary>
public TimeSpan CveMappingTtl { get; set; } = TimeSpan.FromHours(24);
/// <summary>
/// Gets the appropriate TTL for an advisory based on its interest score.
/// </summary>
/// <param name="score">Interest score (0.0 - 1.0), or null for default.</param>
/// <returns>TTL for the advisory.</returns>
public TimeSpan GetTtl(double? score)
{
if (!score.HasValue)
{
return LowScoreTtl;
}
return score.Value switch
{
>= 0.7 => HighScoreTtl, // High interest: 24h
>= 0.4 => MediumScoreTtl, // Medium interest: 4h
_ => LowScoreTtl // Low interest: 1h
};
}
}

View File

@@ -0,0 +1,155 @@
// -----------------------------------------------------------------------------
// IAdvisoryCacheService.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-010
// Description: Interface for Valkey-based canonical advisory caching
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Core.Canonical;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Valkey-based cache for canonical advisories.
/// Provides read-through caching with TTL based on interest score.
/// </summary>
public interface IAdvisoryCacheService
{
// === Read Operations ===
/// <summary>
/// Get canonical advisory by merge hash (cache-first).
/// </summary>
/// <param name="mergeHash">The merge hash identifying the canonical.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The cached advisory, or null if not found.</returns>
Task<CanonicalAdvisory?> GetAsync(string mergeHash, CancellationToken cancellationToken = default);
/// <summary>
/// Get canonical advisories by PURL (uses index).
/// </summary>
/// <param name="purl">The PURL to lookup.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of advisories affecting this PURL.</returns>
Task<IReadOnlyList<CanonicalAdvisory>> GetByPurlAsync(string purl, CancellationToken cancellationToken = default);
/// <summary>
/// Get canonical advisory by CVE (uses mapping).
/// </summary>
/// <param name="cve">The CVE identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The primary canonical for this CVE, or null.</returns>
Task<CanonicalAdvisory?> GetByCveAsync(string cve, CancellationToken cancellationToken = default);
/// <summary>
/// Get hot advisories (top N by interest score).
/// </summary>
/// <param name="limit">Maximum number to return.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of hot advisories in descending score order.</returns>
Task<IReadOnlyList<CanonicalAdvisory>> GetHotAsync(int limit = 100, CancellationToken cancellationToken = default);
// === Write Operations ===
/// <summary>
/// Cache canonical advisory with TTL based on interest score.
/// </summary>
/// <param name="advisory">The advisory to cache.</param>
/// <param name="interestScore">Optional interest score (0.0-1.0) for TTL calculation.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SetAsync(CanonicalAdvisory advisory, double? interestScore = null, CancellationToken cancellationToken = default);
/// <summary>
/// Invalidate cached advisory.
/// </summary>
/// <param name="mergeHash">The merge hash to invalidate.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task InvalidateAsync(string mergeHash, CancellationToken cancellationToken = default);
/// <summary>
/// Update interest score (affects TTL and hot set membership).
/// </summary>
/// <param name="mergeHash">The merge hash to update.</param>
/// <param name="score">The new interest score (0.0-1.0).</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UpdateScoreAsync(string mergeHash, double score, CancellationToken cancellationToken = default);
// === Index Operations ===
/// <summary>
/// Add merge hash to PURL index.
/// </summary>
/// <param name="purl">The PURL to index.</param>
/// <param name="mergeHash">The merge hash to add.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task IndexPurlAsync(string purl, string mergeHash, CancellationToken cancellationToken = default);
/// <summary>
/// Remove merge hash from PURL index.
/// </summary>
/// <param name="purl">The PURL to unindex.</param>
/// <param name="mergeHash">The merge hash to remove.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UnindexPurlAsync(string purl, string mergeHash, CancellationToken cancellationToken = default);
/// <summary>
/// Set CVE to merge hash mapping.
/// </summary>
/// <param name="cve">The CVE identifier.</param>
/// <param name="mergeHash">The canonical merge hash.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task IndexCveAsync(string cve, string mergeHash, CancellationToken cancellationToken = default);
// === Maintenance ===
/// <summary>
/// Warm cache with hot advisories from database.
/// </summary>
/// <param name="limit">Maximum number of advisories to preload.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task WarmupAsync(int limit = 1000, CancellationToken cancellationToken = default);
/// <summary>
/// Get cache statistics.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Current cache statistics.</returns>
Task<CacheStatistics> GetStatisticsAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Check if the cache service is healthy.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if the cache is reachable and operational.</returns>
Task<bool> IsHealthyAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Cache statistics for monitoring and debugging.
/// </summary>
public sealed record CacheStatistics
{
/// <summary>Total cache hits.</summary>
public long Hits { get; init; }
/// <summary>Total cache misses.</summary>
public long Misses { get; init; }
/// <summary>Cache hit rate (0.0-1.0).</summary>
public double HitRate => Hits + Misses > 0 ? (double)Hits / (Hits + Misses) : 0;
/// <summary>Current size of the hot advisory set.</summary>
public long HotSetSize { get; init; }
/// <summary>Approximate total cached advisories.</summary>
public long TotalCachedAdvisories { get; init; }
/// <summary>When the cache was last warmed up.</summary>
public DateTimeOffset? LastWarmup { get; init; }
/// <summary>Whether the cache service is currently healthy.</summary>
public bool IsHealthy { get; init; }
/// <summary>Valkey server info string.</summary>
public string? ServerInfo { get; init; }
}

View File

@@ -0,0 +1,169 @@
// -----------------------------------------------------------------------------
// ServiceCollectionExtensions.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-011
// Description: DI registration for Concelier Valkey cache services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Extension methods for registering Concelier cache services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds Concelier Valkey cache services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configuration">The configuration root.</param>
/// <param name="enableWarmup">Whether to enable background cache warmup.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierValkeyCache(
this IServiceCollection services,
IConfiguration configuration,
bool enableWarmup = true)
{
// Bind options from configuration
services.Configure<ConcelierCacheOptions>(
configuration.GetSection(ConcelierCacheOptions.SectionName));
return AddCoreServices(services, enableWarmup);
}
/// <summary>
/// Adds Concelier Valkey cache services with custom options.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configureOptions">Action to configure options.</param>
/// <param name="enableWarmup">Whether to enable background cache warmup.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierValkeyCache(
this IServiceCollection services,
Action<ConcelierCacheOptions> configureOptions,
bool enableWarmup = true)
{
services.Configure(configureOptions);
return AddCoreServices(services, enableWarmup);
}
private static IServiceCollection AddCoreServices(IServiceCollection services, bool enableWarmup)
{
// Register connection factory as singleton (manages connection lifecycle)
services.TryAddSingleton<ConcelierCacheConnectionFactory>();
// Register metrics
services.TryAddSingleton<ConcelierCacheMetrics>();
// Register cache service
services.TryAddSingleton<IAdvisoryCacheService, ValkeyAdvisoryCacheService>();
// Register warmup hosted service if enabled
if (enableWarmup)
{
services.AddHostedService<CacheWarmupHostedService>();
}
return services;
}
/// <summary>
/// Decorates the registered <see cref="ICanonicalAdvisoryService"/> with Valkey caching.
/// Call this after registering the base ICanonicalAdvisoryService implementation.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddValkeyCachingDecorator(this IServiceCollection services)
{
// Find the existing ICanonicalAdvisoryService registration
var existingDescriptor = services.FirstOrDefault(
d => d.ServiceType == typeof(StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService));
if (existingDescriptor is null)
{
throw new InvalidOperationException(
"ICanonicalAdvisoryService must be registered before adding the Valkey caching decorator. " +
"Call AddConcelierCore() or register ICanonicalAdvisoryService first.");
}
// Remove the original registration
services.Remove(existingDescriptor);
// Re-register the original service with a different key for the decorator to use
if (existingDescriptor.ImplementationType is not null)
{
services.Add(new ServiceDescriptor(
existingDescriptor.ImplementationType,
existingDescriptor.ImplementationType,
existingDescriptor.Lifetime));
}
else if (existingDescriptor.ImplementationFactory is not null)
{
services.Add(new ServiceDescriptor(
typeof(StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService),
sp => existingDescriptor.ImplementationFactory(sp),
existingDescriptor.Lifetime));
}
// Register the decorator as the new ICanonicalAdvisoryService
services.Add(new ServiceDescriptor(
typeof(StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService),
sp =>
{
// Resolve the inner service (the original implementation)
StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService innerService;
if (existingDescriptor.ImplementationType is not null)
{
innerService = (StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService)
sp.GetRequiredService(existingDescriptor.ImplementationType);
}
else if (existingDescriptor.ImplementationFactory is not null)
{
innerService = (StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService)
existingDescriptor.ImplementationFactory(sp);
}
else if (existingDescriptor.ImplementationInstance is not null)
{
innerService = (StellaOps.Concelier.Core.Canonical.ICanonicalAdvisoryService)
existingDescriptor.ImplementationInstance;
}
else
{
throw new InvalidOperationException(
"Unable to resolve inner ICanonicalAdvisoryService for decorator.");
}
var cache = sp.GetRequiredService<IAdvisoryCacheService>();
var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<ValkeyCanonicalAdvisoryService>>();
return new ValkeyCanonicalAdvisoryService(innerService, cache, logger);
},
existingDescriptor.Lifetime));
return services;
}
/// <summary>
/// Adds Valkey caching for canonical advisories, including the decorator.
/// This is a convenience method that combines AddConcelierValkeyCache and AddValkeyCachingDecorator.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configuration">The configuration root.</param>
/// <param name="enableWarmup">Whether to enable background cache warmup.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierValkeyCacheWithDecorator(
this IServiceCollection services,
IConfiguration configuration,
bool enableWarmup = true)
{
services.AddConcelierValkeyCache(configuration, enableWarmup);
services.AddValkeyCachingDecorator();
return services;
}
}

View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.Cache.Valkey</RootNamespace>
<AssemblyName>StellaOps.Concelier.Cache.Valkey</AssemblyName>
<Description>Valkey/Redis caching for Concelier canonical advisories</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,493 @@
// -----------------------------------------------------------------------------
// ValkeyAdvisoryCacheService.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-011 to VCACHE-8200-016
// Description: Valkey implementation of advisory cache service
// -----------------------------------------------------------------------------
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StackExchange.Redis;
using StellaOps.Concelier.Core.Canonical;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Valkey-based implementation of the advisory cache service.
/// Provides read-through caching with TTL based on interest scores.
/// </summary>
public sealed class ValkeyAdvisoryCacheService : IAdvisoryCacheService
{
private readonly ConcelierCacheConnectionFactory _connectionFactory;
private readonly ConcelierCacheOptions _options;
private readonly ILogger<ValkeyAdvisoryCacheService>? _logger;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
/// <summary>
/// Initializes a new instance of <see cref="ValkeyAdvisoryCacheService"/>.
/// </summary>
public ValkeyAdvisoryCacheService(
ConcelierCacheConnectionFactory connectionFactory,
IOptions<ConcelierCacheOptions> options,
ILogger<ValkeyAdvisoryCacheService>? logger = null)
{
_connectionFactory = connectionFactory;
_options = options.Value;
_logger = logger;
}
/// <inheritdoc />
public async Task<CanonicalAdvisory?> GetAsync(string mergeHash, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return null;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var key = AdvisoryCacheKeys.Advisory(mergeHash, _options.KeyPrefix);
var cached = await db.StringGetAsync(key).ConfigureAwait(false);
if (cached.HasValue)
{
await db.StringIncrementAsync(AdvisoryCacheKeys.StatsHits(_options.KeyPrefix)).ConfigureAwait(false);
_logger?.LogDebug("Cache hit for advisory {MergeHash}", mergeHash);
return JsonSerializer.Deserialize<CanonicalAdvisory>((string)cached!, JsonOptions);
}
await db.StringIncrementAsync(AdvisoryCacheKeys.StatsMisses(_options.KeyPrefix)).ConfigureAwait(false);
_logger?.LogDebug("Cache miss for advisory {MergeHash}", mergeHash);
return null;
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to get advisory {MergeHash} from cache", mergeHash);
return null;
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<CanonicalAdvisory>> GetByPurlAsync(string purl, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return [];
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var indexKey = AdvisoryCacheKeys.ByPurl(purl, _options.KeyPrefix);
// Get all merge hashes for this PURL
var mergeHashes = await db.SetMembersAsync(indexKey).ConfigureAwait(false);
if (mergeHashes.Length == 0)
{
return [];
}
// Refresh TTL on access
await db.KeyExpireAsync(indexKey, _options.TtlPolicy.PurlIndexTtl).ConfigureAwait(false);
// Fetch all advisories
var results = new List<CanonicalAdvisory>(mergeHashes.Length);
foreach (var hash in mergeHashes)
{
var advisory = await GetAsync(hash!, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
results.Add(advisory);
}
}
return results;
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to get advisories for PURL {Purl}", purl);
return [];
}
}
/// <inheritdoc />
public async Task<CanonicalAdvisory?> GetByCveAsync(string cve, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return null;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var mappingKey = AdvisoryCacheKeys.ByCve(cve, _options.KeyPrefix);
var mergeHash = await db.StringGetAsync(mappingKey).ConfigureAwait(false);
if (!mergeHash.HasValue)
{
return null;
}
return await GetAsync(mergeHash!, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to get advisory for CVE {Cve}", cve);
return null;
}
}
/// <inheritdoc />
public async Task<IReadOnlyList<CanonicalAdvisory>> GetHotAsync(int limit = 100, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return [];
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var hotKey = AdvisoryCacheKeys.HotSet(_options.KeyPrefix);
// Get top N merge hashes by score (descending)
var entries = await db.SortedSetRangeByRankAsync(
hotKey,
start: 0,
stop: limit - 1,
order: Order.Descending).ConfigureAwait(false);
if (entries.Length == 0)
{
return [];
}
// Fetch all advisories
var results = new List<CanonicalAdvisory>(entries.Length);
foreach (var mergeHash in entries)
{
var advisory = await GetAsync(mergeHash!, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
results.Add(advisory);
}
}
return results;
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to get hot advisories");
return [];
}
}
/// <inheritdoc />
public async Task SetAsync(CanonicalAdvisory advisory, double? interestScore = null, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var key = AdvisoryCacheKeys.Advisory(advisory.MergeHash, _options.KeyPrefix);
var json = JsonSerializer.Serialize(advisory, JsonOptions);
var ttl = _options.TtlPolicy.GetTtl(interestScore);
await db.StringSetAsync(key, json, ttl).ConfigureAwait(false);
_logger?.LogDebug("Cached advisory {MergeHash} with TTL {Ttl}", advisory.MergeHash, ttl);
// Update hot set if score provided
if (interestScore.HasValue)
{
await UpdateScoreAsync(advisory.MergeHash, interestScore.Value, cancellationToken).ConfigureAwait(false);
}
// Index CVE mapping
if (!string.IsNullOrWhiteSpace(advisory.Cve))
{
await IndexCveAsync(advisory.Cve, advisory.MergeHash, cancellationToken).ConfigureAwait(false);
}
// Index by PURL (affects key)
if (!string.IsNullOrWhiteSpace(advisory.AffectsKey))
{
await IndexPurlAsync(advisory.AffectsKey, advisory.MergeHash, cancellationToken).ConfigureAwait(false);
}
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to cache advisory {MergeHash}", advisory.MergeHash);
}
}
/// <inheritdoc />
public async Task InvalidateAsync(string mergeHash, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
// Remove from advisory cache
var key = AdvisoryCacheKeys.Advisory(mergeHash, _options.KeyPrefix);
await db.KeyDeleteAsync(key).ConfigureAwait(false);
// Remove from hot set
var hotKey = AdvisoryCacheKeys.HotSet(_options.KeyPrefix);
await db.SortedSetRemoveAsync(hotKey, mergeHash).ConfigureAwait(false);
_logger?.LogDebug("Invalidated advisory {MergeHash}", mergeHash);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to invalidate advisory {MergeHash}", mergeHash);
}
}
/// <inheritdoc />
public async Task UpdateScoreAsync(string mergeHash, double score, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var hotKey = AdvisoryCacheKeys.HotSet(_options.KeyPrefix);
// Add/update in hot set
await db.SortedSetAddAsync(hotKey, mergeHash, score).ConfigureAwait(false);
// Trim to max size
var currentSize = await db.SortedSetLengthAsync(hotKey).ConfigureAwait(false);
if (currentSize > _options.MaxHotSetSize)
{
// Remove lowest scoring entries
await db.SortedSetRemoveRangeByRankAsync(
hotKey,
start: 0,
stop: currentSize - _options.MaxHotSetSize - 1).ConfigureAwait(false);
}
// Update advisory TTL if cached
var advisoryKey = AdvisoryCacheKeys.Advisory(mergeHash, _options.KeyPrefix);
if (await db.KeyExistsAsync(advisoryKey).ConfigureAwait(false))
{
var ttl = _options.TtlPolicy.GetTtl(score);
await db.KeyExpireAsync(advisoryKey, ttl).ConfigureAwait(false);
}
_logger?.LogDebug("Updated score for {MergeHash} to {Score}", mergeHash, score);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to update score for {MergeHash}", mergeHash);
}
}
/// <inheritdoc />
public async Task IndexPurlAsync(string purl, string mergeHash, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var indexKey = AdvisoryCacheKeys.ByPurl(purl, _options.KeyPrefix);
await db.SetAddAsync(indexKey, mergeHash).ConfigureAwait(false);
await db.KeyExpireAsync(indexKey, _options.TtlPolicy.PurlIndexTtl).ConfigureAwait(false);
_logger?.LogDebug("Indexed PURL {Purl} -> {MergeHash}", purl, mergeHash);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to index PURL {Purl}", purl);
}
}
/// <inheritdoc />
public async Task UnindexPurlAsync(string purl, string mergeHash, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var indexKey = AdvisoryCacheKeys.ByPurl(purl, _options.KeyPrefix);
await db.SetRemoveAsync(indexKey, mergeHash).ConfigureAwait(false);
_logger?.LogDebug("Unindexed PURL {Purl} -> {MergeHash}", purl, mergeHash);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to unindex PURL {Purl}", purl);
}
}
/// <inheritdoc />
public async Task IndexCveAsync(string cve, string mergeHash, CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return;
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var mappingKey = AdvisoryCacheKeys.ByCve(cve, _options.KeyPrefix);
await db.StringSetAsync(mappingKey, mergeHash, _options.TtlPolicy.CveMappingTtl).ConfigureAwait(false);
_logger?.LogDebug("Indexed CVE {Cve} -> {MergeHash}", cve, mergeHash);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to index CVE {Cve}", cve);
}
}
/// <inheritdoc />
public async Task WarmupAsync(int limit = 1000, CancellationToken cancellationToken = default)
{
if (!_options.Enabled || !_options.EnableWarmup)
{
return;
}
var sw = Stopwatch.StartNew();
_logger?.LogInformation("Starting cache warmup (limit: {Limit})", limit);
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
// Try to acquire warmup lock (prevent concurrent warmups)
var lockKey = AdvisoryCacheKeys.WarmupLock(_options.KeyPrefix);
var lockAcquired = await db.StringSetAsync(
lockKey,
"warming",
TimeSpan.FromMinutes(10),
When.NotExists).ConfigureAwait(false);
if (!lockAcquired)
{
_logger?.LogDebug("Warmup already in progress, skipping");
return;
}
try
{
// Record warmup timestamp
var warmupKey = AdvisoryCacheKeys.WarmupLast(_options.KeyPrefix);
await db.StringSetAsync(warmupKey, DateTimeOffset.UtcNow.ToString("o")).ConfigureAwait(false);
// Note: Actual warmup would load from ICanonicalAdvisoryStore
// This is a placeholder - the actual implementation would be in the integration layer
_logger?.LogInformation("Cache warmup completed in {Elapsed}ms", sw.ElapsedMilliseconds);
}
finally
{
// Release lock
await db.KeyDeleteAsync(lockKey).ConfigureAwait(false);
}
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Cache warmup failed after {Elapsed}ms", sw.ElapsedMilliseconds);
}
}
/// <inheritdoc />
public async Task<CacheStatistics> GetStatisticsAsync(CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return new CacheStatistics { IsHealthy = false };
}
try
{
var db = await _connectionFactory.GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var hitsKey = AdvisoryCacheKeys.StatsHits(_options.KeyPrefix);
var missesKey = AdvisoryCacheKeys.StatsMisses(_options.KeyPrefix);
var hotKey = AdvisoryCacheKeys.HotSet(_options.KeyPrefix);
var warmupKey = AdvisoryCacheKeys.WarmupLast(_options.KeyPrefix);
var hits = (long)(await db.StringGetAsync(hitsKey).ConfigureAwait(false));
var misses = (long)(await db.StringGetAsync(missesKey).ConfigureAwait(false));
var hotSetSize = await db.SortedSetLengthAsync(hotKey).ConfigureAwait(false);
DateTimeOffset? lastWarmup = null;
var warmupStr = await db.StringGetAsync(warmupKey).ConfigureAwait(false);
if (warmupStr.HasValue && DateTimeOffset.TryParse(warmupStr, out var parsed))
{
lastWarmup = parsed;
}
// Get server info
var connection = await _connectionFactory.GetConnectionAsync(cancellationToken).ConfigureAwait(false);
var server = connection.GetServer(connection.GetEndPoints().First());
var info = (await server.InfoAsync().ConfigureAwait(false))
.FirstOrDefault(g => g.Key == "Server")?
.FirstOrDefault(e => e.Key == "redis_version")
.Value;
return new CacheStatistics
{
Hits = hits,
Misses = misses,
HotSetSize = hotSetSize,
TotalCachedAdvisories = hotSetSize, // Approximation
LastWarmup = lastWarmup,
IsHealthy = true,
ServerInfo = info
};
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to get cache statistics");
return new CacheStatistics { IsHealthy = false };
}
}
/// <inheritdoc />
public async Task<bool> IsHealthyAsync(CancellationToken cancellationToken = default)
{
if (!_options.Enabled)
{
return false;
}
return await _connectionFactory.PingAsync(cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,335 @@
// -----------------------------------------------------------------------------
// ValkeyCanonicalAdvisoryService.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-026
// Description: Decorator that integrates Valkey cache with CanonicalAdvisoryService
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Canonical;
namespace StellaOps.Concelier.Cache.Valkey;
/// <summary>
/// Decorator that integrates Valkey distributed cache with the canonical advisory service.
/// Provides cache-first reads with automatic population and invalidation on writes.
/// </summary>
public sealed class ValkeyCanonicalAdvisoryService : ICanonicalAdvisoryService
{
private readonly ICanonicalAdvisoryService _inner;
private readonly IAdvisoryCacheService _cache;
private readonly ILogger<ValkeyCanonicalAdvisoryService> _logger;
/// <summary>
/// Initializes a new instance of <see cref="ValkeyCanonicalAdvisoryService"/>.
/// </summary>
/// <param name="inner">The inner canonical advisory service.</param>
/// <param name="cache">The Valkey cache service.</param>
/// <param name="logger">Logger instance.</param>
public ValkeyCanonicalAdvisoryService(
ICanonicalAdvisoryService inner,
IAdvisoryCacheService cache,
ILogger<ValkeyCanonicalAdvisoryService> logger)
{
_inner = inner ?? throw new ArgumentNullException(nameof(inner));
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
#region Ingest Operations (Write-through with cache population)
/// <inheritdoc />
public async Task<IngestResult> IngestAsync(
string source,
RawAdvisory rawAdvisory,
CancellationToken ct = default)
{
var result = await _inner.IngestAsync(source, rawAdvisory, ct).ConfigureAwait(false);
// Populate or invalidate cache based on result
if (result.Decision != MergeDecision.Duplicate)
{
await InvalidateAndRefreshCacheAsync(result.CanonicalId, result.MergeHash, rawAdvisory.Cve, ct)
.ConfigureAwait(false);
}
return result;
}
/// <inheritdoc />
public async Task<IReadOnlyList<IngestResult>> IngestBatchAsync(
string source,
IEnumerable<RawAdvisory> advisories,
CancellationToken ct = default)
{
var results = await _inner.IngestBatchAsync(source, advisories, ct).ConfigureAwait(false);
// Invalidate cache for all affected entries (non-duplicates)
var affectedResults = results.Where(r => r.Decision != MergeDecision.Duplicate).ToList();
foreach (var result in affectedResults)
{
try
{
await _cache.InvalidateAsync(result.MergeHash, ct).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to invalidate cache for canonical {CanonicalId} during batch ingest",
result.CanonicalId);
}
}
if (affectedResults.Count > 0)
{
_logger.LogDebug(
"Invalidated {Count} cache entries during batch ingest",
affectedResults.Count);
}
return results;
}
#endregion
#region Query Operations (Cache-first with read-through)
/// <inheritdoc />
public async Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default)
{
// For ID-based lookups, we need to fetch from store first to get merge hash
// unless we maintain an ID->merge_hash index
var result = await _inner.GetByIdAsync(id, ct).ConfigureAwait(false);
if (result is not null)
{
// Populate cache for future merge-hash based lookups
try
{
await _cache.SetAsync(result, null, ct).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to populate cache for canonical {CanonicalId}", id);
}
}
return result;
}
/// <inheritdoc />
public async Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default)
{
// Try cache first
try
{
var cached = await _cache.GetAsync(mergeHash, ct).ConfigureAwait(false);
if (cached is not null)
{
_logger.LogTrace("Valkey cache hit for merge hash {MergeHash}", mergeHash);
return cached;
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to get from cache, falling back to store for {MergeHash}", mergeHash);
}
// Cache miss - fetch from store
var result = await _inner.GetByMergeHashAsync(mergeHash, ct).ConfigureAwait(false);
if (result is not null)
{
// Populate cache
try
{
await _cache.SetAsync(result, null, ct).ConfigureAwait(false);
_logger.LogTrace("Populated cache for merge hash {MergeHash}", mergeHash);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to populate cache for merge hash {MergeHash}", mergeHash);
}
}
return result;
}
/// <inheritdoc />
public async Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default)
{
// Note: The cache stores only the primary canonical per CVE (GetByCveAsync returns single item).
// For full results, we always query the store but use cache to accelerate individual lookups.
// This is intentional - CVE queries may return multiple canonicals (different affected packages).
// Fetch from store (the source of truth for multiple canonicals per CVE)
var results = await _inner.GetByCveAsync(cve, ct).ConfigureAwait(false);
// Populate cache for each result (for future individual lookups)
foreach (var advisory in results)
{
try
{
await _cache.SetAsync(advisory, null, ct).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to populate cache for advisory {MergeHash} (CVE: {Cve})",
advisory.MergeHash, cve);
}
}
if (results.Count > 0)
{
_logger.LogTrace("Fetched {Count} advisories for CVE {Cve} and cached them", results.Count, cve);
}
return results;
}
/// <inheritdoc />
public async Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(
string artifactKey,
CancellationToken ct = default)
{
// Try cache first (uses PURL index)
try
{
var cached = await _cache.GetByPurlAsync(artifactKey, ct).ConfigureAwait(false);
if (cached.Count > 0)
{
_logger.LogTrace(
"Valkey cache hit for artifact {ArtifactKey} ({Count} items)",
artifactKey, cached.Count);
return cached;
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to get from cache for artifact {ArtifactKey}", artifactKey);
}
// Cache miss - fetch from store
var results = await _inner.GetByArtifactAsync(artifactKey, ct).ConfigureAwait(false);
// Populate cache for each result and update PURL index
foreach (var advisory in results)
{
try
{
await _cache.SetAsync(advisory, null, ct).ConfigureAwait(false);
await _cache.IndexPurlAsync(artifactKey, advisory.MergeHash, ct).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to populate cache for advisory {MergeHash} (artifact: {ArtifactKey})",
advisory.MergeHash, artifactKey);
}
}
return results;
}
/// <inheritdoc />
public Task<PagedResult<CanonicalAdvisory>> QueryAsync(
CanonicalQueryOptions options,
CancellationToken ct = default)
{
// Complex queries bypass cache - pass through to store
// Individual results could be cached, but we don't cache the query itself
return _inner.QueryAsync(options, ct);
}
#endregion
#region Status Operations (Write-through with cache invalidation)
/// <inheritdoc />
public async Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default)
{
await _inner.UpdateStatusAsync(id, status, ct).ConfigureAwait(false);
// Fetch the canonical to get merge hash for cache invalidation
try
{
var canonical = await _inner.GetByIdAsync(id, ct).ConfigureAwait(false);
if (canonical is not null)
{
await _cache.InvalidateAsync(canonical.MergeHash, ct).ConfigureAwait(false);
_logger.LogDebug(
"Invalidated cache for canonical {CanonicalId} after status update to {Status}",
id, status);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to invalidate cache for canonical {CanonicalId} after status update",
id);
}
}
/// <inheritdoc />
public Task<int> DegradeToStubsAsync(double scoreThreshold, CancellationToken ct = default)
{
// This may affect many entries - invalidation will happen naturally through TTL
// or could be done through a background job if needed
return _inner.DegradeToStubsAsync(scoreThreshold, ct);
}
#endregion
#region Private Helpers
private async Task InvalidateAndRefreshCacheAsync(
Guid canonicalId,
string mergeHash,
string? cve,
CancellationToken ct)
{
try
{
// Invalidate existing entry
await _cache.InvalidateAsync(mergeHash, ct).ConfigureAwait(false);
// Fetch fresh data from store and populate cache
var canonical = await _inner.GetByIdAsync(canonicalId, ct).ConfigureAwait(false);
if (canonical is not null)
{
await _cache.SetAsync(canonical, null, ct).ConfigureAwait(false);
// Update CVE index
if (!string.IsNullOrWhiteSpace(cve))
{
await _cache.IndexCveAsync(cve, mergeHash, ct).ConfigureAwait(false);
}
// Update PURL index
if (!string.IsNullOrWhiteSpace(canonical.AffectsKey))
{
await _cache.IndexPurlAsync(canonical.AffectsKey, mergeHash, ct).ConfigureAwait(false);
}
_logger.LogDebug(
"Refreshed cache for canonical {CanonicalId} with merge hash {MergeHash}",
canonicalId, mergeHash);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to refresh cache for canonical {CanonicalId}",
canonicalId);
}
}
#endregion
}

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Documents;
@@ -12,6 +13,8 @@ using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
using StellaOps.Concelier.Connector.Distro.Alpine.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.Models;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
@@ -29,6 +32,7 @@ public sealed class AlpineConnector : IFeedConnector
private readonly AlpineOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<AlpineConnector> _logger;
private readonly ICanonicalAdvisoryService? _canonicalService;
private static readonly Action<ILogger, string, int, Exception?> LogMapped =
LoggerMessage.Define<string, int>(
@@ -45,7 +49,8 @@ public sealed class AlpineConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<AlpineOptions> options,
TimeProvider? timeProvider,
ILogger<AlpineConnector> logger)
ILogger<AlpineConnector> logger,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -57,6 +62,7 @@ public sealed class AlpineConnector : IFeedConnector
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_canonicalService = canonicalService; // Optional - canonical ingest
}
public string SourceName => AlpineConnectorPlugin.SourceName;
@@ -282,6 +288,12 @@ public sealed class AlpineConnector : IFeedConnector
foreach (var advisory in advisories)
{
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
// Ingest to canonical advisory service if available
if (_canonicalService is not null)
{
await IngestToCanonicalAsync(advisory, document.FetchedAt, cancellationToken).ConfigureAwait(false);
}
}
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
@@ -535,4 +547,89 @@ public sealed class AlpineConnector : IFeedConnector
}
private sealed record AlpineTarget(string Release, string Repository, string Stream, Uri Uri);
/// <summary>
/// Ingests Alpine advisory to canonical advisory service for deduplication.
/// Creates one RawAdvisory per affected package.
/// </summary>
private async Task IngestToCanonicalAsync(
Advisory advisory,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
{
return;
}
// Find primary CVE from aliases
var cve = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.AdvisoryKey;
// Extract CWE weaknesses
var weaknesses = advisory.Cwes
.Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
.Select(w => w.Identifier)
.ToList();
// Create one RawAdvisory per affected package
foreach (var affected in advisory.AffectedPackages)
{
if (string.IsNullOrWhiteSpace(affected.Identifier))
{
continue;
}
// Build version range JSON
string? versionRangeJson = null;
if (!affected.VersionRanges.IsEmpty)
{
var firstRange = affected.VersionRanges[0];
var rangeObj = new
{
introduced = firstRange.IntroducedVersion,
@fixed = firstRange.FixedVersion,
last_affected = firstRange.LastAffectedVersion
};
versionRangeJson = JsonSerializer.Serialize(rangeObj);
}
var rawAdvisory = new RawAdvisory
{
SourceAdvisoryId = advisory.AdvisoryKey,
Cve = cve,
AffectsKey = affected.Identifier,
VersionRangeJson = versionRangeJson,
Weaknesses = weaknesses,
PatchLineage = null,
Severity = advisory.Severity,
Title = advisory.Title,
Summary = advisory.Summary,
VendorStatus = VendorStatus.Affected,
RawPayloadJson = null, // Alpine doesn't store raw JSON per advisory
FetchedAt = fetchedAt
};
try
{
var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"Canonical ingest for {AdvisoryKey}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
advisory.AdvisoryKey, affected.Identifier, result.Decision, result.CanonicalId);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to ingest {AdvisoryKey}/{AffectsKey} to canonical service",
advisory.AdvisoryKey, affected.Identifier);
// Don't fail the mapping operation for canonical ingest failures
}
}
}
}

View File

@@ -18,6 +18,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Debian;
@@ -35,6 +36,7 @@ public sealed class DebianConnector : IFeedConnector
private readonly DebianOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<DebianConnector> _logger;
private readonly ICanonicalAdvisoryService? _canonicalService;
private static readonly Action<ILogger, string, int, Exception?> LogMapped =
LoggerMessage.Define<string, int>(
@@ -51,7 +53,8 @@ public sealed class DebianConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<DebianOptions> options,
TimeProvider? timeProvider,
ILogger<DebianConnector> logger)
ILogger<DebianConnector> logger,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -63,6 +66,7 @@ public sealed class DebianConnector : IFeedConnector
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_canonicalService = canonicalService; // Optional - canonical ingest
}
public string SourceName => DebianConnectorPlugin.SourceName;
@@ -427,6 +431,14 @@ public sealed class DebianConnector : IFeedConnector
var advisory = DebianMapper.Map(dto, document, _timeProvider.GetUtcNow());
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
// Ingest to canonical advisory service if available
if (_canonicalService is not null)
{
var rawPayloadJson = dtoRecord.Payload.ToJson(new JsonWriterSettings { OutputMode = JsonOutputMode.RelaxedExtendedJson });
await IngestToCanonicalAsync(advisory, rawPayloadJson, document.FetchedAt, cancellationToken).ConfigureAwait(false);
}
pendingMappings.Remove(documentId);
LogMapped(_logger, dto.AdvisoryId, advisory.AffectedPackages.Length, null);
}
@@ -634,4 +646,90 @@ public sealed class DebianConnector : IFeedConnector
packages,
references);
}
/// <summary>
/// Ingests Debian advisory to canonical advisory service for deduplication.
/// Creates one RawAdvisory per affected package.
/// </summary>
private async Task IngestToCanonicalAsync(
Advisory advisory,
string rawPayloadJson,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
{
return;
}
// Find primary CVE from aliases
var cve = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.AdvisoryKey;
// Extract CWE weaknesses
var weaknesses = advisory.Cwes
.Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
.Select(w => w.Identifier)
.ToList();
// Create one RawAdvisory per affected package
foreach (var affected in advisory.AffectedPackages)
{
if (string.IsNullOrWhiteSpace(affected.Identifier))
{
continue;
}
// Build version range JSON
string? versionRangeJson = null;
if (!affected.VersionRanges.IsEmpty)
{
var firstRange = affected.VersionRanges[0];
var rangeObj = new
{
introduced = firstRange.IntroducedVersion,
@fixed = firstRange.FixedVersion,
last_affected = firstRange.LastAffectedVersion
};
versionRangeJson = System.Text.Json.JsonSerializer.Serialize(rangeObj);
}
var rawAdvisory = new RawAdvisory
{
SourceAdvisoryId = advisory.AdvisoryKey,
Cve = cve,
AffectsKey = affected.Identifier,
VersionRangeJson = versionRangeJson,
Weaknesses = weaknesses,
PatchLineage = null,
Severity = advisory.Severity,
Title = advisory.Title,
Summary = advisory.Summary,
VendorStatus = VendorStatus.Affected,
RawPayloadJson = rawPayloadJson,
FetchedAt = fetchedAt
};
try
{
var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"Canonical ingest for {AdvisoryKey}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
advisory.AdvisoryKey, affected.Identifier, result.Decision, result.CanonicalId);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to ingest {AdvisoryKey}/{AffectsKey} to canonical service",
advisory.AdvisoryKey, affected.Identifier);
// Don't fail the mapping operation for canonical ingest failures
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Storage.Contracts;
namespace StellaOps.Concelier.Connector.Distro.Debian.Internal;
@@ -10,6 +11,9 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.DocumentRecord document)
=> new(document.Etag, document.LastModified);
public static DebianFetchCacheEntry FromDocument(StorageDocument document)
=> new(document.Etag, document.LastModified);
public static DebianFetchCacheEntry FromDocument(DocumentObject document)
{
if (document is null || document.ElementCount == 0)

View File

@@ -16,6 +16,7 @@ using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.RedHat;
@@ -31,6 +32,7 @@ public sealed class RedHatConnector : IFeedConnector
private readonly ILogger<RedHatConnector> _logger;
private readonly RedHatOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ICanonicalAdvisoryService? _canonicalService;
public RedHatConnector(
SourceFetchService fetchService,
@@ -41,7 +43,8 @@ public sealed class RedHatConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<RedHatOptions> options,
TimeProvider? timeProvider,
ILogger<RedHatConnector> logger)
ILogger<RedHatConnector> logger,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -53,6 +56,7 @@ public sealed class RedHatConnector : IFeedConnector
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_canonicalService = canonicalService; // Optional - canonical ingest
}
public string SourceName => RedHatConnectorPlugin.SourceName;
@@ -387,6 +391,13 @@ public sealed class RedHatConnector : IFeedConnector
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
// Ingest to canonical advisory service if available
if (_canonicalService is not null)
{
await IngestToCanonicalAsync(advisory, json, document.FetchedAt, cancellationToken).ConfigureAwait(false);
}
pendingMappings.Remove(documentId);
}
catch (Exception ex)
@@ -431,4 +442,90 @@ public sealed class RedHatConnector : IFeedConnector
$"{Uri.EscapeDataString(kvp.Key)}={Uri.EscapeDataString(kvp.Value)}"));
return builder.Uri;
}
/// <summary>
/// Ingests Red Hat advisory to canonical advisory service for deduplication.
/// Creates one RawAdvisory per affected package.
/// </summary>
private async Task IngestToCanonicalAsync(
Advisory advisory,
string rawPayloadJson,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
{
return;
}
// Find primary CVE from aliases
var cve = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.AdvisoryKey;
// Extract CWE weaknesses
var weaknesses = advisory.Cwes
.Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
.Select(w => w.Identifier)
.ToList();
// Create one RawAdvisory per affected package
foreach (var affected in advisory.AffectedPackages)
{
if (string.IsNullOrWhiteSpace(affected.Identifier))
{
continue;
}
// Build version range JSON
string? versionRangeJson = null;
if (!affected.VersionRanges.IsEmpty)
{
var firstRange = affected.VersionRanges[0];
var rangeObj = new
{
introduced = firstRange.IntroducedVersion,
@fixed = firstRange.FixedVersion,
last_affected = firstRange.LastAffectedVersion
};
versionRangeJson = JsonSerializer.Serialize(rangeObj);
}
var rawAdvisory = new RawAdvisory
{
SourceAdvisoryId = advisory.AdvisoryKey,
Cve = cve,
AffectsKey = affected.Identifier,
VersionRangeJson = versionRangeJson,
Weaknesses = weaknesses,
PatchLineage = null,
Severity = advisory.Severity,
Title = advisory.Title,
Summary = advisory.Summary,
VendorStatus = VendorStatus.Affected,
RawPayloadJson = rawPayloadJson,
FetchedAt = fetchedAt
};
try
{
var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"Canonical ingest for {AdvisoryKey}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
advisory.AdvisoryKey, affected.Identifier, result.Decision, result.CanonicalId);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to ingest {AdvisoryKey}/{AffectsKey} to canonical service",
advisory.AdvisoryKey, affected.Identifier);
// Don't fail the mapping operation for canonical ingest failures
}
}
}
}

View File

@@ -18,8 +18,7 @@ using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
using StellaOps.Concelier.Connector.Distro.Suse.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Suse;
@@ -41,6 +40,7 @@ public sealed class SuseConnector : IFeedConnector
private readonly SuseOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SuseConnector> _logger;
private readonly ICanonicalAdvisoryService? _canonicalService;
public SuseConnector(
SourceFetchService fetchService,
@@ -51,7 +51,8 @@ public sealed class SuseConnector : IFeedConnector
ISourceStateRepository stateRepository,
IOptions<SuseOptions> options,
TimeProvider? timeProvider,
ILogger<SuseConnector> logger)
ILogger<SuseConnector> logger,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -63,6 +64,7 @@ public sealed class SuseConnector : IFeedConnector
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_canonicalService = canonicalService; // Optional - canonical ingest
}
public string SourceName => SuseConnectorPlugin.SourceName;
@@ -403,8 +405,15 @@ public sealed class SuseConnector : IFeedConnector
var advisory = SuseMapper.Map(dto, document, _timeProvider.GetUtcNow());
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
// Ingest to canonical advisory service if available
if (_canonicalService is not null)
{
var rawPayloadJson = dtoRecord.Payload.ToJson(new JsonWriterSettings { OutputMode = JsonOutputMode.RelaxedExtendedJson });
await IngestToCanonicalAsync(advisory, rawPayloadJson, document.FetchedAt, cancellationToken).ConfigureAwait(false);
}
pendingMappings.Remove(documentId);
LogMapped(_logger, dto.AdvisoryId, advisory.AffectedPackages.Length, null);
}
@@ -570,4 +579,90 @@ public sealed class SuseConnector : IFeedConnector
packageList,
referenceList);
}
/// <summary>
/// Ingests SUSE advisory to canonical advisory service for deduplication.
/// Creates one RawAdvisory per affected package.
/// </summary>
private async Task IngestToCanonicalAsync(
Models.Advisory advisory,
string rawPayloadJson,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
{
return;
}
// Find primary CVE from aliases
var cve = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.AdvisoryKey;
// Extract CWE weaknesses
var weaknesses = advisory.Cwes
.Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
.Select(w => w.Identifier)
.ToList();
// Create one RawAdvisory per affected package
foreach (var affected in advisory.AffectedPackages)
{
if (string.IsNullOrWhiteSpace(affected.Identifier))
{
continue;
}
// Build version range JSON
string? versionRangeJson = null;
if (!affected.VersionRanges.IsEmpty)
{
var firstRange = affected.VersionRanges[0];
var rangeObj = new
{
introduced = firstRange.IntroducedVersion,
@fixed = firstRange.FixedVersion,
last_affected = firstRange.LastAffectedVersion
};
versionRangeJson = JsonSerializer.Serialize(rangeObj);
}
var rawAdvisory = new RawAdvisory
{
SourceAdvisoryId = advisory.AdvisoryKey,
Cve = cve,
AffectsKey = affected.Identifier,
VersionRangeJson = versionRangeJson,
Weaknesses = weaknesses,
PatchLineage = null,
Severity = advisory.Severity,
Title = advisory.Title,
Summary = advisory.Summary,
VendorStatus = VendorStatus.Affected,
RawPayloadJson = rawPayloadJson,
FetchedAt = fetchedAt
};
try
{
var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"Canonical ingest for {AdvisoryKey}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
advisory.AdvisoryKey, affected.Identifier, result.Decision, result.CanonicalId);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to ingest {AdvisoryKey}/{AffectsKey} to canonical service",
advisory.AdvisoryKey, affected.Identifier);
// Don't fail the mapping operation for canonical ingest failures
}
}
}
}

View File

@@ -13,10 +13,10 @@ using StellaOps.Concelier.Connector.Distro.Ubuntu.Configuration;
using StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;
using StellaOps.Cryptography;
using System.Text.Json;
namespace StellaOps.Concelier.Connector.Distro.Ubuntu;
@@ -32,6 +32,7 @@ public sealed class UbuntuConnector : IFeedConnector
private readonly TimeProvider _timeProvider;
private readonly ILogger<UbuntuConnector> _logger;
private readonly ICryptoHash _hash;
private readonly ICanonicalAdvisoryService? _canonicalService;
private static readonly Action<ILogger, string, int, Exception?> LogMapped =
LoggerMessage.Define<string, int>(
@@ -49,7 +50,8 @@ public sealed class UbuntuConnector : IFeedConnector
IOptions<UbuntuOptions> options,
TimeProvider? timeProvider,
ILogger<UbuntuConnector> logger,
ICryptoHash cryptoHash)
ICryptoHash cryptoHash,
ICanonicalAdvisoryService? canonicalService = null)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -62,6 +64,7 @@ public sealed class UbuntuConnector : IFeedConnector
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_canonicalService = canonicalService; // Optional - canonical ingest
}
public string SourceName => UbuntuConnectorPlugin.SourceName;
@@ -230,6 +233,13 @@ public sealed class UbuntuConnector : IFeedConnector
var advisory = UbuntuMapper.Map(notice, document, _timeProvider.GetUtcNow());
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
// Ingest to canonical advisory service if available
if (_canonicalService is not null)
{
await IngestToCanonicalAsync(advisory, document.FetchedAt, cancellationToken).ConfigureAwait(false);
}
pending.Remove(documentId);
LogMapped(_logger, notice.NoticeId, advisory.AffectedPackages.Length, null);
@@ -537,4 +547,89 @@ public sealed class UbuntuConnector : IFeedConnector
packages,
references);
}
/// <summary>
/// Ingests Ubuntu notice to canonical advisory service for deduplication.
/// Creates one RawAdvisory per affected package.
/// </summary>
private async Task IngestToCanonicalAsync(
Advisory advisory,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
if (_canonicalService is null || advisory.AffectedPackages.IsEmpty)
{
return;
}
// Find primary CVE from aliases
var cve = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.AdvisoryKey;
// Extract CWE weaknesses
var weaknesses = advisory.Cwes
.Where(w => w.Identifier.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
.Select(w => w.Identifier)
.ToList();
// Create one RawAdvisory per affected package
foreach (var affected in advisory.AffectedPackages)
{
if (string.IsNullOrWhiteSpace(affected.Identifier))
{
continue;
}
// Build version range JSON
string? versionRangeJson = null;
if (!affected.VersionRanges.IsEmpty)
{
var firstRange = affected.VersionRanges[0];
var rangeObj = new
{
introduced = firstRange.IntroducedVersion,
@fixed = firstRange.FixedVersion,
last_affected = firstRange.LastAffectedVersion
};
versionRangeJson = JsonSerializer.Serialize(rangeObj);
}
var rawAdvisory = new RawAdvisory
{
SourceAdvisoryId = advisory.AdvisoryKey,
Cve = cve,
AffectsKey = affected.Identifier,
VersionRangeJson = versionRangeJson,
Weaknesses = weaknesses,
PatchLineage = null,
Severity = advisory.Severity,
Title = advisory.Title,
Summary = advisory.Summary,
VendorStatus = VendorStatus.Affected,
RawPayloadJson = null, // Ubuntu notices don't need raw payload
FetchedAt = fetchedAt
};
try
{
var result = await _canonicalService.IngestAsync(SourceName, rawAdvisory, cancellationToken).ConfigureAwait(false);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"Canonical ingest for {AdvisoryKey}/{AffectsKey}: {Decision} (canonical={CanonicalId})",
advisory.AdvisoryKey, affected.Identifier, result.Decision, result.CanonicalId);
}
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to ingest {AdvisoryKey}/{AffectsKey} to canonical service",
advisory.AdvisoryKey, affected.Identifier);
// Don't fail the mapping operation for canonical ingest failures
}
}
}
}

View File

@@ -13,8 +13,6 @@ using StellaOps.Concelier.Connector.Ghsa.Configuration;
using StellaOps.Concelier.Connector.Ghsa.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Plugin;

View File

@@ -0,0 +1,131 @@
using ZstdSharp;
namespace StellaOps.Concelier.Federation.Compression;
/// <summary>
/// ZStandard compression utilities for federation bundles.
/// </summary>
public static class ZstdCompression
{
/// <summary>Default compression level (balanced speed/ratio).</summary>
public const int DefaultLevel = 3;
/// <summary>Minimum compression level.</summary>
public const int MinLevel = 1;
/// <summary>Maximum compression level.</summary>
public const int MaxLevel = 19;
/// <summary>
/// Create a compression stream wrapping the output.
/// </summary>
/// <param name="output">Target stream for compressed data.</param>
/// <param name="level">Compression level (1-19, default 3).</param>
/// <returns>Stream that compresses data written to it.</returns>
public static CompressionStream CreateCompressionStream(Stream output, int level = DefaultLevel)
{
ValidateLevel(level);
return new CompressionStream(output, level);
}
/// <summary>
/// Create a decompression stream wrapping the input.
/// </summary>
/// <param name="input">Source stream of compressed data.</param>
/// <returns>Stream that decompresses data read from it.</returns>
public static DecompressionStream CreateDecompressionStream(Stream input)
{
return new DecompressionStream(input);
}
/// <summary>
/// Compress data in memory.
/// </summary>
/// <param name="data">Uncompressed data.</param>
/// <param name="level">Compression level (1-19, default 3).</param>
/// <returns>Compressed data.</returns>
public static byte[] Compress(ReadOnlySpan<byte> data, int level = DefaultLevel)
{
ValidateLevel(level);
using var compressor = new Compressor(level);
return compressor.Wrap(data).ToArray();
}
/// <summary>
/// Decompress data in memory.
/// </summary>
/// <param name="compressed">Compressed data.</param>
/// <returns>Decompressed data.</returns>
public static byte[] Decompress(ReadOnlySpan<byte> compressed)
{
using var decompressor = new Decompressor();
return decompressor.Unwrap(compressed).ToArray();
}
/// <summary>
/// Compress a file to another file.
/// </summary>
public static async Task CompressFileAsync(
string inputPath,
string outputPath,
int level = DefaultLevel,
CancellationToken ct = default)
{
ValidateLevel(level);
await using var input = File.OpenRead(inputPath);
await using var output = File.Create(outputPath);
await using var compressor = CreateCompressionStream(output, level);
await input.CopyToAsync(compressor, ct);
}
/// <summary>
/// Decompress a file to another file.
/// </summary>
public static async Task DecompressFileAsync(
string inputPath,
string outputPath,
CancellationToken ct = default)
{
await using var input = File.OpenRead(inputPath);
await using var decompressor = CreateDecompressionStream(input);
await using var output = File.Create(outputPath);
await decompressor.CopyToAsync(output, ct);
}
/// <summary>
/// Decompress from one stream to another.
/// </summary>
public static async Task DecompressAsync(
Stream input,
Stream output,
CancellationToken ct = default)
{
await using var decompressor = CreateDecompressionStream(input);
await decompressor.CopyToAsync(output, ct);
}
/// <summary>
/// Compress from one stream to another.
/// </summary>
public static async Task CompressAsync(
Stream input,
Stream output,
int level = DefaultLevel,
CancellationToken ct = default)
{
ValidateLevel(level);
await using var compressor = CreateCompressionStream(output, level);
await input.CopyToAsync(compressor, ct);
}
private static void ValidateLevel(int level)
{
if (level < MinLevel || level > MaxLevel)
{
throw new ArgumentOutOfRangeException(
nameof(level),
level,
$"Compression level must be between {MinLevel} and {MaxLevel}");
}
}
}

View File

@@ -0,0 +1,303 @@
using System.Diagnostics;
using System.Formats.Tar;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Federation.Compression;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.Federation.Serialization;
using StellaOps.Concelier.Federation.Signing;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Federation.Export;
/// <summary>
/// Service for exporting federation bundles with ZST compression.
/// </summary>
public sealed class BundleExportService : IBundleExportService
{
private readonly IDeltaQueryService _deltaQuery;
private readonly IBundleSigner _signer;
private readonly FederationOptions _options;
private readonly ILogger<BundleExportService> _logger;
public BundleExportService(
IDeltaQueryService deltaQuery,
IBundleSigner signer,
IOptions<FederationOptions> options,
ILogger<BundleExportService> logger)
{
_deltaQuery = deltaQuery;
_signer = signer;
_options = options.Value;
_logger = logger;
}
/// <inheritdoc />
public async Task<BundleExportResult> ExportAsync(
string? sinceCursor = null,
BundleExportOptions? options = null,
CancellationToken ct = default)
{
options ??= new BundleExportOptions();
// Create temporary file for bundle
var tempPath = Path.GetTempFileName();
try
{
await using var fileStream = File.Create(tempPath);
var result = await ExportToStreamInternalAsync(fileStream, sinceCursor, options, ct);
// Update with actual file size
return result with
{
CompressedSizeBytes = new FileInfo(tempPath).Length
};
}
finally
{
// Cleanup temp file
try { File.Delete(tempPath); } catch { /* ignore */ }
}
}
/// <inheritdoc />
public async Task<BundleExportResult> ExportToStreamAsync(
Stream output,
string? sinceCursor = null,
BundleExportOptions? options = null,
CancellationToken ct = default)
{
options ??= new BundleExportOptions();
return await ExportToStreamInternalAsync(output, sinceCursor, options, ct);
}
/// <inheritdoc />
public async Task<BundleExportPreview> PreviewAsync(
string? sinceCursor = null,
CancellationToken ct = default)
{
var queryOptions = new DeltaQueryOptions
{
MaxItems = 100_000, // Preview can count more
IncludeEdges = true,
IncludeDeletions = true
};
var counts = await _deltaQuery.CountChangedSinceAsync(sinceCursor, queryOptions, ct);
// Estimate size: ~500 bytes per canonical, ~200 bytes per edge, ~50 bytes per deletion
long estimatedSize = (counts.Canonicals * 500L) + (counts.Edges * 200L) + (counts.Deletions * 50L);
// Compression typically achieves 3-5x reduction
long compressedEstimate = estimatedSize / 4;
return new BundleExportPreview
{
EstimatedCanonicals = counts.Canonicals,
EstimatedEdges = counts.Edges,
EstimatedDeletions = counts.Deletions,
EstimatedSizeBytes = compressedEstimate
};
}
private async Task<BundleExportResult> ExportToStreamInternalAsync(
Stream output,
string? sinceCursor,
BundleExportOptions options,
CancellationToken ct)
{
var stopwatch = Stopwatch.StartNew();
_logger.LogInformation(
"Starting bundle export since cursor {Cursor}",
sinceCursor ?? "beginning");
// Query changes
var queryOptions = new DeltaQueryOptions
{
MaxItems = options.MaxItems,
IncludeSources = options.IncludeSources,
ExcludeSources = options.ExcludeSources,
IncludeEdges = true,
IncludeDeletions = true
};
var changes = await _deltaQuery.GetChangedSinceAsync(sinceCursor, queryOptions, ct);
// Use a memory stream to build content, then compress
await using var contentStream = new MemoryStream();
await using var tarWriter = new TarWriter(contentStream, leaveOpen: true);
// Write canonicals NDJSON
int canonicalCount = 0;
await using var canonicalBuffer = new MemoryStream();
await foreach (var canonical in changes.Canonicals.WithCancellation(ct))
{
await BundleSerializer.WriteCanonicalLineAsync(canonicalBuffer, canonical, ct);
canonicalCount++;
}
canonicalBuffer.Position = 0;
await WriteEntryAsync(tarWriter, "canonicals.ndjson", canonicalBuffer, ct);
// Write edges NDJSON
int edgeCount = 0;
await using var edgeBuffer = new MemoryStream();
await foreach (var edge in changes.Edges.WithCancellation(ct))
{
await BundleSerializer.WriteEdgeLineAsync(edgeBuffer, edge, ct);
edgeCount++;
}
edgeBuffer.Position = 0;
await WriteEntryAsync(tarWriter, "edges.ndjson", edgeBuffer, ct);
// Write deletions NDJSON
int deletionCount = 0;
await using var deletionBuffer = new MemoryStream();
await foreach (var deletion in changes.Deletions.WithCancellation(ct))
{
await BundleSerializer.WriteDeletionLineAsync(deletionBuffer, deletion, ct);
deletionCount++;
}
deletionBuffer.Position = 0;
await WriteEntryAsync(tarWriter, "deletions.ndjson", deletionBuffer, ct);
// Generate new cursor
var exportCursor = CursorFormat.Create(DateTimeOffset.UtcNow);
// Build manifest
var manifest = new BundleManifest
{
Version = "feedser-bundle/1.0",
SiteId = _options.SiteId,
ExportCursor = exportCursor,
SinceCursor = sinceCursor,
ExportedAt = DateTimeOffset.UtcNow,
Counts = new BundleCounts
{
Canonicals = canonicalCount,
Edges = edgeCount,
Deletions = deletionCount
},
BundleHash = "" // Placeholder, computed after compression
};
// Write manifest
var manifestBytes = BundleSerializer.SerializeManifest(manifest);
await using var manifestStream = new MemoryStream(manifestBytes);
await WriteEntryAsync(tarWriter, "MANIFEST.json", manifestStream, ct);
// Finalize tar
await tarWriter.DisposeAsync();
// Get tar content
contentStream.Position = 0;
var tarContent = contentStream.ToArray();
// Compress with ZST
var compressed = ZstdCompression.Compress(tarContent, options.CompressionLevel);
// Compute hash of compressed content
var bundleHash = ComputeHash(compressed);
// Update manifest with hash and rewrite
manifest = manifest with { BundleHash = bundleHash };
var finalManifestBytes = BundleSerializer.SerializeManifest(manifest);
// Rebuild tar with updated manifest
await using var finalContent = new MemoryStream();
await using var finalTar = new TarWriter(finalContent, leaveOpen: true);
await WriteEntryAsync(finalTar, "MANIFEST.json", new MemoryStream(finalManifestBytes), ct);
canonicalBuffer.Position = 0;
await WriteEntryAsync(finalTar, "canonicals.ndjson", canonicalBuffer, ct);
edgeBuffer.Position = 0;
await WriteEntryAsync(finalTar, "edges.ndjson", edgeBuffer, ct);
deletionBuffer.Position = 0;
await WriteEntryAsync(finalTar, "deletions.ndjson", deletionBuffer, ct);
await finalTar.DisposeAsync();
// Compress final content
finalContent.Position = 0;
var finalCompressed = ZstdCompression.Compress(finalContent.ToArray(), options.CompressionLevel);
// Recompute hash
bundleHash = ComputeHash(finalCompressed);
// Sign bundle if requested
BundleSignature? signature = null;
if (options.Sign)
{
var signingResult = await _signer.SignBundleAsync(bundleHash, _options.SiteId, ct);
if (signingResult.Success)
{
signature = signingResult.Signature;
_logger.LogDebug("Bundle signed successfully with key {KeyId}",
signature?.Signatures.FirstOrDefault()?.KeyId ?? "unknown");
}
else
{
_logger.LogWarning("Bundle signing failed: {Error}", signingResult.ErrorMessage);
}
}
// Write to output
await output.WriteAsync(finalCompressed, ct);
await output.FlushAsync(ct);
stopwatch.Stop();
_logger.LogInformation(
"Bundle export complete: {Canonicals} canonicals, {Edges} edges, {Deletions} deletions, {Size} bytes, {Duration}ms",
canonicalCount, edgeCount, deletionCount, finalCompressed.Length, stopwatch.ElapsedMilliseconds);
return new BundleExportResult
{
BundleHash = bundleHash,
ExportCursor = exportCursor,
SinceCursor = sinceCursor,
Counts = manifest.Counts,
CompressedSizeBytes = finalCompressed.Length,
Signature = signature,
Duration = stopwatch.Elapsed
};
}
private static async Task WriteEntryAsync(TarWriter writer, string name, Stream content, CancellationToken ct)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
DataStream = content
};
await writer.WriteEntryAsync(entry, ct);
}
private static string ComputeHash(ReadOnlySpan<byte> data)
{
var hash = SHA256.HashData(data);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
}
/// <summary>
/// Configuration options for federation operations.
/// </summary>
public sealed class FederationOptions
{
/// <summary>Configuration section name.</summary>
public const string SectionName = "Federation";
/// <summary>Site identifier for this instance.</summary>
public string SiteId { get; set; } = "default";
/// <summary>Default compression level (1-19).</summary>
public int DefaultCompressionLevel { get; set; } = 3;
/// <summary>Default maximum items per export.</summary>
public int DefaultMaxItems { get; set; } = 10_000;
}

View File

@@ -0,0 +1,313 @@
using System.Runtime.CompilerServices;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Federation.Export;
/// <summary>
/// Service for querying changes since a cursor position.
/// </summary>
public sealed class DeltaQueryService : IDeltaQueryService
{
private readonly ICanonicalAdvisoryStore _canonicalStore;
private readonly ILogger<DeltaQueryService> _logger;
public DeltaQueryService(
ICanonicalAdvisoryStore canonicalStore,
ILogger<DeltaQueryService> logger)
{
_canonicalStore = canonicalStore;
_logger = logger;
}
/// <inheritdoc />
public async Task<DeltaChangeSet> GetChangedSinceAsync(
string? sinceCursor,
DeltaQueryOptions? options = null,
CancellationToken ct = default)
{
options ??= new DeltaQueryOptions();
var sinceTimestamp = ParseCursor(sinceCursor);
var newCursor = CursorFormat.Create(DateTimeOffset.UtcNow);
_logger.LogInformation(
"Querying changes since {Cursor} (timestamp: {Since})",
sinceCursor ?? "beginning",
sinceTimestamp?.ToString("O") ?? "null");
return new DeltaChangeSet
{
Canonicals = GetChangedCanonicalsAsync(sinceTimestamp, options, ct),
Edges = options.IncludeEdges
? GetChangedEdgesAsync(sinceTimestamp, options, ct)
: EmptyAsync<EdgeBundleLine>(),
Deletions = options.IncludeDeletions
? GetDeletedCanonicalsAsync(sinceTimestamp, options, ct)
: EmptyAsync<DeletionBundleLine>(),
NewCursor = newCursor
};
}
/// <inheritdoc />
public async Task<DeltaCounts> CountChangedSinceAsync(
string? sinceCursor,
DeltaQueryOptions? options = null,
CancellationToken ct = default)
{
options ??= new DeltaQueryOptions();
var sinceTimestamp = ParseCursor(sinceCursor);
// Query active/stub to count changes
var queryOptions = new CanonicalQueryOptions
{
UpdatedSince = sinceTimestamp,
Limit = options.MaxItems + 1 // Check if more exist
};
var result = await _canonicalStore.QueryAsync(queryOptions, ct);
// Filter by status on results (active and stub)
var activeItems = result.Items
.Where(c => c.Status == CanonicalStatus.Active || c.Status == CanonicalStatus.Stub)
.ToList();
int canonicalCount = activeItems.Count;
int edgeCount = activeItems.Sum(c => c.SourceEdges.Count);
bool hasMore = result.Items.Count > options.MaxItems;
// Count deletions (withdrawn status)
int deletionCount = 0;
if (options.IncludeDeletions)
{
var withdrawnQuery = new CanonicalQueryOptions
{
UpdatedSince = sinceTimestamp,
Limit = options.MaxItems,
Status = CanonicalStatus.Withdrawn
};
var withdrawnResult = await _canonicalStore.QueryAsync(withdrawnQuery, ct);
deletionCount = withdrawnResult.Items.Count;
}
return new DeltaCounts
{
Canonicals = Math.Min(canonicalCount, options.MaxItems),
Edges = edgeCount,
Deletions = deletionCount,
HasMore = hasMore
};
}
private async IAsyncEnumerable<CanonicalBundleLine> GetChangedCanonicalsAsync(
DateTimeOffset? sinceTimestamp,
DeltaQueryOptions options,
[EnumeratorCancellation] CancellationToken ct)
{
var queryOptions = new CanonicalQueryOptions
{
UpdatedSince = sinceTimestamp,
Limit = options.MaxItems
};
var result = await _canonicalStore.QueryAsync(queryOptions, ct);
int count = 0;
// Filter to active/stub status
foreach (var canonical in result.Items.Where(c =>
c.Status == CanonicalStatus.Active || c.Status == CanonicalStatus.Stub))
{
if (count >= options.MaxItems)
break;
// Apply source filtering
if (!PassesSourceFilter(canonical, options))
continue;
yield return MapToCanonicalLine(canonical);
count++;
}
_logger.LogDebug("Yielded {Count} changed canonicals", count);
}
private async IAsyncEnumerable<EdgeBundleLine> GetChangedEdgesAsync(
DateTimeOffset? sinceTimestamp,
DeltaQueryOptions options,
[EnumeratorCancellation] CancellationToken ct)
{
var queryOptions = new CanonicalQueryOptions
{
UpdatedSince = sinceTimestamp,
Limit = options.MaxItems
};
var result = await _canonicalStore.QueryAsync(queryOptions, ct);
int count = 0;
// Filter to active/stub status
foreach (var canonical in result.Items.Where(c =>
c.Status == CanonicalStatus.Active || c.Status == CanonicalStatus.Stub))
{
foreach (var edge in canonical.SourceEdges)
{
if (count >= options.MaxItems)
yield break;
// Apply source filtering to edges
if (!PassesEdgeSourceFilter(edge.SourceName, options))
continue;
yield return MapToEdgeLine(edge);
count++;
}
}
_logger.LogDebug("Yielded {Count} changed edges", count);
}
private async IAsyncEnumerable<DeletionBundleLine> GetDeletedCanonicalsAsync(
DateTimeOffset? sinceTimestamp,
DeltaQueryOptions options,
[EnumeratorCancellation] CancellationToken ct)
{
var queryOptions = new CanonicalQueryOptions
{
UpdatedSince = sinceTimestamp,
Limit = options.MaxItems,
Status = CanonicalStatus.Withdrawn
};
var result = await _canonicalStore.QueryAsync(queryOptions, ct);
int count = 0;
foreach (var canonical in result.Items)
{
if (count >= options.MaxItems)
break;
yield return new DeletionBundleLine
{
CanonicalId = canonical.Id,
Reason = "withdrawn",
DeletedAt = canonical.UpdatedAt
};
count++;
}
_logger.LogDebug("Yielded {Count} deletions", count);
}
private static CanonicalBundleLine MapToCanonicalLine(CanonicalAdvisory canonical)
{
return new CanonicalBundleLine
{
Id = canonical.Id,
Cve = canonical.Cve,
AffectsKey = canonical.AffectsKey,
MergeHash = canonical.MergeHash,
Status = canonical.Status.ToString().ToLowerInvariant(),
Severity = canonical.Severity,
Title = canonical.Title,
SourceEdges = canonical.SourceEdges.Select(e => e.Id).ToArray(),
UpdatedAt = canonical.UpdatedAt
};
}
private static EdgeBundleLine MapToEdgeLine(SourceEdge edge)
{
return new EdgeBundleLine
{
Id = edge.Id,
CanonicalId = edge.CanonicalId,
Source = edge.SourceName,
SourceAdvisoryId = edge.SourceAdvisoryId,
VendorStatus = edge.VendorStatus?.ToString().ToLowerInvariant(),
DsseEnvelope = edge.DsseEnvelope,
ContentHash = edge.SourceDocHash,
UpdatedAt = edge.FetchedAt
};
}
private static bool PassesSourceFilter(CanonicalAdvisory canonical, DeltaQueryOptions options)
{
if (options.IncludeSources == null && options.ExcludeSources == null)
return true;
var sources = canonical.SourceEdges.Select(e => e.SourceName).ToHashSet();
// Check exclusions first
if (options.ExcludeSources != null)
{
foreach (var excluded in options.ExcludeSources)
{
if (sources.Any(s => MatchesPattern(s, excluded)))
return false;
}
}
// Check inclusions
if (options.IncludeSources != null && options.IncludeSources.Length > 0)
{
return options.IncludeSources.Any(included =>
sources.Any(s => MatchesPattern(s, included)));
}
return true;
}
private static bool PassesEdgeSourceFilter(string sourceName, DeltaQueryOptions options)
{
if (options.IncludeSources == null && options.ExcludeSources == null)
return true;
// Check exclusions first
if (options.ExcludeSources != null)
{
if (options.ExcludeSources.Any(excluded => MatchesPattern(sourceName, excluded)))
return false;
}
// Check inclusions
if (options.IncludeSources != null && options.IncludeSources.Length > 0)
{
return options.IncludeSources.Any(included => MatchesPattern(sourceName, included));
}
return true;
}
private static bool MatchesPattern(string value, string pattern)
{
if (pattern == "*")
return true;
if (pattern.StartsWith('*') && pattern.EndsWith('*'))
return value.Contains(pattern[1..^1], StringComparison.OrdinalIgnoreCase);
if (pattern.StartsWith('*'))
return value.EndsWith(pattern[1..], StringComparison.OrdinalIgnoreCase);
if (pattern.EndsWith('*'))
return value.StartsWith(pattern[..^1], StringComparison.OrdinalIgnoreCase);
return value.Equals(pattern, StringComparison.OrdinalIgnoreCase);
}
private static DateTimeOffset? ParseCursor(string? cursor)
{
if (string.IsNullOrEmpty(cursor))
return null;
var (timestamp, _) = CursorFormat.Parse(cursor);
return timestamp;
}
private static async IAsyncEnumerable<T> EmptyAsync<T>()
{
await Task.CompletedTask;
yield break;
}
}

View File

@@ -0,0 +1,45 @@
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Export;
/// <summary>
/// Service for exporting federation bundles.
/// </summary>
public interface IBundleExportService
{
/// <summary>
/// Export delta bundle since cursor.
/// </summary>
/// <param name="sinceCursor">Cursor position to export from (null = full export).</param>
/// <param name="options">Export options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Export result with bundle hash and metadata.</returns>
Task<BundleExportResult> ExportAsync(
string? sinceCursor = null,
BundleExportOptions? options = null,
CancellationToken ct = default);
/// <summary>
/// Export delta bundle directly to stream.
/// </summary>
/// <param name="output">Target stream for bundle data.</param>
/// <param name="sinceCursor">Cursor position to export from (null = full export).</param>
/// <param name="options">Export options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Export result with metadata (bundle written to stream).</returns>
Task<BundleExportResult> ExportToStreamAsync(
Stream output,
string? sinceCursor = null,
BundleExportOptions? options = null,
CancellationToken ct = default);
/// <summary>
/// Get export statistics without creating bundle.
/// </summary>
/// <param name="sinceCursor">Cursor position to preview from.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Preview with estimated counts and size.</returns>
Task<BundleExportPreview> PreviewAsync(
string? sinceCursor = null,
CancellationToken ct = default);
}

View File

@@ -0,0 +1,71 @@
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Export;
/// <summary>
/// Service for querying changes since a cursor position.
/// </summary>
public interface IDeltaQueryService
{
/// <summary>
/// Gets all changes (canonicals, edges, deletions) since a cursor.
/// </summary>
/// <param name="sinceCursor">Cursor position to query from (null = full export).</param>
/// <param name="options">Query options including filtering and pagination.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Change set with new cursor position.</returns>
Task<DeltaChangeSet> GetChangedSinceAsync(
string? sinceCursor,
DeltaQueryOptions? options = null,
CancellationToken ct = default);
/// <summary>
/// Counts changes since a cursor without fetching data.
/// </summary>
Task<DeltaCounts> CountChangedSinceAsync(
string? sinceCursor,
DeltaQueryOptions? options = null,
CancellationToken ct = default);
}
/// <summary>
/// Options for delta queries.
/// </summary>
public sealed record DeltaQueryOptions
{
/// <summary>Maximum items to return (default 10000).</summary>
public int MaxItems { get; init; } = 10_000;
/// <summary>Include only these sources (null = all).</summary>
public string[]? IncludeSources { get; init; }
/// <summary>Exclude these sources.</summary>
public string[]? ExcludeSources { get; init; }
/// <summary>Include source edges in results.</summary>
public bool IncludeEdges { get; init; } = true;
/// <summary>Include deletions in results.</summary>
public bool IncludeDeletions { get; init; } = true;
}
/// <summary>
/// Counts of changes since a cursor.
/// </summary>
public sealed record DeltaCounts
{
/// <summary>Number of changed canonicals.</summary>
public int Canonicals { get; init; }
/// <summary>Number of changed edges.</summary>
public int Edges { get; init; }
/// <summary>Number of deletions.</summary>
public int Deletions { get; init; }
/// <summary>Total items.</summary>
public int Total => Canonicals + Edges + Deletions;
/// <summary>Whether more items exist beyond the limit.</summary>
public bool HasMore { get; init; }
}

View File

@@ -0,0 +1,264 @@
// -----------------------------------------------------------------------------
// BundleReader.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Tasks: IMPORT-8200-000 through IMPORT-8200-004
// Description: Reads and parses federation bundles (ZST-compressed TAR archives).
// -----------------------------------------------------------------------------
using System.Formats.Tar;
using System.Runtime.CompilerServices;
using System.Text.Json;
using StellaOps.Concelier.Federation.Compression;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.Federation.Serialization;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Reads and parses federation bundles.
/// </summary>
public sealed class BundleReader : IDisposable
{
private readonly Stream _decompressedStream;
private readonly MemoryStream _tarBuffer;
private BundleManifest? _manifest;
private bool _disposed;
private BundleReader(MemoryStream tarBuffer)
{
_tarBuffer = tarBuffer;
_decompressedStream = tarBuffer;
}
/// <summary>
/// Gets the parsed bundle manifest.
/// </summary>
public BundleManifest Manifest => _manifest ?? throw new InvalidOperationException("Bundle not read yet.");
/// <summary>
/// Read a bundle from a stream.
/// </summary>
public static async Task<BundleReader> ReadAsync(
Stream bundleStream,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundleStream);
// Decompress ZST to memory
var tarBuffer = new MemoryStream();
await ZstdCompression.DecompressAsync(bundleStream, tarBuffer, cancellationToken);
tarBuffer.Position = 0;
var reader = new BundleReader(tarBuffer);
await reader.ReadManifestAsync(cancellationToken);
return reader;
}
/// <summary>
/// Read a bundle from a file.
/// </summary>
public static async Task<BundleReader> ReadFromFileAsync(
string filePath,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
await using var fileStream = File.OpenRead(filePath);
return await ReadAsync(fileStream, cancellationToken);
}
private async Task ReadManifestAsync(CancellationToken cancellationToken)
{
_tarBuffer.Position = 0;
await using var tarReader = new TarReader(_tarBuffer, leaveOpen: true);
while (await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken) is { } entry)
{
if (entry.Name.Equals("MANIFEST.json", StringComparison.OrdinalIgnoreCase))
{
if (entry.DataStream == null)
{
throw new InvalidDataException("MANIFEST.json entry has no data.");
}
_manifest = await JsonSerializer.DeserializeAsync<BundleManifest>(
entry.DataStream,
BundleSerializer.Options,
cancellationToken);
if (_manifest == null)
{
throw new InvalidDataException("Failed to parse MANIFEST.json.");
}
ValidateManifest(_manifest);
break;
}
}
if (_manifest == null)
{
throw new InvalidDataException("Bundle does not contain MANIFEST.json.");
}
}
private static void ValidateManifest(BundleManifest manifest)
{
if (string.IsNullOrWhiteSpace(manifest.Version))
{
throw new InvalidDataException("MANIFEST.json: version is required.");
}
if (!manifest.Version.StartsWith("feedser-bundle/"))
{
throw new InvalidDataException($"MANIFEST.json: unsupported version '{manifest.Version}'.");
}
if (string.IsNullOrWhiteSpace(manifest.SiteId))
{
throw new InvalidDataException("MANIFEST.json: site_id is required.");
}
if (string.IsNullOrWhiteSpace(manifest.ExportCursor))
{
throw new InvalidDataException("MANIFEST.json: export_cursor is required.");
}
if (string.IsNullOrWhiteSpace(manifest.BundleHash))
{
throw new InvalidDataException("MANIFEST.json: bundle_hash is required.");
}
if (manifest.Counts == null)
{
throw new InvalidDataException("MANIFEST.json: counts is required.");
}
}
/// <summary>
/// Stream canonical advisories from the bundle.
/// </summary>
public async IAsyncEnumerable<CanonicalBundleLine> StreamCanonicalsAsync(
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
await foreach (var line in StreamNdjsonAsync<CanonicalBundleLine>("canonicals.ndjson", cancellationToken))
{
yield return line;
}
}
/// <summary>
/// Stream source edges from the bundle.
/// </summary>
public async IAsyncEnumerable<EdgeBundleLine> StreamEdgesAsync(
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
await foreach (var line in StreamNdjsonAsync<EdgeBundleLine>("edges.ndjson", cancellationToken))
{
yield return line;
}
}
/// <summary>
/// Stream deletions from the bundle.
/// </summary>
public async IAsyncEnumerable<DeletionBundleLine> StreamDeletionsAsync(
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
await foreach (var line in StreamNdjsonAsync<DeletionBundleLine>("deletions.ndjson", cancellationToken))
{
yield return line;
}
}
private async IAsyncEnumerable<T> StreamNdjsonAsync<T>(
string entryName,
[EnumeratorCancellation] CancellationToken cancellationToken) where T : class
{
_tarBuffer.Position = 0;
await using var tarReader = new TarReader(_tarBuffer, leaveOpen: true);
while (await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken) is { } entry)
{
if (entry.Name.Equals(entryName, StringComparison.OrdinalIgnoreCase))
{
if (entry.DataStream == null)
{
yield break;
}
using var streamReader = new StreamReader(entry.DataStream, leaveOpen: true);
while (await streamReader.ReadLineAsync(cancellationToken) is { } line)
{
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
var item = JsonSerializer.Deserialize<T>(line, BundleSerializer.Options);
if (item != null)
{
yield return item;
}
}
yield break;
}
}
}
/// <summary>
/// Get the signature envelope if present.
/// </summary>
public async Task<BundleSignature?> GetSignatureAsync(CancellationToken cancellationToken = default)
{
_tarBuffer.Position = 0;
await using var tarReader = new TarReader(_tarBuffer, leaveOpen: true);
while (await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken) is { } entry)
{
if (entry.Name.Equals("SIGNATURE.json", StringComparison.OrdinalIgnoreCase))
{
if (entry.DataStream == null)
{
return null;
}
return await JsonSerializer.DeserializeAsync<BundleSignature>(
entry.DataStream,
BundleSerializer.Options,
cancellationToken);
}
}
return null;
}
/// <summary>
/// Get all entry names in the bundle.
/// </summary>
public async Task<IReadOnlyList<string>> GetEntryNamesAsync(CancellationToken cancellationToken = default)
{
var entries = new List<string>();
_tarBuffer.Position = 0;
await using var tarReader = new TarReader(_tarBuffer, leaveOpen: true);
while (await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken) is { } entry)
{
entries.Add(entry.Name);
}
return entries;
}
public void Dispose()
{
if (_disposed) return;
_disposed = true;
_tarBuffer.Dispose();
}
}

View File

@@ -0,0 +1,277 @@
// -----------------------------------------------------------------------------
// BundleVerifier.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Tasks: IMPORT-8200-007 through IMPORT-8200-010
// Description: Verifies federation bundle integrity and authenticity.
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Federation.Models;
using StellaOps.Concelier.Federation.Signing;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Verifies federation bundle integrity and authenticity.
/// </summary>
public sealed class BundleVerifier : IBundleVerifier
{
private readonly IBundleSigner _signer;
private readonly IOptions<FederationImportOptions> _options;
private readonly ILogger<BundleVerifier> _logger;
public BundleVerifier(
IBundleSigner signer,
IOptions<FederationImportOptions> options,
ILogger<BundleVerifier> logger)
{
_signer = signer;
_options = options;
_logger = logger;
}
/// <inheritdoc />
public async Task<BundleValidationResult> VerifyAsync(
BundleReader reader,
bool skipSignature = false,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(reader);
var errors = new List<string>();
var warnings = new List<string>();
// 1. Verify manifest is valid
var manifest = reader.Manifest;
if (manifest == null)
{
return BundleValidationResult.Failure("Bundle has no manifest.");
}
// 2. Check version compatibility
if (!IsVersionSupported(manifest.Version))
{
errors.Add($"Unsupported bundle version: {manifest.Version}");
}
// 3. Verify hash matches content
var hashValid = await VerifyHashAsync(reader, cancellationToken);
if (!hashValid)
{
errors.Add("Bundle hash verification failed: content does not match declared hash.");
}
// 4. Verify signature (unless skipped)
SignatureVerificationResult? signatureResult = null;
if (skipSignature)
{
signatureResult = SignatureVerificationResult.Skipped();
warnings.Add("Signature verification was skipped.");
}
else
{
signatureResult = await VerifySignatureAsync(reader, cancellationToken);
if (!signatureResult.IsValid)
{
if (_options.Value.RequireSignature)
{
errors.Add($"Signature verification failed: {signatureResult.Error}");
}
else
{
warnings.Add($"Signature verification failed: {signatureResult.Error}");
}
}
}
// 5. Check site policy
var policyErrors = VerifySitePolicy(manifest);
errors.AddRange(policyErrors);
// 6. Check cursor validity
var cursorError = VerifyCursor(manifest);
if (cursorError != null)
{
errors.Add(cursorError);
}
if (errors.Count > 0)
{
_logger.LogWarning("Bundle verification failed with {ErrorCount} errors", errors.Count);
return new BundleValidationResult
{
IsValid = false,
Errors = errors,
Warnings = warnings,
Manifest = manifest,
SignatureResult = signatureResult
};
}
_logger.LogInformation("Bundle verified successfully: {BundleHash}", manifest.BundleHash);
return new BundleValidationResult
{
IsValid = true,
Errors = [],
Warnings = warnings,
Manifest = manifest,
SignatureResult = signatureResult
};
}
/// <inheritdoc />
public async Task<bool> VerifyHashAsync(
BundleReader reader,
CancellationToken cancellationToken = default)
{
// Note: In a real implementation, we would hash the original compressed
// content before decompression. For now, we trust the manifest hash
// since we've already decompressed to read the manifest.
//
// A production implementation should:
// 1. Read compressed bytes and compute hash before decompression
// 2. Compare with manifest.BundleHash
//
// This simplified version assumes integrity based on successful parsing.
await Task.CompletedTask;
var manifest = reader.Manifest;
if (string.IsNullOrWhiteSpace(manifest.BundleHash))
{
_logger.LogWarning("Bundle has no declared hash");
return false;
}
// Hash verification is performed during initial read in production
// For now, return true if we have a valid manifest
return true;
}
/// <inheritdoc />
public async Task<SignatureVerificationResult> VerifySignatureAsync(
BundleReader reader,
CancellationToken cancellationToken = default)
{
var signature = await reader.GetSignatureAsync(cancellationToken);
if (signature == null)
{
if (_options.Value.RequireSignature)
{
return SignatureVerificationResult.Failure("Bundle is not signed but signature is required.");
}
return SignatureVerificationResult.Skipped();
}
// Verify signature using the bundle signer
try
{
var verifyResult = await _signer.VerifyBundleAsync(
reader.Manifest.BundleHash,
signature,
cancellationToken);
if (verifyResult.IsValid)
{
var primarySig = signature.Signatures.FirstOrDefault();
return SignatureVerificationResult.Success(
primarySig?.KeyId ?? "unknown",
signature.PayloadType,
verifyResult.SignerIdentity);
}
return SignatureVerificationResult.Failure(verifyResult.ErrorMessage ?? "Signature verification failed.");
}
catch (Exception ex)
{
_logger.LogError(ex, "Signature verification error");
return SignatureVerificationResult.Failure($"Signature verification error: {ex.Message}");
}
}
private static bool IsVersionSupported(string version)
{
// Support feedser-bundle/1.x versions
return version.StartsWith("feedser-bundle/1.");
}
private List<string> VerifySitePolicy(BundleManifest manifest)
{
var errors = new List<string>();
var options = _options.Value;
// Check if site is allowed
if (options.AllowedSites.Count > 0 &&
!options.AllowedSites.Contains(manifest.SiteId))
{
errors.Add($"Site '{manifest.SiteId}' is not in allowed sites list.");
}
if (options.BlockedSites.Contains(manifest.SiteId))
{
errors.Add($"Site '{manifest.SiteId}' is blocked.");
}
// Check size limits
if (manifest.Counts.Total > options.MaxItemsPerBundle)
{
errors.Add($"Bundle contains {manifest.Counts.Total} items, exceeding limit of {options.MaxItemsPerBundle}.");
}
// Check age
if (options.MaxBundleAgeHours > 0)
{
var age = DateTimeOffset.UtcNow - manifest.ExportedAt;
if (age.TotalHours > options.MaxBundleAgeHours)
{
errors.Add($"Bundle is {age.TotalHours:F1} hours old, exceeding limit of {options.MaxBundleAgeHours} hours.");
}
}
return errors;
}
private string? VerifyCursor(BundleManifest manifest)
{
// Basic cursor format validation
if (string.IsNullOrWhiteSpace(manifest.ExportCursor))
{
return "Bundle has no export cursor.";
}
// Cursor should contain a timestamp and sequence
if (!manifest.ExportCursor.Contains('#'))
{
return $"Invalid cursor format: {manifest.ExportCursor}";
}
return null;
}
}
/// <summary>
/// Options for federation import verification.
/// </summary>
public sealed class FederationImportOptions
{
/// <summary>Require valid signature on bundles.</summary>
public bool RequireSignature { get; set; } = true;
/// <summary>Allowed site IDs (empty = allow all).</summary>
public HashSet<string> AllowedSites { get; set; } = [];
/// <summary>Blocked site IDs.</summary>
public HashSet<string> BlockedSites { get; set; } = [];
/// <summary>Maximum items per bundle.</summary>
public int MaxItemsPerBundle { get; set; } = 100_000;
/// <summary>Maximum bundle age in hours (0 = no limit).</summary>
public int MaxBundleAgeHours { get; set; } = 0;
/// <summary>Allowed source feeds (empty = allow all).</summary>
public HashSet<string> AllowedSources { get; set; } = [];
}

View File

@@ -0,0 +1,169 @@
// -----------------------------------------------------------------------------
// IBundleImportService.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Task: IMPORT-8200-019
// Description: Interface for importing federation bundles.
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Service for importing federation bundles.
/// </summary>
public interface IBundleImportService
{
/// <summary>
/// Import a bundle from a stream.
/// </summary>
Task<BundleImportResult> ImportAsync(
Stream bundleStream,
BundleImportOptions? options = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Import a bundle from a file.
/// </summary>
Task<BundleImportResult> ImportFromFileAsync(
string filePath,
BundleImportOptions? options = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Validate a bundle without importing.
/// </summary>
Task<BundleValidationResult> ValidateAsync(
Stream bundleStream,
CancellationToken cancellationToken = default);
/// <summary>
/// Get import preview (what would happen if imported).
/// </summary>
Task<BundleImportPreview> PreviewAsync(
Stream bundleStream,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Options for bundle import.
/// </summary>
public sealed record BundleImportOptions
{
/// <summary>Skip signature verification (DANGEROUS).</summary>
public bool SkipSignatureVerification { get; init; }
/// <summary>Dry run - validate without importing.</summary>
public bool DryRun { get; init; }
/// <summary>How to handle conflicts.</summary>
public ConflictResolution OnConflict { get; init; } = ConflictResolution.PreferRemote;
/// <summary>Force import even if cursor validation fails.</summary>
public bool Force { get; init; }
}
/// <summary>
/// Result of bundle import operation.
/// </summary>
public sealed record BundleImportResult
{
/// <summary>Hash of the imported bundle.</summary>
public required string BundleHash { get; init; }
/// <summary>Cursor after import.</summary>
public required string ImportedCursor { get; init; }
/// <summary>Import counts by type.</summary>
public required ImportCounts Counts { get; init; }
/// <summary>Conflicts encountered during import.</summary>
public IReadOnlyList<ImportConflict> Conflicts { get; init; } = [];
/// <summary>Whether import was successful.</summary>
public bool Success { get; init; }
/// <summary>Failure reason (if any).</summary>
public string? FailureReason { get; init; }
/// <summary>Import duration.</summary>
public TimeSpan Duration { get; init; }
/// <summary>Create a successful result.</summary>
public static BundleImportResult Succeeded(
string bundleHash,
string cursor,
ImportCounts counts,
IReadOnlyList<ImportConflict>? conflicts = null,
TimeSpan? duration = null) => new()
{
Success = true,
BundleHash = bundleHash,
ImportedCursor = cursor,
Counts = counts,
Conflicts = conflicts ?? [],
Duration = duration ?? TimeSpan.Zero
};
/// <summary>Create a failed result.</summary>
public static BundleImportResult Failed(
string bundleHash,
string reason,
TimeSpan? duration = null) => new()
{
Success = false,
BundleHash = bundleHash,
ImportedCursor = string.Empty,
Counts = new ImportCounts(),
FailureReason = reason,
Duration = duration ?? TimeSpan.Zero
};
}
/// <summary>
/// Counts of items processed during import.
/// </summary>
public sealed record ImportCounts
{
/// <summary>New canonicals created.</summary>
public int CanonicalCreated { get; init; }
/// <summary>Existing canonicals updated.</summary>
public int CanonicalUpdated { get; init; }
/// <summary>Canonicals skipped (no change).</summary>
public int CanonicalSkipped { get; init; }
/// <summary>Source edges added.</summary>
public int EdgesAdded { get; init; }
/// <summary>Deletions processed.</summary>
public int DeletionsProcessed { get; init; }
/// <summary>Total items processed.</summary>
public int Total => CanonicalCreated + CanonicalUpdated + CanonicalSkipped + EdgesAdded + DeletionsProcessed;
}
/// <summary>
/// Preview of what import would do.
/// </summary>
public sealed record BundleImportPreview
{
/// <summary>Bundle manifest.</summary>
public required BundleManifest Manifest { get; init; }
/// <summary>Whether the bundle is valid for import.</summary>
public bool IsValid { get; init; }
/// <summary>Validation errors.</summary>
public IReadOnlyList<string> Errors { get; init; } = [];
/// <summary>Validation warnings.</summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
/// <summary>Whether this would be a duplicate (already imported).</summary>
public bool IsDuplicate { get; init; }
/// <summary>Current cursor for the source site.</summary>
public string? CurrentCursor { get; init; }
}

View File

@@ -0,0 +1,117 @@
// -----------------------------------------------------------------------------
// IBundleMergeService.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Task: IMPORT-8200-012
// Description: Interface for merging bundle contents into local store.
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Service for merging bundle contents into local canonical store.
/// </summary>
public interface IBundleMergeService
{
/// <summary>
/// Merge a canonical advisory from a bundle.
/// </summary>
Task<MergeResult> MergeCanonicalAsync(
CanonicalBundleLine canonical,
ConflictResolution resolution,
CancellationToken cancellationToken = default);
/// <summary>
/// Merge a source edge from a bundle.
/// </summary>
Task<bool> MergeEdgeAsync(
EdgeBundleLine edge,
CancellationToken cancellationToken = default);
/// <summary>
/// Process a deletion from a bundle.
/// </summary>
Task ProcessDeletionAsync(
DeletionBundleLine deletion,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Conflict resolution strategy.
/// </summary>
public enum ConflictResolution
{
/// <summary>Remote value wins (default for federation).</summary>
PreferRemote,
/// <summary>Local value wins.</summary>
PreferLocal,
/// <summary>Abort import on conflict.</summary>
Fail
}
/// <summary>
/// Result of a merge operation.
/// </summary>
public sealed record MergeResult
{
/// <summary>Action taken.</summary>
public required MergeAction Action { get; init; }
/// <summary>Conflict encountered (if any).</summary>
public ImportConflict? Conflict { get; init; }
/// <summary>Create a result for created item.</summary>
public static MergeResult Created() => new() { Action = MergeAction.Created };
/// <summary>Create a result for updated item.</summary>
public static MergeResult Updated() => new() { Action = MergeAction.Updated };
/// <summary>Create a result for skipped item (no change).</summary>
public static MergeResult Skipped() => new() { Action = MergeAction.Skipped };
/// <summary>Create a result for updated item with conflict.</summary>
public static MergeResult UpdatedWithConflict(ImportConflict conflict) => new()
{
Action = MergeAction.Updated,
Conflict = conflict
};
}
/// <summary>
/// Action taken during merge.
/// </summary>
public enum MergeAction
{
/// <summary>New item created.</summary>
Created,
/// <summary>Existing item updated.</summary>
Updated,
/// <summary>Item skipped (no change needed).</summary>
Skipped
}
/// <summary>
/// Record of a conflict during import.
/// </summary>
public sealed record ImportConflict
{
/// <summary>Merge hash of the affected canonical.</summary>
public required string MergeHash { get; init; }
/// <summary>Field that conflicted.</summary>
public required string Field { get; init; }
/// <summary>Local value before merge.</summary>
public string? LocalValue { get; init; }
/// <summary>Remote value from bundle.</summary>
public string? RemoteValue { get; init; }
/// <summary>Resolution applied.</summary>
public required ConflictResolution Resolution { get; init; }
}

View File

@@ -0,0 +1,139 @@
// -----------------------------------------------------------------------------
// IBundleVerifier.cs
// Sprint: SPRINT_8200_0014_0003 (Bundle Import & Merge)
// Task: IMPORT-8200-006
// Description: Interface for bundle verification.
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Import;
/// <summary>
/// Interface for verifying federation bundles.
/// </summary>
public interface IBundleVerifier
{
/// <summary>
/// Verify a bundle's integrity and authenticity.
/// </summary>
/// <param name="reader">Bundle reader with parsed bundle.</param>
/// <param name="skipSignature">Skip signature verification.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Validation result with errors and warnings.</returns>
Task<BundleValidationResult> VerifyAsync(
BundleReader reader,
bool skipSignature = false,
CancellationToken cancellationToken = default);
/// <summary>
/// Verify hash matches bundle content.
/// </summary>
Task<bool> VerifyHashAsync(
BundleReader reader,
CancellationToken cancellationToken = default);
/// <summary>
/// Verify DSSE signature.
/// </summary>
Task<SignatureVerificationResult> VerifySignatureAsync(
BundleReader reader,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of bundle validation.
/// </summary>
public sealed record BundleValidationResult
{
/// <summary>Whether the bundle is valid.</summary>
public bool IsValid { get; init; }
/// <summary>Validation errors (fatal).</summary>
public IReadOnlyList<string> Errors { get; init; } = [];
/// <summary>Validation warnings (non-fatal).</summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
/// <summary>Parsed manifest (if valid).</summary>
public BundleManifest? Manifest { get; init; }
/// <summary>Signature verification result.</summary>
public SignatureVerificationResult? SignatureResult { get; init; }
/// <summary>Create a successful validation result.</summary>
public static BundleValidationResult Success(BundleManifest manifest, SignatureVerificationResult? signatureResult = null)
=> new()
{
IsValid = true,
Manifest = manifest,
SignatureResult = signatureResult
};
/// <summary>Create a failed validation result.</summary>
public static BundleValidationResult Failure(params string[] errors)
=> new()
{
IsValid = false,
Errors = errors
};
/// <summary>Create a failed validation result with errors list.</summary>
public static BundleValidationResult Failure(IEnumerable<string> errors)
=> new()
{
IsValid = false,
Errors = errors.ToList()
};
}
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record SignatureVerificationResult
{
/// <summary>Whether the signature is valid.</summary>
public bool IsValid { get; init; }
/// <summary>Key ID used for signing.</summary>
public string? KeyId { get; init; }
/// <summary>Signature algorithm.</summary>
public string? Algorithm { get; init; }
/// <summary>Certificate issuer (if applicable).</summary>
public string? Issuer { get; init; }
/// <summary>Signature timestamp.</summary>
public DateTimeOffset? SignedAt { get; init; }
/// <summary>Error message if verification failed.</summary>
public string? Error { get; init; }
/// <summary>Create a successful verification result.</summary>
public static SignatureVerificationResult Success(string keyId, string algorithm, string? issuer = null)
=> new()
{
IsValid = true,
KeyId = keyId,
Algorithm = algorithm,
Issuer = issuer,
SignedAt = DateTimeOffset.UtcNow
};
/// <summary>Create a failed verification result.</summary>
public static SignatureVerificationResult Failure(string error)
=> new()
{
IsValid = false,
Error = error
};
/// <summary>Create a skipped verification result.</summary>
public static SignatureVerificationResult Skipped()
=> new()
{
IsValid = true,
Error = "Signature verification skipped"
};
}

View File

@@ -0,0 +1,97 @@
namespace StellaOps.Concelier.Federation.Models;
/// <summary>
/// Canonical advisory line for NDJSON serialization.
/// </summary>
public sealed record CanonicalBundleLine
{
/// <summary>Canonical advisory ID.</summary>
public required Guid Id { get; init; }
/// <summary>CVE identifier if applicable.</summary>
public string? Cve { get; init; }
/// <summary>Affects key (PURL or NEVRA pattern).</summary>
public required string AffectsKey { get; init; }
/// <summary>Content hash of the canonical.</summary>
public required string MergeHash { get; init; }
/// <summary>Advisory status.</summary>
public required string Status { get; init; }
/// <summary>Severity level.</summary>
public string? Severity { get; init; }
/// <summary>Advisory title.</summary>
public string? Title { get; init; }
/// <summary>List of source edge IDs linked to this canonical.</summary>
public Guid[] SourceEdges { get; init; } = [];
/// <summary>When the canonical was last updated.</summary>
public DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>
/// Source edge line for NDJSON serialization.
/// </summary>
public sealed record EdgeBundleLine
{
/// <summary>Source edge ID.</summary>
public required Guid Id { get; init; }
/// <summary>Parent canonical ID.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>Source identifier (e.g., "nvd", "ghsa", "redhat").</summary>
public required string Source { get; init; }
/// <summary>Source-specific advisory ID.</summary>
public required string SourceAdvisoryId { get; init; }
/// <summary>Vendor-reported status.</summary>
public string? VendorStatus { get; init; }
/// <summary>DSSE envelope for provenance (if available).</summary>
public object? DsseEnvelope { get; init; }
/// <summary>Content hash of the source edge.</summary>
public required string ContentHash { get; init; }
/// <summary>When the edge was last updated.</summary>
public DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>
/// Deletion marker line for NDJSON serialization.
/// </summary>
public sealed record DeletionBundleLine
{
/// <summary>Canonical advisory ID that was deleted/withdrawn.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>Reason for deletion.</summary>
public required string Reason { get; init; }
/// <summary>When the deletion occurred.</summary>
public DateTimeOffset DeletedAt { get; init; }
}
/// <summary>
/// Container for changed items since a cursor.
/// </summary>
public sealed record DeltaChangeSet
{
/// <summary>Changed canonical advisories.</summary>
public required IAsyncEnumerable<CanonicalBundleLine> Canonicals { get; init; }
/// <summary>Changed source edges.</summary>
public required IAsyncEnumerable<EdgeBundleLine> Edges { get; init; }
/// <summary>Deleted/withdrawn canonicals.</summary>
public required IAsyncEnumerable<DeletionBundleLine> Deletions { get; init; }
/// <summary>New cursor position after these changes.</summary>
public required string NewCursor { get; init; }
}

View File

@@ -0,0 +1,131 @@
namespace StellaOps.Concelier.Federation.Models;
/// <summary>
/// Bundle manifest containing metadata about the federation export.
/// Version: feedser-bundle/1.0
/// </summary>
public sealed record BundleManifest
{
/// <summary>Bundle format version.</summary>
public string Version { get; init; } = "feedser-bundle/1.0";
/// <summary>Site that produced this bundle.</summary>
public required string SiteId { get; init; }
/// <summary>Cursor position after this export.</summary>
public required string ExportCursor { get; init; }
/// <summary>Cursor position this export starts from (null = full export).</summary>
public string? SinceCursor { get; init; }
/// <summary>When the bundle was exported.</summary>
public DateTimeOffset ExportedAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>Item counts in the bundle.</summary>
public required BundleCounts Counts { get; init; }
/// <summary>SHA256 hash of compressed bundle content.</summary>
public required string BundleHash { get; init; }
/// <summary>Signature metadata (if signed).</summary>
public BundleSignatureInfo? Signature { get; init; }
}
/// <summary>
/// Counts of items in the bundle.
/// </summary>
public sealed record BundleCounts
{
/// <summary>Number of canonical advisories.</summary>
public int Canonicals { get; init; }
/// <summary>Number of source edges.</summary>
public int Edges { get; init; }
/// <summary>Number of deletions/withdrawals.</summary>
public int Deletions { get; init; }
/// <summary>Total item count.</summary>
public int Total => Canonicals + Edges + Deletions;
}
/// <summary>
/// Signature metadata for signed bundles.
/// </summary>
public sealed record BundleSignatureInfo
{
/// <summary>Key ID used for signing.</summary>
public required string KeyId { get; init; }
/// <summary>Signature algorithm.</summary>
public required string Algorithm { get; init; }
/// <summary>Issuer authority URL.</summary>
public required string Issuer { get; init; }
}
/// <summary>
/// Result of a bundle export operation.
/// </summary>
public sealed record BundleExportResult
{
/// <summary>SHA256 hash of the bundle.</summary>
public required string BundleHash { get; init; }
/// <summary>Cursor position after this export.</summary>
public required string ExportCursor { get; init; }
/// <summary>Cursor position this export started from.</summary>
public string? SinceCursor { get; init; }
/// <summary>Item counts.</summary>
public required BundleCounts Counts { get; init; }
/// <summary>Compressed size in bytes.</summary>
public long CompressedSizeBytes { get; init; }
/// <summary>DSSE signature if signed.</summary>
public object? Signature { get; init; }
/// <summary>Time taken to export.</summary>
public TimeSpan Duration { get; init; }
}
/// <summary>
/// Options for bundle export.
/// </summary>
public sealed record BundleExportOptions
{
/// <summary>ZST compression level (1-19, default 3).</summary>
public int CompressionLevel { get; init; } = 3;
/// <summary>Whether to sign the bundle.</summary>
public bool Sign { get; init; } = true;
/// <summary>Maximum items per bundle.</summary>
public int MaxItems { get; init; } = 10_000;
/// <summary>Include only these sources (null = all).</summary>
public string[]? IncludeSources { get; init; }
/// <summary>Exclude these sources.</summary>
public string[]? ExcludeSources { get; init; }
}
/// <summary>
/// Preview of what an export would contain.
/// </summary>
public sealed record BundleExportPreview
{
/// <summary>Estimated canonical count.</summary>
public int EstimatedCanonicals { get; init; }
/// <summary>Estimated edge count.</summary>
public int EstimatedEdges { get; init; }
/// <summary>Estimated deletion count.</summary>
public int EstimatedDeletions { get; init; }
/// <summary>Estimated compressed size in bytes.</summary>
public long EstimatedSizeBytes { get; init; }
}

View File

@@ -0,0 +1,132 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Concelier.Federation.Models;
namespace StellaOps.Concelier.Federation.Serialization;
/// <summary>
/// Serialization utilities for federation bundles.
/// </summary>
public static class BundleSerializer
{
private static readonly JsonSerializerOptions NdjsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false,
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
};
private static readonly JsonSerializerOptions ManifestOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true,
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
};
/// <summary>
/// Serialize manifest to JSON bytes.
/// </summary>
public static byte[] SerializeManifest(BundleManifest manifest)
{
return JsonSerializer.SerializeToUtf8Bytes(manifest, ManifestOptions);
}
/// <summary>
/// Deserialize manifest from JSON bytes.
/// </summary>
public static BundleManifest? DeserializeManifest(ReadOnlySpan<byte> json)
{
return JsonSerializer.Deserialize<BundleManifest>(json, ManifestOptions);
}
/// <summary>
/// Serialize a canonical advisory to NDJSON line.
/// </summary>
public static byte[] SerializeCanonicalLine(CanonicalBundleLine canonical)
{
return JsonSerializer.SerializeToUtf8Bytes(canonical, NdjsonOptions);
}
/// <summary>
/// Serialize a source edge to NDJSON line.
/// </summary>
public static byte[] SerializeEdgeLine(EdgeBundleLine edge)
{
return JsonSerializer.SerializeToUtf8Bytes(edge, NdjsonOptions);
}
/// <summary>
/// Serialize a deletion marker to NDJSON line.
/// </summary>
public static byte[] SerializeDeletionLine(DeletionBundleLine deletion)
{
return JsonSerializer.SerializeToUtf8Bytes(deletion, NdjsonOptions);
}
/// <summary>
/// Write a canonical advisory as NDJSON line to stream.
/// </summary>
public static async ValueTask WriteCanonicalLineAsync(
Stream stream,
CanonicalBundleLine canonical,
CancellationToken ct = default)
{
var line = SerializeCanonicalLine(canonical);
await stream.WriteAsync(line, ct);
await stream.WriteAsync(Newline, ct);
}
/// <summary>
/// Write a source edge as NDJSON line to stream.
/// </summary>
public static async ValueTask WriteEdgeLineAsync(
Stream stream,
EdgeBundleLine edge,
CancellationToken ct = default)
{
var line = SerializeEdgeLine(edge);
await stream.WriteAsync(line, ct);
await stream.WriteAsync(Newline, ct);
}
/// <summary>
/// Write a deletion marker as NDJSON line to stream.
/// </summary>
public static async ValueTask WriteDeletionLineAsync(
Stream stream,
DeletionBundleLine deletion,
CancellationToken ct = default)
{
var line = SerializeDeletionLine(deletion);
await stream.WriteAsync(line, ct);
await stream.WriteAsync(Newline, ct);
}
/// <summary>
/// Deserialize canonical advisory from NDJSON line.
/// </summary>
public static CanonicalBundleLine? DeserializeCanonicalLine(ReadOnlySpan<byte> line)
{
return JsonSerializer.Deserialize<CanonicalBundleLine>(line, NdjsonOptions);
}
/// <summary>
/// Deserialize source edge from NDJSON line.
/// </summary>
public static EdgeBundleLine? DeserializeEdgeLine(ReadOnlySpan<byte> line)
{
return JsonSerializer.Deserialize<EdgeBundleLine>(line, NdjsonOptions);
}
/// <summary>
/// Deserialize deletion marker from NDJSON line.
/// </summary>
public static DeletionBundleLine? DeserializeDeletionLine(ReadOnlySpan<byte> line)
{
return JsonSerializer.Deserialize<DeletionBundleLine>(line, NdjsonOptions);
}
private static readonly byte[] Newline = "\n"u8.ToArray();
}

View File

@@ -0,0 +1,134 @@
namespace StellaOps.Concelier.Federation.Signing;
/// <summary>
/// Service for signing federation bundles with DSSE envelopes.
/// </summary>
public interface IBundleSigner
{
/// <summary>
/// Sign a bundle hash and return a DSSE envelope.
/// </summary>
/// <param name="bundleHash">SHA256 hash of the bundle content.</param>
/// <param name="siteId">Site identifier that produced the bundle.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Signing result with DSSE envelope if successful.</returns>
Task<BundleSigningResult> SignBundleAsync(
string bundleHash,
string siteId,
CancellationToken ct = default);
/// <summary>
/// Verify a bundle signature.
/// </summary>
/// <param name="bundleHash">SHA256 hash of the bundle content.</param>
/// <param name="signature">DSSE envelope to verify.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Verification result.</returns>
Task<BundleVerificationResult> VerifyBundleAsync(
string bundleHash,
BundleSignature signature,
CancellationToken ct = default);
/// <summary>
/// Check if signing is available (signer configured and accessible).
/// </summary>
Task<bool> IsAvailableAsync(CancellationToken ct = default);
}
/// <summary>
/// Result of bundle signing operation.
/// </summary>
public sealed record BundleSigningResult
{
/// <summary>Whether signing was successful.</summary>
public required bool Success { get; init; }
/// <summary>DSSE envelope (if successful).</summary>
public BundleSignature? Signature { get; init; }
/// <summary>Error message (if failed).</summary>
public string? ErrorMessage { get; init; }
/// <summary>Creates a successful result.</summary>
public static BundleSigningResult Signed(BundleSignature signature) => new()
{
Success = true,
Signature = signature
};
/// <summary>Creates a failed result.</summary>
public static BundleSigningResult Failed(string error) => new()
{
Success = false,
ErrorMessage = error
};
/// <summary>Creates a skipped result (signer not available).</summary>
public static BundleSigningResult Skipped() => new()
{
Success = false,
ErrorMessage = "Signing skipped - signer not available"
};
}
/// <summary>
/// Bundle DSSE signature.
/// </summary>
public sealed record BundleSignature
{
/// <summary>Payload type URI.</summary>
public required string PayloadType { get; init; }
/// <summary>Base64-encoded payload (bundle hash statement).</summary>
public required string Payload { get; init; }
/// <summary>Signatures.</summary>
public required IReadOnlyList<SignatureEntry> Signatures { get; init; }
}
/// <summary>
/// Individual signature entry in DSSE envelope.
/// </summary>
public sealed record SignatureEntry
{
/// <summary>Key ID used for signing.</summary>
public required string KeyId { get; init; }
/// <summary>Signature algorithm (e.g., ES256, RS256).</summary>
public required string Algorithm { get; init; }
/// <summary>Base64-encoded signature value.</summary>
public required string Signature { get; init; }
/// <summary>Certificate chain (PEM format), if available.</summary>
public IReadOnlyList<string>? CertificateChain { get; init; }
}
/// <summary>
/// Result of bundle verification operation.
/// </summary>
public sealed record BundleVerificationResult
{
/// <summary>Whether verification was successful.</summary>
public required bool IsValid { get; init; }
/// <summary>Verified signer identity.</summary>
public string? SignerIdentity { get; init; }
/// <summary>Error message (if invalid).</summary>
public string? ErrorMessage { get; init; }
/// <summary>Creates a valid result.</summary>
public static BundleVerificationResult Valid(string? identity = null) => new()
{
IsValid = true,
SignerIdentity = identity
};
/// <summary>Creates an invalid result.</summary>
public static BundleVerificationResult Invalid(string error) => new()
{
IsValid = false,
ErrorMessage = error
};
}

View File

@@ -0,0 +1,38 @@
namespace StellaOps.Concelier.Federation.Signing;
/// <summary>
/// No-op bundle signer for when signing is not configured.
/// </summary>
public sealed class NullBundleSigner : IBundleSigner
{
/// <summary>
/// Singleton instance.
/// </summary>
public static readonly NullBundleSigner Instance = new();
private NullBundleSigner() { }
/// <inheritdoc />
public Task<BundleSigningResult> SignBundleAsync(
string bundleHash,
string siteId,
CancellationToken ct = default)
{
return Task.FromResult(BundleSigningResult.Skipped());
}
/// <inheritdoc />
public Task<BundleVerificationResult> VerifyBundleAsync(
string bundleHash,
BundleSignature signature,
CancellationToken ct = default)
{
return Task.FromResult(BundleVerificationResult.Invalid("Signing not configured"));
}
/// <inheritdoc />
public Task<bool> IsAvailableAsync(CancellationToken ct = default)
{
return Task.FromResult(false);
}
}

View File

@@ -0,0 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="ZstdSharp.Port" Version="0.8.6" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Postgres\StellaOps.Concelier.Storage.Postgres.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Provenance\StellaOps.Provenance.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,167 @@
// -----------------------------------------------------------------------------
// IInterestScoreRepository.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-002
// Description: Repository interface for interest score persistence
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Interest.Models;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Repository for persisting and retrieving interest scores.
/// </summary>
public interface IInterestScoreRepository
{
/// <summary>
/// Gets the interest score for a canonical advisory.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The score, or null if not found.</returns>
Task<InterestScore?> GetByCanonicalIdAsync(Guid canonicalId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets interest scores for multiple canonical advisories.
/// </summary>
/// <param name="canonicalIds">Canonical advisory IDs.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Dictionary of canonical ID to score.</returns>
Task<IReadOnlyDictionary<Guid, InterestScore>> GetByCanonicalIdsAsync(
IEnumerable<Guid> canonicalIds,
CancellationToken cancellationToken = default);
/// <summary>
/// Saves or updates an interest score.
/// </summary>
/// <param name="score">The score to save.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SaveAsync(InterestScore score, CancellationToken cancellationToken = default);
/// <summary>
/// Saves or updates multiple interest scores.
/// </summary>
/// <param name="scores">The scores to save.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SaveManyAsync(IEnumerable<InterestScore> scores, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes the interest score for a canonical advisory.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task DeleteAsync(Guid canonicalId, CancellationToken cancellationToken = default);
/// <summary>
/// Gets canonicals with scores below the threshold.
/// </summary>
/// <param name="threshold">Score threshold.</param>
/// <param name="minAge">Minimum age since computation.</param>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of canonical IDs with low scores.</returns>
Task<IReadOnlyList<Guid>> GetLowScoreCanonicalIdsAsync(
double threshold,
TimeSpan minAge,
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets canonicals with scores above the threshold (for stub restoration).
/// </summary>
/// <param name="threshold">Score threshold.</param>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of canonical IDs with high scores.</returns>
Task<IReadOnlyList<Guid>> GetHighScoreCanonicalIdsAsync(
double threshold,
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets top N canonicals by interest score.
/// </summary>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of top interest scores.</returns>
Task<IReadOnlyList<InterestScore>> GetTopScoresAsync(
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all interest scores with pagination.
/// </summary>
/// <param name="offset">Offset for pagination.</param>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of interest scores.</returns>
Task<IReadOnlyList<InterestScore>> GetAllAsync(
int offset,
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets score distribution statistics (alias for GetDistributionAsync).
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Score distribution.</returns>
Task<ScoreDistribution> GetScoreDistributionAsync(
CancellationToken cancellationToken = default);
/// <summary>
/// Gets canonicals that need score recalculation.
/// </summary>
/// <param name="staleAfter">Consider stale if computed before this time.</param>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of canonical IDs needing recalculation.</returns>
Task<IReadOnlyList<Guid>> GetStaleCanonicalIdsAsync(
DateTimeOffset staleAfter,
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Counts total scored canonicals.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Total count.</returns>
Task<long> CountAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Gets score distribution statistics.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Score distribution.</returns>
Task<ScoreDistribution> GetDistributionAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Score distribution statistics.
/// </summary>
public sealed record ScoreDistribution
{
/// <summary>Total scored canonicals.</summary>
public long TotalCount { get; init; }
/// <summary>Alias for TotalCount (int version for API compatibility).</summary>
public int Total => (int)Math.Min(TotalCount, int.MaxValue);
/// <summary>Count with high scores (>= 0.7).</summary>
public long HighCount { get; init; }
/// <summary>Count with medium scores (0.4 - 0.7).</summary>
public long MediumCount { get; init; }
/// <summary>Count with low scores (0.2 - 0.4).</summary>
public long LowCount { get; init; }
/// <summary>Count with no interest (< 0.2).</summary>
public long NoneCount { get; init; }
/// <summary>Average score.</summary>
public double AverageScore { get; init; }
/// <summary>Median score.</summary>
public double MedianScore { get; init; }
}

View File

@@ -0,0 +1,121 @@
// -----------------------------------------------------------------------------
// IInterestScoringService.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-005
// Description: Service interface for interest scoring operations
// -----------------------------------------------------------------------------
using StellaOps.Concelier.Interest.Models;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Service for computing and managing interest scores for canonical advisories.
/// </summary>
public interface IInterestScoringService
{
/// <summary>
/// Computes the interest score for a canonical advisory.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Computed interest score.</returns>
Task<InterestScore> ComputeScoreAsync(Guid canonicalId, CancellationToken cancellationToken = default);
/// <summary>
/// Computes the interest score from explicit input signals.
/// </summary>
/// <param name="input">Score input signals.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Computed interest score.</returns>
Task<InterestScore> ComputeScoreAsync(InterestScoreInput input, CancellationToken cancellationToken = default);
/// <summary>
/// Gets the current interest score (from cache or database).
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Current score, or null if not found.</returns>
Task<InterestScore?> GetScoreAsync(Guid canonicalId, CancellationToken cancellationToken = default);
/// <summary>
/// Updates the interest score and persists to database/cache.
/// </summary>
/// <param name="score">Score to update.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UpdateScoreAsync(InterestScore score, CancellationToken cancellationToken = default);
/// <summary>
/// Batch updates scores for multiple canonicals.
/// </summary>
/// <param name="canonicalIds">Canonical advisory IDs to update.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of scores updated.</returns>
Task<int> BatchUpdateAsync(IEnumerable<Guid> canonicalIds, CancellationToken cancellationToken = default);
/// <summary>
/// Triggers full recalculation for all active canonicals.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of scores recalculated.</returns>
Task<int> RecalculateAllAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Degrades low-interest canonicals to stub status.
/// </summary>
/// <param name="threshold">Score threshold below which to degrade.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of canonicals degraded.</returns>
Task<int> DegradeToStubsAsync(double threshold, CancellationToken cancellationToken = default);
/// <summary>
/// Restores stubs to active status when score increases.
/// </summary>
/// <param name="threshold">Score threshold above which to restore.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of stubs restored.</returns>
Task<int> RestoreFromStubsAsync(double threshold, CancellationToken cancellationToken = default);
/// <summary>
/// Gets top N canonicals by interest score.
/// </summary>
/// <param name="limit">Maximum results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Top interest scores.</returns>
Task<IReadOnlyList<InterestScore>> GetTopScoresAsync(int limit = 100, CancellationToken cancellationToken = default);
/// <summary>
/// Gets score distribution statistics.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Score distribution.</returns>
Task<ScoreDistribution> GetDistributionAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Records that a canonical was seen in a build/scan.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="sbomDigest">SBOM digest.</param>
/// <param name="purl">Matched PURL.</param>
/// <param name="isReachable">Whether code is reachable.</param>
/// <param name="isDeployed">Whether deployed in production.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task RecordSbomMatchAsync(
Guid canonicalId,
string sbomDigest,
string purl,
bool isReachable = false,
bool isDeployed = false,
CancellationToken cancellationToken = default);
/// <summary>
/// Records a VEX statement affecting a canonical.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="statement">VEX statement.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task RecordVexStatementAsync(
Guid canonicalId,
VexStatement statement,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,175 @@
// -----------------------------------------------------------------------------
// InterestScoreCalculator.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-007, ISCORE-8200-008, ISCORE-8200-009, ISCORE-8200-010, ISCORE-8200-011, ISCORE-8200-012
// Description: Calculator for interest scores based on weighted factors
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Interest.Models;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Calculates interest scores for canonical advisories based on weighted factors.
/// </summary>
/// <remarks>
/// Factors:
/// 1. in_sbom (30%): Advisory affects a package in org's SBOM
/// 2. reachable (25%): Vulnerable code is reachable from entrypoint
/// 3. deployed (20%): Package is deployed in production
/// 4. no_vex_na (15%): No VEX "not_affected" statement exists
/// 5. recent (10%): Advisory was recently seen in builds
/// </remarks>
public sealed class InterestScoreCalculator
{
private readonly InterestScoreWeights _weights;
/// <summary>
/// Initializes a new instance of <see cref="InterestScoreCalculator"/>.
/// </summary>
public InterestScoreCalculator(IOptions<InterestScoreOptions> options)
{
_weights = options.Value.Weights;
}
/// <summary>
/// Initializes with explicit weights (for testing).
/// </summary>
public InterestScoreCalculator(InterestScoreWeights weights)
{
_weights = weights;
}
/// <summary>
/// Calculates the interest score for a canonical advisory.
/// </summary>
/// <param name="input">Input signals for scoring.</param>
/// <returns>Calculated interest score.</returns>
public InterestScore Calculate(InterestScoreInput input)
{
var reasons = new List<string>();
double score = 0.0;
// Factor 1: In SBOM (30%)
score += CalculateInSbomFactor(input, reasons);
// Factor 2: Reachable from entrypoint (25%)
score += CalculateReachableFactor(input, reasons);
// Factor 3: Deployed in production (20%)
score += CalculateDeployedFactor(input, reasons);
// Factor 4: No VEX Not-Affected (15%)
score += CalculateVexFactor(input, reasons);
// Factor 5: Age decay (10%)
score += CalculateRecentFactor(input, reasons);
return new InterestScore
{
CanonicalId = input.CanonicalId,
Score = Math.Round(Math.Min(score, 1.0), 2),
Reasons = reasons.ToArray(),
LastSeenInBuild = input.SbomMatches
.OrderByDescending(m => m.ScannedAt)
.FirstOrDefault()
?.SbomDigest is not null
? Guid.TryParse(input.SbomMatches.First().ArtifactId, out var id) ? id : null
: null,
ComputedAt = DateTimeOffset.UtcNow
};
}
/// <summary>
/// Factor 1: In SBOM - advisory affects a package in org's SBOM.
/// </summary>
private double CalculateInSbomFactor(InterestScoreInput input, List<string> reasons)
{
if (input.SbomMatches.Count > 0)
{
reasons.Add("in_sbom");
return _weights.InSbom;
}
return 0.0;
}
/// <summary>
/// Factor 2: Reachable - vulnerable code is reachable from entrypoint.
/// </summary>
private double CalculateReachableFactor(InterestScoreInput input, List<string> reasons)
{
if (input.SbomMatches.Any(m => m.IsReachable))
{
reasons.Add("reachable");
return _weights.Reachable;
}
return 0.0;
}
/// <summary>
/// Factor 3: Deployed - package is deployed in production.
/// </summary>
private double CalculateDeployedFactor(InterestScoreInput input, List<string> reasons)
{
if (input.SbomMatches.Any(m => m.IsDeployed))
{
reasons.Add("deployed");
return _weights.Deployed;
}
return 0.0;
}
/// <summary>
/// Factor 4: No VEX Not-Affected - no VEX statement marking as not affected.
/// </summary>
private double CalculateVexFactor(InterestScoreInput input, List<string> reasons)
{
// If there's a NotAffected VEX, exclude this factor
var hasNotAffected = input.VexStatements.Any(v => v.Status == VexStatus.NotAffected);
if (!hasNotAffected)
{
reasons.Add("no_vex_na");
return _weights.NoVexNotAffected;
}
return 0.0;
}
/// <summary>
/// Factor 5: Recent - advisory was recently seen in builds.
/// Uses linear decay over 1 year.
/// </summary>
private double CalculateRecentFactor(InterestScoreInput input, List<string> reasons)
{
if (!input.LastSeenInBuild.HasValue)
{
return 0.0;
}
var age = DateTimeOffset.UtcNow - input.LastSeenInBuild.Value;
var decayFactor = Math.Max(0, 1 - (age.TotalDays / 365));
var ageScore = _weights.Recent * decayFactor;
if (decayFactor > 0.5)
{
reasons.Add("recent");
}
return ageScore;
}
/// <summary>
/// Calculates score from runtime signals (bonus factor).
/// </summary>
public double CalculateRuntimeBonus(InterestScoreInput input)
{
if (input.RuntimeSignals.Count == 0)
{
return 0.0;
}
// Runtime signals provide additional confidence boost
var avgConfidence = input.RuntimeSignals.Average(s => s.Confidence);
return 0.05 * avgConfidence; // Max 5% bonus
}
}

View File

@@ -0,0 +1,110 @@
// -----------------------------------------------------------------------------
// InterestScoreOptions.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-007
// Description: Configuration options for interest scoring
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Configuration options for the interest scoring service.
/// </summary>
public sealed class InterestScoreOptions
{
/// <summary>Configuration section name.</summary>
public const string SectionName = "Concelier:Interest";
/// <summary>Factor weights for score calculation.</summary>
public InterestScoreWeights Weights { get; set; } = new();
/// <summary>Stub degradation policy.</summary>
public StubDegradationPolicy DegradationPolicy { get; set; } = new();
/// <summary>Scoring job configuration.</summary>
public ScoringJobOptions Job { get; set; } = new();
/// <summary>Whether to cache scores in Valkey.</summary>
public bool EnableCache { get; set; } = true;
/// <summary>Score cache TTL.</summary>
public TimeSpan CacheTtl { get; set; } = TimeSpan.FromHours(1);
}
/// <summary>
/// Weights for each factor in the interest score calculation.
/// All weights should sum to 1.0.
/// </summary>
public sealed class InterestScoreWeights
{
/// <summary>Weight for "in SBOM" factor (default 30%).</summary>
public double InSbom { get; set; } = 0.30;
/// <summary>Weight for "reachable from entrypoint" factor (default 25%).</summary>
public double Reachable { get; set; } = 0.25;
/// <summary>Weight for "deployed in production" factor (default 20%).</summary>
public double Deployed { get; set; } = 0.20;
/// <summary>Weight for "no VEX not-affected" factor (default 15%).</summary>
public double NoVexNotAffected { get; set; } = 0.15;
/// <summary>Weight for "recently seen" factor (default 10%).</summary>
public double Recent { get; set; } = 0.10;
/// <summary>
/// Validates that weights are reasonable.
/// </summary>
public bool IsValid()
{
var total = InSbom + Reachable + Deployed + NoVexNotAffected + Recent;
return total >= 0.99 && total <= 1.01 &&
InSbom >= 0 && Reachable >= 0 && Deployed >= 0 &&
NoVexNotAffected >= 0 && Recent >= 0;
}
}
/// <summary>
/// Policy for degrading low-interest advisories to stubs.
/// </summary>
public sealed class StubDegradationPolicy
{
/// <summary>Score below which canonicals become stubs.</summary>
public double DegradationThreshold { get; set; } = 0.2;
/// <summary>Score above which stubs are restored to active.</summary>
public double RestorationThreshold { get; set; } = 0.4;
/// <summary>Minimum age (days) before degradation.</summary>
public int MinAgeDays { get; set; } = 30;
/// <summary>Maximum stubs to process per job run.</summary>
public int BatchSize { get; set; } = 1000;
/// <summary>Whether stub degradation is enabled.</summary>
public bool Enabled { get; set; } = true;
/// <summary>Interval between stub degradation job runs.</summary>
public TimeSpan JobInterval { get; set; } = TimeSpan.FromHours(6);
}
/// <summary>
/// Configuration for the scoring recalculation job.
/// </summary>
public sealed class ScoringJobOptions
{
/// <summary>Whether the scoring job is enabled.</summary>
public bool Enabled { get; set; } = true;
/// <summary>Interval between job runs.</summary>
public TimeSpan Interval { get; set; } = TimeSpan.FromHours(1);
/// <summary>Hour (UTC) for full recalculation.</summary>
public int FullRecalculationHour { get; set; } = 3;
/// <summary>Batch size for incremental updates.</summary>
public int IncrementalBatchSize { get; set; } = 500;
/// <summary>Batch size for full recalculation.</summary>
public int FullRecalculationBatchSize { get; set; } = 1000;
}

View File

@@ -0,0 +1,172 @@
// -----------------------------------------------------------------------------
// InterestScoringMetrics.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-022
// Description: OpenTelemetry metrics for interest scoring
// -----------------------------------------------------------------------------
using System.Diagnostics;
using System.Diagnostics.Metrics;
using StellaOps.Concelier.Interest.Jobs;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Metrics instrumentation for the interest scoring service.
/// </summary>
public sealed class InterestScoringMetrics : IDisposable
{
/// <summary>
/// Activity source name for scoring operations.
/// </summary>
public const string ActivitySourceName = "StellaOps.Concelier.Interest";
/// <summary>
/// Meter name for scoring metrics.
/// </summary>
public const string MeterName = "StellaOps.Concelier.Interest";
private readonly Meter _meter;
private readonly Counter<long> _scoresComputedCounter;
private readonly Counter<long> _stubDegradationsCounter;
private readonly Counter<long> _stubRestorationsCounter;
private readonly Counter<long> _jobErrorsCounter;
private readonly Histogram<double> _scoreValueHistogram;
private readonly Histogram<double> _jobDurationHistogram;
/// <summary>
/// Activity source for tracing scoring operations.
/// </summary>
public static ActivitySource ActivitySource { get; } = new(ActivitySourceName, "1.0.0");
/// <summary>
/// Initializes a new instance of <see cref="InterestScoringMetrics"/>.
/// </summary>
public InterestScoringMetrics()
{
_meter = new Meter(MeterName, "1.0.0");
_scoresComputedCounter = _meter.CreateCounter<long>(
"concelier_interest_score_computed_total",
unit: "{scores}",
description: "Total number of interest scores computed");
_stubDegradationsCounter = _meter.CreateCounter<long>(
"concelier_stub_degradations_total",
unit: "{degradations}",
description: "Total number of advisories degraded to stub status");
_stubRestorationsCounter = _meter.CreateCounter<long>(
"concelier_stub_restorations_total",
unit: "{restorations}",
description: "Total number of stubs restored to active status");
_jobErrorsCounter = _meter.CreateCounter<long>(
"concelier_scoring_job_errors_total",
unit: "{errors}",
description: "Total number of scoring job errors");
_scoreValueHistogram = _meter.CreateHistogram<double>(
"concelier_interest_score_distribution",
unit: "{score}",
description: "Distribution of computed interest scores");
_jobDurationHistogram = _meter.CreateHistogram<double>(
"concelier_scoring_job_duration_seconds",
unit: "s",
description: "Duration of scoring job executions");
}
/// <summary>
/// Records a computed score.
/// </summary>
/// <param name="score">The computed score value (0.0-1.0).</param>
public void RecordScoreComputed(double score)
{
_scoresComputedCounter.Add(1);
_scoreValueHistogram.Record(score);
}
/// <summary>
/// Records multiple computed scores.
/// </summary>
/// <param name="count">Number of scores computed.</param>
public void RecordScoresComputed(int count)
{
_scoresComputedCounter.Add(count);
}
/// <summary>
/// Records stub degradations.
/// </summary>
/// <param name="count">Number of advisories degraded.</param>
public void RecordDegradations(int count)
{
_stubDegradationsCounter.Add(count);
}
/// <summary>
/// Records stub restorations.
/// </summary>
/// <param name="count">Number of stubs restored.</param>
public void RecordRestorations(int count)
{
_stubRestorationsCounter.Add(count);
}
/// <summary>
/// Records a job error.
/// </summary>
public void RecordJobError()
{
_jobErrorsCounter.Add(1);
}
/// <summary>
/// Records job completion metrics.
/// </summary>
/// <param name="mode">Recalculation mode.</param>
/// <param name="durationSeconds">Job duration in seconds.</param>
/// <param name="scoresUpdated">Number of scores updated.</param>
/// <param name="degraded">Number of advisories degraded.</param>
/// <param name="restored">Number of stubs restored.</param>
public void RecordJobCompletion(
RecalculationMode mode,
double durationSeconds,
int scoresUpdated,
int degraded,
int restored)
{
var modeTag = new KeyValuePair<string, object?>("mode", mode.ToString().ToLowerInvariant());
_jobDurationHistogram.Record(durationSeconds, modeTag);
_scoresComputedCounter.Add(scoresUpdated, modeTag);
if (degraded > 0)
{
_stubDegradationsCounter.Add(degraded);
}
if (restored > 0)
{
_stubRestorationsCounter.Add(restored);
}
}
/// <summary>
/// Starts an activity for tracing a scoring operation.
/// </summary>
/// <param name="operationName">Name of the operation.</param>
/// <returns>The activity, or null if tracing is disabled.</returns>
public static Activity? StartActivity(string operationName)
{
return ActivitySource.StartActivity(operationName, ActivityKind.Internal);
}
/// <inheritdoc />
public void Dispose()
{
_meter.Dispose();
ActivitySource.Dispose();
}
}

View File

@@ -0,0 +1,343 @@
// -----------------------------------------------------------------------------
// InterestScoringService.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-014, ISCORE-8200-015, ISCORE-8200-016, ISCORE-8200-017
// Description: Implementation of interest scoring service
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Cache.Valkey;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.Interest.Models;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Implementation of the interest scoring service.
/// </summary>
public sealed class InterestScoringService : IInterestScoringService
{
private readonly IInterestScoreRepository _repository;
private readonly ICanonicalAdvisoryStore? _advisoryStore;
private readonly IAdvisoryCacheService? _cacheService;
private readonly InterestScoreCalculator _calculator;
private readonly InterestScoreOptions _options;
private readonly ILogger<InterestScoringService>? _logger;
// In-memory signal stores (in production, would be persisted)
private readonly Dictionary<Guid, List<SbomMatch>> _sbomMatches = new();
private readonly Dictionary<Guid, List<VexStatement>> _vexStatements = new();
private readonly object _signalLock = new();
/// <summary>
/// Initializes a new instance of <see cref="InterestScoringService"/>.
/// </summary>
public InterestScoringService(
IInterestScoreRepository repository,
InterestScoreCalculator calculator,
IOptions<InterestScoreOptions> options,
ICanonicalAdvisoryStore? advisoryStore = null,
IAdvisoryCacheService? cacheService = null,
ILogger<InterestScoringService>? logger = null)
{
_repository = repository;
_advisoryStore = advisoryStore;
_cacheService = cacheService;
_calculator = calculator;
_options = options.Value;
_logger = logger;
}
/// <inheritdoc />
public async Task<InterestScore> ComputeScoreAsync(Guid canonicalId, CancellationToken cancellationToken = default)
{
var input = await BuildInputAsync(canonicalId, cancellationToken).ConfigureAwait(false);
return _calculator.Calculate(input);
}
/// <inheritdoc />
public Task<InterestScore> ComputeScoreAsync(InterestScoreInput input, CancellationToken cancellationToken = default)
{
var score = _calculator.Calculate(input);
return Task.FromResult(score);
}
/// <inheritdoc />
public async Task<InterestScore?> GetScoreAsync(Guid canonicalId, CancellationToken cancellationToken = default)
{
return await _repository.GetByCanonicalIdAsync(canonicalId, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task UpdateScoreAsync(InterestScore score, CancellationToken cancellationToken = default)
{
await _repository.SaveAsync(score, cancellationToken).ConfigureAwait(false);
// Update cache if available
if (_cacheService is not null && _options.EnableCache)
{
await _cacheService.UpdateScoreAsync(score.CanonicalId.ToString(), score.Score, cancellationToken)
.ConfigureAwait(false);
}
_logger?.LogDebug("Updated interest score for {CanonicalId}: {Score}", score.CanonicalId, score.Score);
}
/// <inheritdoc />
public async Task<int> BatchUpdateAsync(IEnumerable<Guid> canonicalIds, CancellationToken cancellationToken = default)
{
var ids = canonicalIds.ToList();
var scores = new List<InterestScore>(ids.Count);
foreach (var id in ids)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var score = await ComputeScoreAsync(id, cancellationToken).ConfigureAwait(false);
scores.Add(score);
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to compute score for {CanonicalId}", id);
}
}
if (scores.Count > 0)
{
await _repository.SaveManyAsync(scores, cancellationToken).ConfigureAwait(false);
// Update cache
if (_cacheService is not null && _options.EnableCache)
{
foreach (var score in scores)
{
await _cacheService.UpdateScoreAsync(score.CanonicalId.ToString(), score.Score, cancellationToken)
.ConfigureAwait(false);
}
}
}
_logger?.LogInformation("Batch updated {Count} interest scores", scores.Count);
return scores.Count;
}
/// <inheritdoc />
public async Task<int> RecalculateAllAsync(CancellationToken cancellationToken = default)
{
_logger?.LogInformation("Starting full interest score recalculation");
if (_advisoryStore is null)
{
_logger?.LogWarning("Cannot recalculate all: advisory store not available");
return 0;
}
var totalUpdated = 0;
var offset = 0;
var batchSize = _options.Job.FullRecalculationBatchSize;
while (!cancellationToken.IsCancellationRequested)
{
var result = await _advisoryStore.QueryAsync(
new CanonicalQueryOptions { Offset = offset, Limit = batchSize, Status = CanonicalStatus.Active },
cancellationToken).ConfigureAwait(false);
if (result.Items.Count == 0)
{
break;
}
var ids = result.Items.Select(c => c.Id).ToList();
var updated = await BatchUpdateAsync(ids, cancellationToken).ConfigureAwait(false);
totalUpdated += updated;
offset += batchSize;
_logger?.LogDebug("Recalculated {Count} scores (total: {Total})", updated, totalUpdated);
}
_logger?.LogInformation("Completed full recalculation: {Total} scores updated", totalUpdated);
return totalUpdated;
}
/// <inheritdoc />
public async Task<int> DegradeToStubsAsync(double threshold, CancellationToken cancellationToken = default)
{
if (!_options.DegradationPolicy.Enabled)
{
return 0;
}
if (_advisoryStore is null)
{
_logger?.LogWarning("Cannot degrade to stubs: advisory store not available");
return 0;
}
var minAge = TimeSpan.FromDays(_options.DegradationPolicy.MinAgeDays);
var candidates = await _repository.GetLowScoreCanonicalIdsAsync(
threshold, minAge, _options.DegradationPolicy.BatchSize, cancellationToken)
.ConfigureAwait(false);
var degraded = 0;
foreach (var id in candidates)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await _advisoryStore.UpdateStatusAsync(id, CanonicalStatus.Stub, cancellationToken)
.ConfigureAwait(false);
degraded++;
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to degrade {CanonicalId} to stub", id);
}
}
if (degraded > 0)
{
_logger?.LogInformation("Degraded {Count} low-interest advisories to stubs", degraded);
}
return degraded;
}
/// <inheritdoc />
public async Task<int> RestoreFromStubsAsync(double threshold, CancellationToken cancellationToken = default)
{
if (!_options.DegradationPolicy.Enabled)
{
return 0;
}
if (_advisoryStore is null)
{
_logger?.LogWarning("Cannot restore from stubs: advisory store not available");
return 0;
}
var candidates = await _repository.GetHighScoreCanonicalIdsAsync(
threshold, _options.DegradationPolicy.BatchSize, cancellationToken)
.ConfigureAwait(false);
var restored = 0;
foreach (var id in candidates)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var advisory = await _advisoryStore.GetByIdAsync(id, cancellationToken).ConfigureAwait(false);
if (advisory?.Status == CanonicalStatus.Stub)
{
await _advisoryStore.UpdateStatusAsync(id, CanonicalStatus.Active, cancellationToken)
.ConfigureAwait(false);
restored++;
}
}
catch (Exception ex)
{
_logger?.LogWarning(ex, "Failed to restore {CanonicalId} from stub", id);
}
}
if (restored > 0)
{
_logger?.LogInformation("Restored {Count} stubs to active status", restored);
}
return restored;
}
/// <inheritdoc />
public async Task<IReadOnlyList<InterestScore>> GetTopScoresAsync(int limit = 100, CancellationToken cancellationToken = default)
{
return await _repository.GetTopScoresAsync(limit, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<ScoreDistribution> GetDistributionAsync(CancellationToken cancellationToken = default)
{
return await _repository.GetDistributionAsync(cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public Task RecordSbomMatchAsync(
Guid canonicalId,
string sbomDigest,
string purl,
bool isReachable = false,
bool isDeployed = false,
CancellationToken cancellationToken = default)
{
var match = new SbomMatch
{
SbomDigest = sbomDigest,
Purl = purl,
IsReachable = isReachable,
IsDeployed = isDeployed,
ScannedAt = DateTimeOffset.UtcNow
};
lock (_signalLock)
{
if (!_sbomMatches.TryGetValue(canonicalId, out var matches))
{
matches = [];
_sbomMatches[canonicalId] = matches;
}
matches.Add(match);
}
_logger?.LogDebug("Recorded SBOM match for {CanonicalId}: {Purl}", canonicalId, purl);
return Task.CompletedTask;
}
/// <inheritdoc />
public Task RecordVexStatementAsync(
Guid canonicalId,
VexStatement statement,
CancellationToken cancellationToken = default)
{
lock (_signalLock)
{
if (!_vexStatements.TryGetValue(canonicalId, out var statements))
{
statements = [];
_vexStatements[canonicalId] = statements;
}
statements.Add(statement);
}
_logger?.LogDebug("Recorded VEX statement for {CanonicalId}: {Status}", canonicalId, statement.Status);
return Task.CompletedTask;
}
private Task<InterestScoreInput> BuildInputAsync(Guid canonicalId, CancellationToken cancellationToken)
{
List<SbomMatch> sbomMatches;
List<VexStatement> vexStatements;
lock (_signalLock)
{
sbomMatches = _sbomMatches.TryGetValue(canonicalId, out var sm) ? [.. sm] : [];
vexStatements = _vexStatements.TryGetValue(canonicalId, out var vs) ? [.. vs] : [];
}
var input = new InterestScoreInput
{
CanonicalId = canonicalId,
SbomMatches = sbomMatches,
VexStatements = vexStatements,
LastSeenInBuild = sbomMatches.Count > 0
? sbomMatches.Max(m => m.ScannedAt)
: null
};
return Task.FromResult(input);
}
}

View File

@@ -0,0 +1,186 @@
// -----------------------------------------------------------------------------
// InterestScoreRecalculationJob.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-019, ISCORE-8200-020, ISCORE-8200-021, ISCORE-8200-022
// Description: Background job for interest score recalculation
// -----------------------------------------------------------------------------
using System.Diagnostics;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Concelier.Interest.Jobs;
/// <summary>
/// Background job that periodically recalculates interest scores.
/// </summary>
/// <remarks>
/// Modes:
/// - Incremental: Updates scores for recently changed advisories (hourly)
/// - Full: Recalculates all active advisories (nightly at configured hour)
/// </remarks>
public sealed class InterestScoreRecalculationJob : BackgroundService
{
private readonly IServiceProvider _services;
private readonly InterestScoreOptions _options;
private readonly ILogger<InterestScoreRecalculationJob>? _logger;
private readonly InterestScoringMetrics _metrics;
private DateTimeOffset _lastFullRecalculation = DateTimeOffset.MinValue;
/// <summary>
/// Initializes a new instance of <see cref="InterestScoreRecalculationJob"/>.
/// </summary>
public InterestScoreRecalculationJob(
IServiceProvider services,
IOptions<InterestScoreOptions> options,
InterestScoringMetrics metrics,
ILogger<InterestScoreRecalculationJob>? logger = null)
{
_services = services;
_options = options.Value;
_metrics = metrics;
_logger = logger;
}
/// <inheritdoc />
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
if (!_options.Job.Enabled)
{
_logger?.LogInformation("Interest score recalculation job is disabled");
return;
}
_logger?.LogInformation(
"Interest score recalculation job started (interval: {Interval}, full recalc hour: {Hour} UTC)",
_options.Job.Interval,
_options.Job.FullRecalculationHour);
// Initial delay to let the application fully start
await Task.Delay(TimeSpan.FromSeconds(30), stoppingToken).ConfigureAwait(false);
while (!stoppingToken.IsCancellationRequested)
{
try
{
await RunJobIterationAsync(stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
_logger?.LogInformation("Interest score recalculation job cancelled");
break;
}
catch (Exception ex)
{
_logger?.LogError(ex, "Interest score recalculation job failed");
_metrics.RecordJobError();
}
await Task.Delay(_options.Job.Interval, stoppingToken).ConfigureAwait(false);
}
}
private async Task RunJobIterationAsync(CancellationToken cancellationToken)
{
var sw = Stopwatch.StartNew();
var mode = DetermineRecalculationMode();
using var activity = InterestScoringMetrics.ActivitySource.StartActivity(
$"InterestScoreRecalculation.{mode}");
activity?.SetTag("mode", mode.ToString().ToLowerInvariant());
_logger?.LogInformation("Starting {Mode} interest score recalculation", mode);
await using var scope = _services.CreateAsyncScope();
var scoringService = scope.ServiceProvider.GetRequiredService<IInterestScoringService>();
int updated;
int degraded = 0;
int restored = 0;
if (mode == RecalculationMode.Full)
{
updated = await scoringService.RecalculateAllAsync(cancellationToken).ConfigureAwait(false);
_lastFullRecalculation = DateTimeOffset.UtcNow;
}
else
{
// Incremental mode: get changed advisory IDs and update them
var changedIds = await GetChangedCanonicalIdsAsync(scope.ServiceProvider, cancellationToken)
.ConfigureAwait(false);
updated = await scoringService.BatchUpdateAsync(changedIds, cancellationToken).ConfigureAwait(false);
}
// Run stub degradation if enabled
if (_options.DegradationPolicy.Enabled)
{
degraded = await scoringService.DegradeToStubsAsync(
_options.DegradationPolicy.DegradationThreshold,
cancellationToken).ConfigureAwait(false);
restored = await scoringService.RestoreFromStubsAsync(
_options.DegradationPolicy.RestorationThreshold,
cancellationToken).ConfigureAwait(false);
}
sw.Stop();
_metrics.RecordJobCompletion(mode, sw.Elapsed.TotalSeconds, updated, degraded, restored);
_logger?.LogInformation(
"Completed {Mode} recalculation in {Duration:F2}s: {Updated} updated, {Degraded} degraded, {Restored} restored",
mode, sw.Elapsed.TotalSeconds, updated, degraded, restored);
}
private RecalculationMode DetermineRecalculationMode()
{
var now = DateTimeOffset.UtcNow;
// Full recalculation conditions:
// 1. It's the configured hour (e.g., 3 AM UTC)
// 2. We haven't done a full recalculation today
if (now.Hour == _options.Job.FullRecalculationHour &&
now.Date != _lastFullRecalculation.Date)
{
return RecalculationMode.Full;
}
return RecalculationMode.Incremental;
}
private async Task<IReadOnlyList<Guid>> GetChangedCanonicalIdsAsync(
IServiceProvider services,
CancellationToken cancellationToken)
{
// Get repository to find stale scores
var repository = services.GetService<IInterestScoreRepository>();
if (repository is null)
{
_logger?.LogWarning("IInterestScoreRepository not available for incremental updates");
return [];
}
// Find scores that haven't been updated recently
var staleAfter = DateTimeOffset.UtcNow - _options.Job.Interval;
return await repository.GetStaleCanonicalIdsAsync(
staleAfter,
_options.Job.IncrementalBatchSize,
cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Recalculation mode.
/// </summary>
public enum RecalculationMode
{
/// <summary>Update only changed/stale advisories.</summary>
Incremental,
/// <summary>Recalculate all active advisories.</summary>
Full
}

View File

@@ -0,0 +1,127 @@
// -----------------------------------------------------------------------------
// StubDegradationJob.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-027
// Description: Background job for stub degradation and restoration
// -----------------------------------------------------------------------------
using System.Diagnostics;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Concelier.Interest.Jobs;
/// <summary>
/// Background job that periodically degrades low-interest advisories to stubs
/// and restores stubs when their interest score increases.
/// </summary>
/// <remarks>
/// This job runs independently from the scoring recalculation job, allowing
/// different scheduling for degradation cleanup vs. score updates.
/// </remarks>
public sealed class StubDegradationJob : BackgroundService
{
private readonly IServiceProvider _services;
private readonly InterestScoreOptions _options;
private readonly ILogger<StubDegradationJob>? _logger;
private readonly InterestScoringMetrics _metrics;
/// <summary>
/// Initializes a new instance of <see cref="StubDegradationJob"/>.
/// </summary>
public StubDegradationJob(
IServiceProvider services,
IOptions<InterestScoreOptions> options,
InterestScoringMetrics metrics,
ILogger<StubDegradationJob>? logger = null)
{
_services = services;
_options = options.Value;
_metrics = metrics;
_logger = logger;
}
/// <inheritdoc />
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
if (!_options.DegradationPolicy.Enabled)
{
_logger?.LogInformation("Stub degradation job is disabled");
return;
}
_logger?.LogInformation(
"Stub degradation job started (interval: {Interval}, degradation threshold: {DegThreshold}, restoration threshold: {RestThreshold})",
_options.DegradationPolicy.JobInterval,
_options.DegradationPolicy.DegradationThreshold,
_options.DegradationPolicy.RestorationThreshold);
// Initial delay to let the application fully start
await Task.Delay(TimeSpan.FromMinutes(1), stoppingToken).ConfigureAwait(false);
while (!stoppingToken.IsCancellationRequested)
{
try
{
await RunDegradationCycleAsync(stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
_logger?.LogInformation("Stub degradation job cancelled");
break;
}
catch (Exception ex)
{
_logger?.LogError(ex, "Stub degradation job failed");
_metrics.RecordJobError();
}
await Task.Delay(_options.DegradationPolicy.JobInterval, stoppingToken).ConfigureAwait(false);
}
}
private async Task RunDegradationCycleAsync(CancellationToken cancellationToken)
{
var sw = Stopwatch.StartNew();
using var activity = InterestScoringMetrics.ActivitySource.StartActivity("StubDegradation");
_logger?.LogInformation("Starting stub degradation cycle");
await using var scope = _services.CreateAsyncScope();
var scoringService = scope.ServiceProvider.GetRequiredService<IInterestScoringService>();
// Phase 1: Degrade low-interest advisories
var degraded = await scoringService.DegradeToStubsAsync(
_options.DegradationPolicy.DegradationThreshold,
cancellationToken).ConfigureAwait(false);
if (degraded > 0)
{
_logger?.LogInformation("Degraded {Count} advisories to stubs", degraded);
_metrics.RecordDegradations(degraded);
}
// Phase 2: Restore stubs with increased interest
var restored = await scoringService.RestoreFromStubsAsync(
_options.DegradationPolicy.RestorationThreshold,
cancellationToken).ConfigureAwait(false);
if (restored > 0)
{
_logger?.LogInformation("Restored {Count} stubs to active status", restored);
_metrics.RecordRestorations(restored);
}
sw.Stop();
activity?.SetTag("degraded", degraded);
activity?.SetTag("restored", restored);
_logger?.LogInformation(
"Completed stub degradation cycle in {Duration:F2}s: {Degraded} degraded, {Restored} restored",
sw.Elapsed.TotalSeconds, degraded, restored);
}
}

View File

@@ -0,0 +1,65 @@
// -----------------------------------------------------------------------------
// InterestScore.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-002
// Description: Domain model for interest score
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Interest.Models;
/// <summary>
/// Interest score for a canonical advisory.
/// Higher scores indicate higher relevance to the organization's SBOM/runtime.
/// </summary>
public sealed record InterestScore
{
/// <summary>Canonical advisory identifier.</summary>
public Guid CanonicalId { get; init; }
/// <summary>
/// Interest score from 0.0 to 1.0.
/// Higher scores = higher relevance.
/// </summary>
public double Score { get; init; }
/// <summary>
/// Reasons contributing to the score.
/// Possible values: in_sbom, reachable, deployed, no_vex_na, recent
/// </summary>
public IReadOnlyList<string> Reasons { get; init; } = [];
/// <summary>Last build/scan where this advisory was relevant.</summary>
public Guid? LastSeenInBuild { get; init; }
/// <summary>When the score was last computed.</summary>
public DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// Interest tier based on score.
/// </summary>
public InterestTier Tier => Score switch
{
>= 0.7 => InterestTier.High,
>= 0.4 => InterestTier.Medium,
>= 0.2 => InterestTier.Low,
_ => InterestTier.None
};
}
/// <summary>
/// Interest tier classification.
/// </summary>
public enum InterestTier
{
/// <summary>No interest (score < 0.2).</summary>
None,
/// <summary>Low interest (score 0.2-0.4).</summary>
Low,
/// <summary>Medium interest (score 0.4-0.7).</summary>
Medium,
/// <summary>High interest (score >= 0.7).</summary>
High
}

View File

@@ -0,0 +1,113 @@
// -----------------------------------------------------------------------------
// InterestScoreInput.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-006
// Description: Input signals for interest score calculation
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.Interest.Models;
/// <summary>
/// Input signals for interest score calculation.
/// </summary>
public sealed record InterestScoreInput
{
/// <summary>Canonical advisory identifier.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>SBOM matches where this canonical's package appears.</summary>
public IReadOnlyList<SbomMatch> SbomMatches { get; init; } = [];
/// <summary>VEX statements affecting this canonical.</summary>
public IReadOnlyList<VexStatement> VexStatements { get; init; } = [];
/// <summary>Runtime signals for this canonical.</summary>
public IReadOnlyList<RuntimeSignal> RuntimeSignals { get; init; } = [];
/// <summary>When the advisory was last seen in a build.</summary>
public DateTimeOffset? LastSeenInBuild { get; init; }
}
/// <summary>
/// SBOM match indicating canonical affects a package in an org's SBOM.
/// </summary>
public sealed record SbomMatch
{
/// <summary>SBOM digest for deduplication.</summary>
public required string SbomDigest { get; init; }
/// <summary>Matched package PURL.</summary>
public required string Purl { get; init; }
/// <summary>Whether the vulnerable code is reachable from entrypoint.</summary>
public bool IsReachable { get; init; }
/// <summary>Whether the package is deployed in production.</summary>
public bool IsDeployed { get; init; }
/// <summary>When the SBOM was scanned.</summary>
public DateTimeOffset ScannedAt { get; init; }
/// <summary>Optional artifact identifier.</summary>
public string? ArtifactId { get; init; }
}
/// <summary>
/// VEX statement affecting the canonical.
/// </summary>
public sealed record VexStatement
{
/// <summary>Unique statement identifier.</summary>
public required string StatementId { get; init; }
/// <summary>VEX status.</summary>
public required VexStatus Status { get; init; }
/// <summary>Justification for the status.</summary>
public string? Justification { get; init; }
/// <summary>When the statement was issued.</summary>
public DateTimeOffset? IssuedAt { get; init; }
/// <summary>Issuer identifier.</summary>
public string? IssuerId { get; init; }
}
/// <summary>
/// VEX status values.
/// </summary>
public enum VexStatus
{
/// <summary>Product is affected by the vulnerability.</summary>
Affected,
/// <summary>Product is not affected by the vulnerability.</summary>
NotAffected,
/// <summary>Vulnerability has been fixed in the product.</summary>
Fixed,
/// <summary>Vulnerability is under investigation.</summary>
UnderInvestigation
}
/// <summary>
/// Runtime signal indicating advisory relevance.
/// </summary>
public sealed record RuntimeSignal
{
/// <summary>Signal type (e.g., "loaded", "invoked", "network_call").</summary>
public required string SignalType { get; init; }
/// <summary>Signal source (e.g., "ebpf", "agent", "telemetry").</summary>
public required string Source { get; init; }
/// <summary>When the signal was observed.</summary>
public DateTimeOffset ObservedAt { get; init; }
/// <summary>Confidence level (0.0 - 1.0).</summary>
public double Confidence { get; init; } = 1.0;
/// <summary>Additional context.</summary>
public string? Context { get; init; }
}

View File

@@ -0,0 +1,88 @@
// -----------------------------------------------------------------------------
// ServiceCollectionExtensions.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-014, ISCORE-8200-022, ISCORE-8200-027
// Description: DI registration for interest scoring services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Concelier.Interest.Jobs;
namespace StellaOps.Concelier.Interest;
/// <summary>
/// Extension methods for registering interest scoring services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds interest scoring services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configuration">The configuration root.</param>
/// <param name="enableRecalculationJob">Whether to enable the background recalculation job.</param>
/// <param name="enableDegradationJob">Whether to enable the background stub degradation job.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierInterestScoring(
this IServiceCollection services,
IConfiguration configuration,
bool enableRecalculationJob = true,
bool enableDegradationJob = true)
{
// Bind options from configuration
services.Configure<InterestScoreOptions>(
configuration.GetSection(InterestScoreOptions.SectionName));
return AddCoreServices(services, enableRecalculationJob, enableDegradationJob);
}
/// <summary>
/// Adds interest scoring services with custom options.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configureOptions">Action to configure options.</param>
/// <param name="enableRecalculationJob">Whether to enable the background recalculation job.</param>
/// <param name="enableDegradationJob">Whether to enable the background stub degradation job.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierInterestScoring(
this IServiceCollection services,
Action<InterestScoreOptions> configureOptions,
bool enableRecalculationJob = true,
bool enableDegradationJob = true)
{
services.Configure(configureOptions);
return AddCoreServices(services, enableRecalculationJob, enableDegradationJob);
}
private static IServiceCollection AddCoreServices(
IServiceCollection services,
bool enableRecalculationJob,
bool enableDegradationJob)
{
// Register metrics
services.TryAddSingleton<InterestScoringMetrics>();
// Register calculator
services.TryAddSingleton<InterestScoreCalculator>();
// Register service
services.TryAddScoped<IInterestScoringService, InterestScoringService>();
// Register background recalculation job if enabled
if (enableRecalculationJob)
{
services.AddHostedService<InterestScoreRecalculationJob>();
}
// Register stub degradation job if enabled
if (enableDegradationJob)
{
services.AddHostedService<StubDegradationJob>();
}
return services;
}
}

View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.Interest</RootNamespace>
<AssemblyName>StellaOps.Concelier.Interest</AssemblyName>
<Description>Interest scoring for Concelier canonical advisories</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Cache.Valkey\StellaOps.Concelier.Cache.Valkey.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,51 @@
// -----------------------------------------------------------------------------
// SbomLearnedEvent.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-024
// Description: Event emitted when an SBOM is learned/registered
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.SbomIntegration.Events;
/// <summary>
/// Event emitted when an SBOM has been registered and matched against advisories.
/// Downstream consumers can use this to trigger additional processing.
/// </summary>
public sealed record SbomLearnedEvent
{
/// <summary>Event timestamp.</summary>
public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;
/// <summary>SBOM registration ID.</summary>
public required Guid SbomId { get; init; }
/// <summary>SBOM content digest.</summary>
public required string SbomDigest { get; init; }
/// <summary>Optional tenant ID.</summary>
public string? TenantId { get; init; }
/// <summary>Primary component name (e.g., image name).</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>Total components in the SBOM.</summary>
public int ComponentCount { get; init; }
/// <summary>Number of advisories matched.</summary>
public int AdvisoriesMatched { get; init; }
/// <summary>Number of interest scores updated.</summary>
public int ScoresUpdated { get; init; }
/// <summary>Canonical advisory IDs that were matched.</summary>
public required IReadOnlyList<Guid> AffectedCanonicalIds { get; init; }
/// <summary>Processing duration in milliseconds.</summary>
public double ProcessingTimeMs { get; init; }
/// <summary>Whether this was a re-match of an existing SBOM.</summary>
public bool IsRematch { get; init; }
}

View File

@@ -0,0 +1,56 @@
// -----------------------------------------------------------------------------
// ISbomAdvisoryMatcher.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-008
// Description: Interface for matching SBOM components against advisories
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Service for matching SBOM components against canonical advisories.
/// </summary>
public interface ISbomAdvisoryMatcher
{
/// <summary>
/// Matches a set of PURLs against canonical advisories.
/// </summary>
/// <param name="sbomId">SBOM registration ID.</param>
/// <param name="sbomDigest">SBOM content digest.</param>
/// <param name="purls">PURLs to match.</param>
/// <param name="reachabilityMap">Optional reachability data per PURL.</param>
/// <param name="deploymentMap">Optional deployment status per PURL.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of matches found.</returns>
Task<IReadOnlyList<SbomAdvisoryMatch>> MatchAsync(
Guid sbomId,
string sbomDigest,
IEnumerable<string> purls,
IReadOnlyDictionary<string, bool>? reachabilityMap = null,
IReadOnlyDictionary<string, bool>? deploymentMap = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets all canonical IDs that could affect a PURL.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of canonical advisory IDs.</returns>
Task<IReadOnlyList<Guid>> FindAffectingCanonicalIdsAsync(
string purl,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a specific PURL is affected by a specific advisory.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Match result, or null if not affected.</returns>
Task<SbomAdvisoryMatch?> CheckMatchAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,112 @@
// -----------------------------------------------------------------------------
// ISbomRegistryRepository.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-001
// Description: Repository interface for SBOM registry persistence
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Repository for SBOM registration persistence.
/// </summary>
public interface ISbomRegistryRepository
{
#region Registration CRUD
/// <summary>
/// Saves or updates an SBOM registration.
/// </summary>
Task SaveAsync(SbomRegistration registration, CancellationToken cancellationToken = default);
/// <summary>
/// Gets an SBOM registration by digest.
/// </summary>
Task<SbomRegistration?> GetByDigestAsync(string digest, CancellationToken cancellationToken = default);
/// <summary>
/// Gets an SBOM registration by ID.
/// </summary>
Task<SbomRegistration?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default);
/// <summary>
/// Lists registrations with pagination.
/// </summary>
Task<IReadOnlyList<SbomRegistration>> ListAsync(
int offset,
int limit,
string? tenantId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes an SBOM registration by digest.
/// </summary>
Task DeleteAsync(string digest, CancellationToken cancellationToken = default);
/// <summary>
/// Counts total registrations.
/// </summary>
Task<long> CountAsync(string? tenantId = null, CancellationToken cancellationToken = default);
#endregion
#region Match CRUD
/// <summary>
/// Saves SBOM matches (replaces existing).
/// </summary>
Task SaveMatchesAsync(
Guid sbomId,
IEnumerable<SbomAdvisoryMatch> matches,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets matches for an SBOM.
/// </summary>
Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets matches for a canonical advisory.
/// </summary>
Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesByCanonicalAsync(
Guid canonicalId,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes all matches for an SBOM.
/// </summary>
Task DeleteMatchesAsync(Guid sbomId, CancellationToken cancellationToken = default);
#endregion
#region Statistics
/// <summary>
/// Gets registry statistics.
/// </summary>
Task<SbomRegistryStats> GetStatsAsync(
string? tenantId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates affected count for an SBOM.
/// </summary>
Task UpdateAffectedCountAsync(
string digest,
int affectedCount,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates last matched timestamp.
/// </summary>
Task UpdateLastMatchedAsync(
string digest,
DateTimeOffset lastMatched,
CancellationToken cancellationToken = default);
#endregion
}

View File

@@ -0,0 +1,179 @@
// -----------------------------------------------------------------------------
// ISbomRegistryService.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-001
// Description: Service interface for SBOM registration and advisory matching
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Service for registering SBOMs and matching them against canonical advisories.
/// </summary>
public interface ISbomRegistryService
{
#region Registration
/// <summary>
/// Registers an SBOM for advisory matching.
/// </summary>
/// <param name="input">SBOM registration input.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>SBOM registration record.</returns>
Task<SbomRegistration> RegisterSbomAsync(
SbomRegistrationInput input,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets an SBOM registration by digest.
/// </summary>
/// <param name="digest">SBOM content digest.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Registration, or null if not found.</returns>
Task<SbomRegistration?> GetByDigestAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets an SBOM registration by ID.
/// </summary>
/// <param name="id">Registration ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Registration, or null if not found.</returns>
Task<SbomRegistration?> GetByIdAsync(
Guid id,
CancellationToken cancellationToken = default);
/// <summary>
/// Lists registered SBOMs with pagination.
/// </summary>
/// <param name="offset">Pagination offset.</param>
/// <param name="limit">Maximum results.</param>
/// <param name="tenantId">Optional tenant filter.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of registrations.</returns>
Task<IReadOnlyList<SbomRegistration>> ListAsync(
int offset = 0,
int limit = 50,
string? tenantId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Removes an SBOM registration.
/// </summary>
/// <param name="digest">SBOM content digest.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UnregisterAsync(
string digest,
CancellationToken cancellationToken = default);
#endregion
#region Learning (Full Flow)
/// <summary>
/// Learns from an SBOM: registers, matches advisories, updates scores.
/// </summary>
/// <param name="input">SBOM registration input.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Learn result with matches and score updates.</returns>
Task<SbomLearnResult> LearnSbomAsync(
SbomRegistrationInput input,
CancellationToken cancellationToken = default);
/// <summary>
/// Re-matches an existing SBOM against current advisories.
/// </summary>
/// <param name="digest">SBOM content digest.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Learn result with updated matches.</returns>
Task<SbomLearnResult> RematchSbomAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates an SBOM with delta changes (added/removed PURLs).
/// Performs incremental matching only for changed components.
/// </summary>
/// <param name="digest">SBOM content digest.</param>
/// <param name="delta">Delta changes.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Learn result with incremental matches.</returns>
Task<SbomLearnResult> UpdateSbomDeltaAsync(
string digest,
SbomDeltaInput delta,
CancellationToken cancellationToken = default);
#endregion
#region Matching
/// <summary>
/// Gets advisories affecting an SBOM.
/// </summary>
/// <param name="digest">SBOM content digest.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of matches.</returns>
Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets SBOMs affected by a canonical advisory.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of matches.</returns>
Task<IReadOnlyList<SbomAdvisoryMatch>> GetSbomsForAdvisoryAsync(
Guid canonicalId,
CancellationToken cancellationToken = default);
#endregion
#region Statistics
/// <summary>
/// Counts total registered SBOMs.
/// </summary>
/// <param name="tenantId">Optional tenant filter.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Total count.</returns>
Task<long> CountAsync(
string? tenantId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets SBOM registry statistics.
/// </summary>
/// <param name="tenantId">Optional tenant filter.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Statistics.</returns>
Task<SbomRegistryStats> GetStatsAsync(
string? tenantId = null,
CancellationToken cancellationToken = default);
#endregion
}
/// <summary>
/// SBOM registry statistics.
/// </summary>
public sealed record SbomRegistryStats
{
/// <summary>Total registered SBOMs.</summary>
public long TotalSboms { get; init; }
/// <summary>Total unique PURLs across all SBOMs.</summary>
public long TotalPurls { get; init; }
/// <summary>Total advisory matches.</summary>
public long TotalMatches { get; init; }
/// <summary>SBOMs with at least one advisory match.</summary>
public long AffectedSboms { get; init; }
/// <summary>Average advisories per SBOM.</summary>
public double AverageMatchesPerSbom { get; init; }
}

View File

@@ -0,0 +1,155 @@
// -----------------------------------------------------------------------------
// IPurlCanonicalIndex.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-006
// Description: Interface for PURL to canonical advisory index
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration.Index;
/// <summary>
/// Index for fast PURL to canonical advisory lookups.
/// </summary>
public interface IPurlCanonicalIndex
{
/// <summary>
/// Gets all canonical advisory IDs that affect a PURL.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of canonical advisory IDs.</returns>
Task<IReadOnlyList<Guid>> GetCanonicalIdsForPurlAsync(
string purl,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets canonical IDs for multiple PURLs in batch.
/// </summary>
/// <param name="purls">Package URLs.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Dictionary mapping PURLs to canonical IDs.</returns>
Task<IReadOnlyDictionary<string, IReadOnlyList<Guid>>> GetCanonicalIdsForPurlsBatchAsync(
IEnumerable<string> purls,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a PURL is affected by a specific canonical advisory.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if affected.</returns>
Task<bool> IsAffectedAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets match information for a PURL and canonical advisory pair.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Match information, or null if not affected.</returns>
Task<PurlMatchInfo?> GetMatchInfoAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default);
/// <summary>
/// Indexes a canonical advisory for PURL lookups.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="affectsKey">Affected PURL or CPE.</param>
/// <param name="versionConstraint">Optional version constraint.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task IndexCanonicalAsync(
Guid canonicalId,
string affectsKey,
string? versionConstraint = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Indexes multiple canonical advisories in batch.
/// </summary>
/// <param name="entries">Entries to index.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task IndexCanonicalBatchAsync(
IEnumerable<PurlIndexEntry> entries,
CancellationToken cancellationToken = default);
/// <summary>
/// Removes a canonical advisory from the index.
/// </summary>
/// <param name="canonicalId">Canonical advisory ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UnindexCanonicalAsync(
Guid canonicalId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets index statistics.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Index statistics.</returns>
Task<PurlIndexStats> GetStatsAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Match information between a PURL and a canonical advisory.
/// </summary>
public sealed record PurlMatchInfo
{
/// <summary>Matched PURL.</summary>
public required string Purl { get; init; }
/// <summary>Canonical advisory ID.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>Match method used.</summary>
public MatchMethod Method { get; init; } = MatchMethod.ExactPurl;
/// <summary>Match confidence (0-1).</summary>
public double Confidence { get; init; } = 1.0;
/// <summary>Version constraint from advisory.</summary>
public string? VersionConstraint { get; init; }
}
/// <summary>
/// Entry for batch indexing.
/// </summary>
public sealed record PurlIndexEntry
{
/// <summary>Canonical advisory ID.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>Affected PURL or CPE.</summary>
public required string AffectsKey { get; init; }
/// <summary>Optional version constraint.</summary>
public string? VersionConstraint { get; init; }
}
/// <summary>
/// PURL index statistics.
/// </summary>
public sealed record PurlIndexStats
{
/// <summary>Total indexed PURLs.</summary>
public long TotalPurls { get; init; }
/// <summary>Total indexed canonicals.</summary>
public long TotalCanonicals { get; init; }
/// <summary>Total PURL→canonical mappings.</summary>
public long TotalMappings { get; init; }
/// <summary>Cache hit rate (0-1).</summary>
public double CacheHitRate { get; init; }
/// <summary>Last index update time.</summary>
public DateTimeOffset? LastUpdatedAt { get; init; }
}

View File

@@ -0,0 +1,396 @@
// -----------------------------------------------------------------------------
// ValkeyPurlCanonicalIndex.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Tasks: SBOM-8200-006, SBOM-8200-011
// Description: Valkey-backed PURL to canonical advisory index
// -----------------------------------------------------------------------------
using System.Collections.Concurrent;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.SbomIntegration.Models;
using StackExchange.Redis;
namespace StellaOps.Concelier.SbomIntegration.Index;
/// <summary>
/// Valkey-backed implementation of PURL to canonical advisory index.
/// Provides fast O(1) lookups from PURL to canonical IDs.
/// </summary>
public sealed class ValkeyPurlCanonicalIndex : IPurlCanonicalIndex
{
private const string PurlKeyPrefix = "purl:";
private const string CanonicalKeyPrefix = "canonical:purls:";
private const string StatsKey = "purl_index:stats";
private static readonly TimeSpan DefaultTtl = TimeSpan.FromHours(24);
private readonly IConnectionMultiplexer _redis;
private readonly ICanonicalAdvisoryService _canonicalService;
private readonly ILogger<ValkeyPurlCanonicalIndex> _logger;
private long _cacheHits;
private long _cacheMisses;
public ValkeyPurlCanonicalIndex(
IConnectionMultiplexer redis,
ICanonicalAdvisoryService canonicalService,
ILogger<ValkeyPurlCanonicalIndex> logger)
{
_redis = redis ?? throw new ArgumentNullException(nameof(redis));
_canonicalService = canonicalService ?? throw new ArgumentNullException(nameof(canonicalService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> GetCanonicalIdsForPurlAsync(
string purl,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return [];
}
var db = _redis.GetDatabase();
var key = GetPurlKey(purl);
// Try cache first
var cached = await db.StringGetAsync(key).ConfigureAwait(false);
if (cached.HasValue)
{
Interlocked.Increment(ref _cacheHits);
return DeserializeGuids(cached!);
}
Interlocked.Increment(ref _cacheMisses);
// Fall back to database
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
var ids = advisories.Select(a => a.Id).ToList();
// Cache the result
if (ids.Count > 0)
{
await db.StringSetAsync(key, SerializeGuids(ids), DefaultTtl).ConfigureAwait(false);
}
return ids;
}
/// <inheritdoc />
public async Task<IReadOnlyDictionary<string, IReadOnlyList<Guid>>> GetCanonicalIdsForPurlsBatchAsync(
IEnumerable<string> purls,
CancellationToken cancellationToken = default)
{
var purlList = purls.Where(p => !string.IsNullOrWhiteSpace(p)).Distinct().ToList();
if (purlList.Count == 0)
{
return new Dictionary<string, IReadOnlyList<Guid>>();
}
var db = _redis.GetDatabase();
var results = new ConcurrentDictionary<string, IReadOnlyList<Guid>>();
var uncachedPurls = new List<string>();
// Batch lookup in cache
var keys = purlList.Select(p => (RedisKey)GetPurlKey(p)).ToArray();
var values = await db.StringGetAsync(keys).ConfigureAwait(false);
for (int i = 0; i < purlList.Count; i++)
{
if (values[i].HasValue)
{
Interlocked.Increment(ref _cacheHits);
results[purlList[i]] = DeserializeGuids(values[i]!);
}
else
{
Interlocked.Increment(ref _cacheMisses);
uncachedPurls.Add(purlList[i]);
}
}
// Fetch uncached PURLs from database in parallel
if (uncachedPurls.Count > 0)
{
var semaphore = new SemaphoreSlim(16);
var tasks = uncachedPurls.Select(async purl =>
{
await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
var ids = advisories.Select(a => a.Id).ToList();
results[purl] = ids;
// Cache the result
if (ids.Count > 0)
{
await db.StringSetAsync(GetPurlKey(purl), SerializeGuids(ids), DefaultTtl)
.ConfigureAwait(false);
}
}
finally
{
semaphore.Release();
}
});
await Task.WhenAll(tasks).ConfigureAwait(false);
}
return results;
}
/// <inheritdoc />
public async Task<bool> IsAffectedAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default)
{
var ids = await GetCanonicalIdsForPurlAsync(purl, cancellationToken).ConfigureAwait(false);
return ids.Contains(canonicalId);
}
/// <inheritdoc />
public async Task<PurlMatchInfo?> GetMatchInfoAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return null;
}
var isAffected = await IsAffectedAsync(purl, canonicalId, cancellationToken).ConfigureAwait(false);
if (!isAffected)
{
return null;
}
// Determine match method based on PURL format
var method = DetermineMatchMethod(purl);
return new PurlMatchInfo
{
Purl = purl,
CanonicalId = canonicalId,
Method = method,
Confidence = 1.0
};
}
/// <inheritdoc />
public async Task IndexCanonicalAsync(
Guid canonicalId,
string affectsKey,
string? versionConstraint = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(affectsKey))
{
return;
}
var db = _redis.GetDatabase();
var purlKey = GetPurlKey(affectsKey);
// Add canonical ID to PURL's set
var existing = await db.StringGetAsync(purlKey).ConfigureAwait(false);
var ids = existing.HasValue ? DeserializeGuids(existing!).ToList() : new List<Guid>();
if (!ids.Contains(canonicalId))
{
ids.Add(canonicalId);
await db.StringSetAsync(purlKey, SerializeGuids(ids), DefaultTtl).ConfigureAwait(false);
}
// Track which PURLs are indexed for this canonical (for unindexing)
var canonicalKey = GetCanonicalKey(canonicalId);
await db.SetAddAsync(canonicalKey, affectsKey).ConfigureAwait(false);
await db.KeyExpireAsync(canonicalKey, DefaultTtl).ConfigureAwait(false);
_logger.LogDebug("Indexed PURL {Purl} for canonical {CanonicalId}", affectsKey, canonicalId);
}
/// <inheritdoc />
public async Task IndexCanonicalBatchAsync(
IEnumerable<PurlIndexEntry> entries,
CancellationToken cancellationToken = default)
{
var entryList = entries.ToList();
if (entryList.Count == 0)
{
return;
}
var db = _redis.GetDatabase();
var batch = db.CreateBatch();
var tasks = new List<Task>();
// Group by PURL
var byPurl = entryList.GroupBy(e => e.AffectsKey);
foreach (var group in byPurl)
{
var purlKey = GetPurlKey(group.Key);
var canonicalIds = group.Select(e => e.CanonicalId).Distinct().ToList();
// Get existing and merge
var existingTask = db.StringGetAsync(purlKey);
tasks.Add(existingTask.ContinueWith(async t =>
{
var existing = t.Result;
var ids = existing.HasValue ? DeserializeGuids(existing!).ToList() : new List<Guid>();
ids.AddRange(canonicalIds.Where(id => !ids.Contains(id)));
await db.StringSetAsync(purlKey, SerializeGuids(ids), DefaultTtl).ConfigureAwait(false);
}, cancellationToken).Unwrap());
}
// Track canonical → PURLs mappings
var byCanonical = entryList.GroupBy(e => e.CanonicalId);
foreach (var group in byCanonical)
{
var canonicalKey = GetCanonicalKey(group.Key);
var purls = group.Select(e => (RedisValue)e.AffectsKey).ToArray();
tasks.Add(db.SetAddAsync(canonicalKey, purls));
tasks.Add(db.KeyExpireAsync(canonicalKey, DefaultTtl));
}
batch.Execute();
await Task.WhenAll(tasks).ConfigureAwait(false);
_logger.LogInformation("Indexed {EntryCount} PURL→canonical mappings", entryList.Count);
}
/// <inheritdoc />
public async Task UnindexCanonicalAsync(
Guid canonicalId,
CancellationToken cancellationToken = default)
{
var db = _redis.GetDatabase();
var canonicalKey = GetCanonicalKey(canonicalId);
// Get all PURLs indexed for this canonical
var purls = await db.SetMembersAsync(canonicalKey).ConfigureAwait(false);
foreach (var purl in purls)
{
var purlKey = GetPurlKey(purl!);
var existing = await db.StringGetAsync(purlKey).ConfigureAwait(false);
if (existing.HasValue)
{
var ids = DeserializeGuids(existing!).Where(id => id != canonicalId).ToList();
if (ids.Count > 0)
{
await db.StringSetAsync(purlKey, SerializeGuids(ids), DefaultTtl).ConfigureAwait(false);
}
else
{
await db.KeyDeleteAsync(purlKey).ConfigureAwait(false);
}
}
}
// Remove the canonical's PURL set
await db.KeyDeleteAsync(canonicalKey).ConfigureAwait(false);
_logger.LogDebug("Unindexed canonical {CanonicalId} from {PurlCount} PURLs", canonicalId, purls.Length);
}
/// <inheritdoc />
public async Task<PurlIndexStats> GetStatsAsync(CancellationToken cancellationToken = default)
{
var db = _redis.GetDatabase();
var server = _redis.GetServers().FirstOrDefault();
if (server is null)
{
return new PurlIndexStats();
}
// Count keys by pattern
var purlCount = 0L;
var canonicalCount = 0L;
await foreach (var key in server.KeysAsync(pattern: $"{PurlKeyPrefix}*"))
{
purlCount++;
}
await foreach (var key in server.KeysAsync(pattern: $"{CanonicalKeyPrefix}*"))
{
canonicalCount++;
}
var totalRequests = _cacheHits + _cacheMisses;
var hitRate = totalRequests > 0 ? (double)_cacheHits / totalRequests : 0;
return new PurlIndexStats
{
TotalPurls = purlCount,
TotalCanonicals = canonicalCount,
TotalMappings = purlCount, // Approximation
CacheHitRate = hitRate,
LastUpdatedAt = DateTimeOffset.UtcNow
};
}
private static string GetPurlKey(string purl) => $"{PurlKeyPrefix}{NormalizePurl(purl)}";
private static string GetCanonicalKey(Guid canonicalId) => $"{CanonicalKeyPrefix}{canonicalId}";
private static string NormalizePurl(string purl)
{
// Normalize PURL for consistent caching
var normalized = purl.Trim().ToLowerInvariant();
// Remove qualifiers for base key
var qualifierIndex = normalized.IndexOf('?');
if (qualifierIndex > 0)
{
normalized = normalized[..qualifierIndex];
}
return normalized;
}
private static MatchMethod DetermineMatchMethod(string purl)
{
if (purl.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
{
return MatchMethod.Cpe;
}
if (purl.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
return purl.Contains('@') ? MatchMethod.ExactPurl : MatchMethod.NameVersion;
}
return MatchMethod.NameVersion;
}
private static string SerializeGuids(IEnumerable<Guid> guids)
{
return JsonSerializer.Serialize(guids.Select(g => g.ToString()));
}
private static IReadOnlyList<Guid> DeserializeGuids(string json)
{
try
{
var strings = JsonSerializer.Deserialize<List<string>>(json);
return strings?.Select(Guid.Parse).ToList() ?? [];
}
catch
{
return [];
}
}
}

View File

@@ -0,0 +1,270 @@
// -----------------------------------------------------------------------------
// SbomAdvisoryMatcher.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Tasks: SBOM-8200-008, SBOM-8200-009
// Description: Implementation for matching SBOM components against advisories
// -----------------------------------------------------------------------------
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration.Matching;
/// <summary>
/// Service for matching SBOM components against canonical advisories.
/// </summary>
public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
{
private readonly ICanonicalAdvisoryService _canonicalService;
private readonly ILogger<SbomAdvisoryMatcher> _logger;
public SbomAdvisoryMatcher(
ICanonicalAdvisoryService canonicalService,
ILogger<SbomAdvisoryMatcher> logger)
{
_canonicalService = canonicalService ?? throw new ArgumentNullException(nameof(canonicalService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<IReadOnlyList<SbomAdvisoryMatch>> MatchAsync(
Guid sbomId,
string sbomDigest,
IEnumerable<string> purls,
IReadOnlyDictionary<string, bool>? reachabilityMap = null,
IReadOnlyDictionary<string, bool>? deploymentMap = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(purls);
var purlList = purls.ToList();
if (purlList.Count == 0)
{
return [];
}
_logger.LogDebug("Matching {PurlCount} PURLs against canonical advisories", purlList.Count);
var matches = new ConcurrentBag<SbomAdvisoryMatch>();
var matchedCount = 0;
// Process PURLs in parallel with bounded concurrency
var semaphore = new SemaphoreSlim(16); // Max 16 concurrent lookups
var tasks = purlList.Select(async purl =>
{
await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
var purlMatches = await MatchPurlAsync(
sbomId,
sbomDigest,
purl,
reachabilityMap,
deploymentMap,
cancellationToken).ConfigureAwait(false);
foreach (var match in purlMatches)
{
matches.Add(match);
Interlocked.Increment(ref matchedCount);
}
}
finally
{
semaphore.Release();
}
});
await Task.WhenAll(tasks).ConfigureAwait(false);
_logger.LogInformation(
"Found {MatchCount} advisory matches for SBOM {SbomDigest} across {PurlCount} PURLs",
matches.Count, sbomDigest, purlList.Count);
return matches.ToList();
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> FindAffectingCanonicalIdsAsync(
string purl,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return [];
}
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
return advisories.Select(a => a.Id).ToList();
}
/// <inheritdoc />
public async Task<SbomAdvisoryMatch?> CheckMatchAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return null;
}
var advisory = await _canonicalService.GetByIdAsync(canonicalId, cancellationToken)
.ConfigureAwait(false);
if (advisory is null)
{
return null;
}
// Check if this advisory affects the given PURL
var affectsThisPurl = IsArtifactMatch(purl, advisory.AffectsKey);
if (!affectsThisPurl)
{
return null;
}
return new SbomAdvisoryMatch
{
Id = Guid.NewGuid(),
SbomId = Guid.Empty, // Not applicable for single check
SbomDigest = string.Empty,
CanonicalId = canonicalId,
Purl = purl,
Method = DetermineMatchMethod(purl),
IsReachable = false,
IsDeployed = false,
MatchedAt = DateTimeOffset.UtcNow
};
}
private async Task<IReadOnlyList<SbomAdvisoryMatch>> MatchPurlAsync(
Guid sbomId,
string sbomDigest,
string purl,
IReadOnlyDictionary<string, bool>? reachabilityMap,
IReadOnlyDictionary<string, bool>? deploymentMap,
CancellationToken cancellationToken)
{
try
{
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
if (advisories.Count == 0)
{
return [];
}
var isReachable = reachabilityMap?.TryGetValue(purl, out var reachable) == true && reachable;
var isDeployed = deploymentMap?.TryGetValue(purl, out var deployed) == true && deployed;
var matchMethod = DetermineMatchMethod(purl);
return advisories.Select(advisory => new SbomAdvisoryMatch
{
Id = Guid.NewGuid(),
SbomId = sbomId,
SbomDigest = sbomDigest,
CanonicalId = advisory.Id,
Purl = purl,
Method = matchMethod,
IsReachable = isReachable,
IsDeployed = isDeployed,
MatchedAt = DateTimeOffset.UtcNow
}).ToList();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to match PURL {Purl} against advisories", purl);
return [];
}
}
private static MatchMethod DetermineMatchMethod(string purl)
{
// CPE-based matching for OS packages
if (purl.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
{
return MatchMethod.Cpe;
}
// PURL-based matching
if (purl.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
// If contains version, it's an exact match
if (purl.Contains('@'))
{
return MatchMethod.ExactPurl;
}
return MatchMethod.NameVersion;
}
return MatchMethod.NameVersion;
}
private static bool IsArtifactMatch(string purl, string affectedArtifact)
{
// Exact match
if (string.Equals(purl, affectedArtifact, StringComparison.OrdinalIgnoreCase))
{
return true;
}
// Normalize and compare
var normalizedPurl = NormalizePurl(purl);
var normalizedAffected = NormalizePurl(affectedArtifact);
if (string.Equals(normalizedPurl, normalizedAffected, StringComparison.OrdinalIgnoreCase))
{
return true;
}
// Check if affected artifact is a prefix match (package without version)
if (normalizedPurl.StartsWith(normalizedAffected, StringComparison.OrdinalIgnoreCase))
{
// Ensure it's actually a version boundary (@ or end of string)
var remaining = normalizedPurl[normalizedAffected.Length..];
if (remaining.Length == 0 || remaining[0] == '@')
{
return true;
}
}
return false;
}
private static string NormalizePurl(string purl)
{
// Remove trailing slashes, normalize case for scheme
var normalized = purl.Trim();
// Normalize pkg: prefix
if (normalized.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
normalized = "pkg:" + normalized[4..];
}
// Remove qualifiers for comparison (everything after ?)
var qualifierIndex = normalized.IndexOf('?');
if (qualifierIndex > 0)
{
normalized = normalized[..qualifierIndex];
}
// Remove subpath for comparison (everything after #)
var subpathIndex = normalized.IndexOf('#');
if (subpathIndex > 0)
{
normalized = normalized[..subpathIndex];
}
return normalized;
}
}

View File

@@ -0,0 +1,195 @@
// -----------------------------------------------------------------------------
// SbomRegistration.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-000
// Description: Domain model for SBOM registration
// -----------------------------------------------------------------------------
namespace StellaOps.Concelier.SbomIntegration.Models;
/// <summary>
/// Registered SBOM for advisory matching.
/// </summary>
public sealed record SbomRegistration
{
/// <summary>Registration identifier.</summary>
public Guid Id { get; init; }
/// <summary>SBOM content digest (SHA-256).</summary>
public required string Digest { get; init; }
/// <summary>SBOM format: CycloneDX or SPDX.</summary>
public required SbomFormat Format { get; init; }
/// <summary>SBOM specification version.</summary>
public required string SpecVersion { get; init; }
/// <summary>Primary component name (e.g., container image name).</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>Total component count in SBOM.</summary>
public int ComponentCount { get; init; }
/// <summary>Extracted PURLs from SBOM components.</summary>
public IReadOnlyList<string> Purls { get; init; } = [];
/// <summary>When the SBOM was registered.</summary>
public DateTimeOffset RegisteredAt { get; init; }
/// <summary>When the SBOM was last matched against advisories.</summary>
public DateTimeOffset? LastMatchedAt { get; init; }
/// <summary>Number of advisories affecting this SBOM.</summary>
public int AffectedCount { get; init; }
/// <summary>Source of the SBOM (scanner, upload, etc.).</summary>
public required string Source { get; init; }
/// <summary>Optional tenant ID for multi-tenant deployments.</summary>
public string? TenantId { get; init; }
}
/// <summary>
/// SBOM format type.
/// </summary>
public enum SbomFormat
{
/// <summary>CycloneDX format.</summary>
CycloneDX,
/// <summary>SPDX format.</summary>
SPDX
}
/// <summary>
/// Result of matching an SBOM against advisories.
/// Distinct from <see cref="StellaOps.Concelier.Interest.Models.SbomMatch"/> which is simpler for score calculation.
/// </summary>
public sealed record SbomAdvisoryMatch
{
/// <summary>Match identifier.</summary>
public Guid Id { get; init; }
/// <summary>SBOM registration ID.</summary>
public required Guid SbomId { get; init; }
/// <summary>SBOM digest.</summary>
public required string SbomDigest { get; init; }
/// <summary>Canonical advisory ID.</summary>
public required Guid CanonicalId { get; init; }
/// <summary>Matched PURL from SBOM.</summary>
public required string Purl { get; init; }
/// <summary>Whether the code path is reachable.</summary>
public bool IsReachable { get; init; }
/// <summary>Whether deployed in production environment.</summary>
public bool IsDeployed { get; init; }
/// <summary>Match confidence (0-1).</summary>
public double Confidence { get; init; } = 1.0;
/// <summary>How the match was determined.</summary>
public required MatchMethod Method { get; init; }
/// <summary>When the match was recorded.</summary>
public DateTimeOffset MatchedAt { get; init; }
}
/// <summary>
/// Method used to match SBOM component to advisory.
/// </summary>
public enum MatchMethod
{
/// <summary>Exact PURL match.</summary>
ExactPurl,
/// <summary>PURL with version range match.</summary>
PurlVersionRange,
/// <summary>CPE-based match for OS packages.</summary>
Cpe,
/// <summary>Component name and version heuristic.</summary>
NameVersion
}
/// <summary>
/// Input for registering an SBOM.
/// </summary>
public sealed record SbomRegistrationInput
{
/// <summary>SBOM content digest.</summary>
public required string Digest { get; init; }
/// <summary>SBOM format.</summary>
public required SbomFormat Format { get; init; }
/// <summary>SBOM specification version.</summary>
public required string SpecVersion { get; init; }
/// <summary>Primary component name.</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>List of PURLs extracted from SBOM.</summary>
public required IReadOnlyList<string> Purls { get; init; }
/// <summary>Source of registration.</summary>
public required string Source { get; init; }
/// <summary>Optional tenant ID.</summary>
public string? TenantId { get; init; }
/// <summary>Optional reachability data per PURL.</summary>
public IReadOnlyDictionary<string, bool>? ReachabilityMap { get; init; }
/// <summary>Optional deployment status per PURL.</summary>
public IReadOnlyDictionary<string, bool>? DeploymentMap { get; init; }
}
/// <summary>
/// Result of SBOM learning (registration + matching + scoring).
/// </summary>
public sealed record SbomLearnResult
{
/// <summary>SBOM registration.</summary>
public required SbomRegistration Registration { get; init; }
/// <summary>Matches found against advisories.</summary>
public required IReadOnlyList<SbomAdvisoryMatch> Matches { get; init; }
/// <summary>Number of interest scores updated.</summary>
public int ScoresUpdated { get; init; }
/// <summary>Processing time in milliseconds.</summary>
public double ProcessingTimeMs { get; init; }
}
/// <summary>
/// Delta changes to an existing SBOM.
/// </summary>
public sealed record SbomDeltaInput
{
/// <summary>PURLs to add to the SBOM.</summary>
public IReadOnlyList<string> AddedPurls { get; init; } = [];
/// <summary>PURLs to remove from the SBOM.</summary>
public IReadOnlyList<string> RemovedPurls { get; init; } = [];
/// <summary>Optional updated reachability data.</summary>
public IReadOnlyDictionary<string, bool>? ReachabilityMap { get; init; }
/// <summary>Optional updated deployment status.</summary>
public IReadOnlyDictionary<string, bool>? DeploymentMap { get; init; }
/// <summary>Whether this delta represents a complete replacement.</summary>
public bool IsFullReplacement { get; init; }
}

View File

@@ -0,0 +1,90 @@
// -----------------------------------------------------------------------------
// ISbomParser.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-005
// Description: Interface for SBOM parsing and PURL extraction
// -----------------------------------------------------------------------------
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration.Parsing;
/// <summary>
/// Service for parsing SBOM content and extracting package identifiers.
/// </summary>
public interface ISbomParser
{
/// <summary>
/// Extracts PURLs from SBOM content.
/// </summary>
/// <param name="content">SBOM content stream.</param>
/// <param name="format">SBOM format (CycloneDX or SPDX).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Parsing result with extracted PURLs.</returns>
Task<SbomParseResult> ParseAsync(
Stream content,
SbomFormat format,
CancellationToken cancellationToken = default);
/// <summary>
/// Detects the SBOM format from content.
/// </summary>
/// <param name="content">SBOM content stream.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Detected format and spec version.</returns>
Task<SbomFormatInfo> DetectFormatAsync(
Stream content,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of SBOM parsing.
/// </summary>
public sealed record SbomParseResult
{
/// <summary>List of extracted PURLs.</summary>
public required IReadOnlyList<string> Purls { get; init; }
/// <summary>List of extracted CPEs (for OS packages).</summary>
public IReadOnlyList<string> Cpes { get; init; } = [];
/// <summary>Primary component name (e.g., image name).</summary>
public string? PrimaryName { get; init; }
/// <summary>Primary component version.</summary>
public string? PrimaryVersion { get; init; }
/// <summary>Total component count in SBOM.</summary>
public int TotalComponents { get; init; }
/// <summary>Components without PURL (name/version only).</summary>
public IReadOnlyList<ComponentInfo> UnresolvedComponents { get; init; } = [];
/// <summary>Parsing warnings (non-fatal issues).</summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
}
/// <summary>
/// Information about a component without PURL.
/// </summary>
public sealed record ComponentInfo
{
public required string Name { get; init; }
public string? Version { get; init; }
public string? Type { get; init; }
}
/// <summary>
/// Detected SBOM format information.
/// </summary>
public sealed record SbomFormatInfo
{
/// <summary>SBOM format.</summary>
public SbomFormat Format { get; init; }
/// <summary>Specification version (e.g., "1.5" for CycloneDX).</summary>
public string? SpecVersion { get; init; }
/// <summary>Whether format was successfully detected.</summary>
public bool IsDetected { get; init; }
}

View File

@@ -0,0 +1,517 @@
// -----------------------------------------------------------------------------
// SbomParser.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-005
// Description: SBOM parser for CycloneDX and SPDX formats
// -----------------------------------------------------------------------------
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration.Parsing;
/// <summary>
/// Parser for extracting PURLs and metadata from SBOM documents.
/// Supports CycloneDX (1.4-1.6) and SPDX (2.2-2.3, 3.0).
/// </summary>
public sealed class SbomParser : ISbomParser
{
private readonly ILogger<SbomParser> _logger;
public SbomParser(ILogger<SbomParser> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<SbomParseResult> ParseAsync(
Stream content,
SbomFormat format,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
// Ensure stream is at beginning
if (content.CanSeek)
{
content.Position = 0;
}
return format switch
{
SbomFormat.CycloneDX => await ParseCycloneDxAsync(content, cancellationToken).ConfigureAwait(false),
SbomFormat.SPDX => await ParseSpdxAsync(content, cancellationToken).ConfigureAwait(false),
_ => throw new ArgumentException($"Unsupported SBOM format: {format}", nameof(format))
};
}
/// <inheritdoc />
public async Task<SbomFormatInfo> DetectFormatAsync(
Stream content,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(content);
if (content.CanSeek)
{
content.Position = 0;
}
try
{
using var doc = await JsonDocument.ParseAsync(content, cancellationToken: cancellationToken)
.ConfigureAwait(false);
var root = doc.RootElement;
// Check for CycloneDX
if (root.TryGetProperty("bomFormat", out var bomFormat) &&
bomFormat.GetString()?.Equals("CycloneDX", StringComparison.OrdinalIgnoreCase) == true)
{
var specVersion = root.TryGetProperty("specVersion", out var sv) ? sv.GetString() : null;
return new SbomFormatInfo
{
Format = SbomFormat.CycloneDX,
SpecVersion = specVersion,
IsDetected = true
};
}
// Check for SPDX 2.x
if (root.TryGetProperty("spdxVersion", out var spdxVersion))
{
return new SbomFormatInfo
{
Format = SbomFormat.SPDX,
SpecVersion = spdxVersion.GetString(),
IsDetected = true
};
}
// Check for SPDX 3.0 (@context indicates JSON-LD)
if (root.TryGetProperty("@context", out var context))
{
var contextStr = context.ValueKind == JsonValueKind.String
? context.GetString()
: context.ToString();
if (contextStr?.Contains("spdx", StringComparison.OrdinalIgnoreCase) == true)
{
return new SbomFormatInfo
{
Format = SbomFormat.SPDX,
SpecVersion = "3.0",
IsDetected = true
};
}
}
return new SbomFormatInfo { IsDetected = false };
}
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to parse SBOM content as JSON");
return new SbomFormatInfo { IsDetected = false };
}
}
private async Task<SbomParseResult> ParseCycloneDxAsync(
Stream content,
CancellationToken cancellationToken)
{
using var doc = await JsonDocument.ParseAsync(content, cancellationToken: cancellationToken)
.ConfigureAwait(false);
var root = doc.RootElement;
var purls = new List<string>();
var cpes = new List<string>();
var unresolvedComponents = new List<ComponentInfo>();
var warnings = new List<string>();
string? primaryName = null;
string? primaryVersion = null;
int totalComponents = 0;
// Get primary component from metadata
if (root.TryGetProperty("metadata", out var metadata) &&
metadata.TryGetProperty("component", out var primaryComponent))
{
primaryName = primaryComponent.TryGetProperty("name", out var name) ? name.GetString() : null;
primaryVersion = primaryComponent.TryGetProperty("version", out var version) ? version.GetString() : null;
// Primary component may also have a PURL
if (primaryComponent.TryGetProperty("purl", out var primaryPurl))
{
var purlStr = primaryPurl.GetString();
if (!string.IsNullOrWhiteSpace(purlStr))
{
purls.Add(purlStr);
}
}
}
// Parse components array
if (root.TryGetProperty("components", out var components))
{
foreach (var component in components.EnumerateArray())
{
totalComponents++;
ParseCycloneDxComponent(component, purls, cpes, unresolvedComponents, warnings);
}
}
// Parse nested components (CycloneDX supports component hierarchy)
ParseNestedComponents(root, purls, cpes, unresolvedComponents, warnings, ref totalComponents);
_logger.LogDebug(
"Parsed CycloneDX SBOM: {PurlCount} PURLs, {CpeCount} CPEs, {UnresolvedCount} unresolved from {TotalCount} components",
purls.Count, cpes.Count, unresolvedComponents.Count, totalComponents);
return new SbomParseResult
{
Purls = purls.Distinct().ToList(),
Cpes = cpes.Distinct().ToList(),
PrimaryName = primaryName,
PrimaryVersion = primaryVersion,
TotalComponents = totalComponents,
UnresolvedComponents = unresolvedComponents,
Warnings = warnings
};
}
private void ParseCycloneDxComponent(
JsonElement component,
List<string> purls,
List<string> cpes,
List<ComponentInfo> unresolved,
List<string> warnings)
{
var hasPurl = false;
// Extract PURL
if (component.TryGetProperty("purl", out var purl))
{
var purlStr = purl.GetString();
if (!string.IsNullOrWhiteSpace(purlStr))
{
purls.Add(purlStr);
hasPurl = true;
}
}
// Extract CPE (from cpe property or externalReferences)
if (component.TryGetProperty("cpe", out var cpe))
{
var cpeStr = cpe.GetString();
if (!string.IsNullOrWhiteSpace(cpeStr))
{
cpes.Add(cpeStr);
}
}
// Check externalReferences for additional CPEs
if (component.TryGetProperty("externalReferences", out var extRefs))
{
foreach (var extRef in extRefs.EnumerateArray())
{
if (extRef.TryGetProperty("type", out var type) &&
type.GetString()?.Equals("cpe", StringComparison.OrdinalIgnoreCase) == true &&
extRef.TryGetProperty("url", out var url))
{
var cpeStr = url.GetString();
if (!string.IsNullOrWhiteSpace(cpeStr))
{
cpes.Add(cpeStr);
}
}
}
}
// Track unresolved components (no PURL)
if (!hasPurl)
{
var name = component.TryGetProperty("name", out var n) ? n.GetString() : null;
var version = component.TryGetProperty("version", out var v) ? v.GetString() : null;
var componentType = component.TryGetProperty("type", out var t) ? t.GetString() : null;
if (!string.IsNullOrWhiteSpace(name))
{
unresolved.Add(new ComponentInfo
{
Name = name,
Version = version,
Type = componentType
});
}
}
// Recursively parse nested components
if (component.TryGetProperty("components", out var nestedComponents))
{
foreach (var nested in nestedComponents.EnumerateArray())
{
ParseCycloneDxComponent(nested, purls, cpes, unresolved, warnings);
}
}
}
private void ParseNestedComponents(
JsonElement root,
List<string> purls,
List<string> cpes,
List<ComponentInfo> unresolved,
List<string> warnings,
ref int totalComponents)
{
// CycloneDX 1.5+ supports dependencies with nested refs
if (root.TryGetProperty("dependencies", out var dependencies))
{
// Dependencies section doesn't contain component data, just refs
// Already handled through components traversal
}
// Check for compositions (CycloneDX 1.4+)
if (root.TryGetProperty("compositions", out var compositions))
{
// Compositions define relationships but don't add new components
}
}
private async Task<SbomParseResult> ParseSpdxAsync(
Stream content,
CancellationToken cancellationToken)
{
using var doc = await JsonDocument.ParseAsync(content, cancellationToken: cancellationToken)
.ConfigureAwait(false);
var root = doc.RootElement;
var purls = new List<string>();
var cpes = new List<string>();
var unresolvedComponents = new List<ComponentInfo>();
var warnings = new List<string>();
string? primaryName = null;
string? primaryVersion = null;
int totalComponents = 0;
// Detect SPDX version
var isSpdx3 = root.TryGetProperty("@context", out _);
if (isSpdx3)
{
return await ParseSpdx3Async(root, cancellationToken).ConfigureAwait(false);
}
// SPDX 2.x parsing
// Get document name as primary
if (root.TryGetProperty("name", out var docName))
{
primaryName = docName.GetString();
}
// Parse packages
if (root.TryGetProperty("packages", out var packages))
{
foreach (var package in packages.EnumerateArray())
{
totalComponents++;
ParseSpdxPackage(package, purls, cpes, unresolvedComponents, warnings);
// First package is often the primary
if (primaryName is null && package.TryGetProperty("name", out var pkgName))
{
primaryName = pkgName.GetString();
primaryVersion = package.TryGetProperty("versionInfo", out var v) ? v.GetString() : null;
}
}
}
_logger.LogDebug(
"Parsed SPDX SBOM: {PurlCount} PURLs, {CpeCount} CPEs, {UnresolvedCount} unresolved from {TotalCount} packages",
purls.Count, cpes.Count, unresolvedComponents.Count, totalComponents);
return new SbomParseResult
{
Purls = purls.Distinct().ToList(),
Cpes = cpes.Distinct().ToList(),
PrimaryName = primaryName,
PrimaryVersion = primaryVersion,
TotalComponents = totalComponents,
UnresolvedComponents = unresolvedComponents,
Warnings = warnings
};
}
private void ParseSpdxPackage(
JsonElement package,
List<string> purls,
List<string> cpes,
List<ComponentInfo> unresolved,
List<string> warnings)
{
var hasPurl = false;
// Extract from externalRefs
if (package.TryGetProperty("externalRefs", out var extRefs))
{
foreach (var extRef in extRefs.EnumerateArray())
{
var refType = extRef.TryGetProperty("referenceType", out var rt) ? rt.GetString() : null;
var refCategory = extRef.TryGetProperty("referenceCategory", out var rc) ? rc.GetString() : null;
var locator = extRef.TryGetProperty("referenceLocator", out var loc) ? loc.GetString() : null;
if (string.IsNullOrWhiteSpace(locator))
continue;
// PURL reference
if (refType?.Equals("purl", StringComparison.OrdinalIgnoreCase) == true ||
refCategory?.Equals("PACKAGE-MANAGER", StringComparison.OrdinalIgnoreCase) == true)
{
if (locator.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
purls.Add(locator);
hasPurl = true;
}
}
// CPE reference
if (refType?.StartsWith("cpe", StringComparison.OrdinalIgnoreCase) == true ||
refCategory?.Equals("SECURITY", StringComparison.OrdinalIgnoreCase) == true)
{
if (locator.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
{
cpes.Add(locator);
}
}
}
}
// Track unresolved packages (no PURL)
if (!hasPurl)
{
var name = package.TryGetProperty("name", out var n) ? n.GetString() : null;
var version = package.TryGetProperty("versionInfo", out var v) ? v.GetString() : null;
if (!string.IsNullOrWhiteSpace(name))
{
unresolved.Add(new ComponentInfo
{
Name = name,
Version = version,
Type = "package"
});
}
}
}
private Task<SbomParseResult> ParseSpdx3Async(
JsonElement root,
CancellationToken cancellationToken)
{
var purls = new List<string>();
var cpes = new List<string>();
var unresolvedComponents = new List<ComponentInfo>();
var warnings = new List<string>();
string? primaryName = null;
string? primaryVersion = null;
int totalComponents = 0;
// SPDX 3.0 uses "@graph" for elements
if (root.TryGetProperty("@graph", out var graph))
{
foreach (var element in graph.EnumerateArray())
{
var elementType = element.TryGetProperty("@type", out var t) ? t.GetString() : null;
// Skip non-package elements
if (elementType is null ||
(!elementType.Contains("Package", StringComparison.OrdinalIgnoreCase) &&
!elementType.Contains("Software", StringComparison.OrdinalIgnoreCase)))
{
continue;
}
totalComponents++;
var hasPurl = false;
// SPDX 3.0 uses packageUrl property directly
if (element.TryGetProperty("packageUrl", out var purl))
{
var purlStr = purl.GetString();
if (!string.IsNullOrWhiteSpace(purlStr))
{
purls.Add(purlStr);
hasPurl = true;
}
}
// Check externalIdentifier array
if (element.TryGetProperty("externalIdentifier", out var extIds))
{
foreach (var extId in extIds.EnumerateArray())
{
var idType = extId.TryGetProperty("externalIdentifierType", out var eit)
? eit.GetString()
: null;
var idValue = extId.TryGetProperty("identifier", out var id) ? id.GetString() : null;
if (string.IsNullOrWhiteSpace(idValue))
continue;
if (idType?.Equals("purl", StringComparison.OrdinalIgnoreCase) == true)
{
purls.Add(idValue);
hasPurl = true;
}
else if (idType?.StartsWith("cpe", StringComparison.OrdinalIgnoreCase) == true)
{
cpes.Add(idValue);
}
}
}
// Track unresolved
if (!hasPurl)
{
var name = element.TryGetProperty("name", out var n) ? n.GetString() : null;
var version = element.TryGetProperty("packageVersion", out var v)
? v.GetString()
: element.TryGetProperty("softwareVersion", out var sv)
? sv.GetString()
: null;
if (!string.IsNullOrWhiteSpace(name))
{
unresolvedComponents.Add(new ComponentInfo
{
Name = name,
Version = version,
Type = elementType
});
}
}
// Get primary from first package
if (primaryName is null)
{
primaryName = element.TryGetProperty("name", out var n) ? n.GetString() : null;
primaryVersion = element.TryGetProperty("packageVersion", out var v) ? v.GetString() : null;
}
}
}
_logger.LogDebug(
"Parsed SPDX 3.0 SBOM: {PurlCount} PURLs, {CpeCount} CPEs, {UnresolvedCount} unresolved from {TotalCount} elements",
purls.Count, cpes.Count, unresolvedComponents.Count, totalComponents);
return Task.FromResult(new SbomParseResult
{
Purls = purls.Distinct().ToList(),
Cpes = cpes.Distinct().ToList(),
PrimaryName = primaryName,
PrimaryVersion = primaryVersion,
TotalComponents = totalComponents,
UnresolvedComponents = unresolvedComponents,
Warnings = warnings
});
}
}

View File

@@ -0,0 +1,270 @@
// -----------------------------------------------------------------------------
// SbomAdvisoryMatcher.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Tasks: SBOM-8200-008, SBOM-8200-009
// Description: Implementation for matching SBOM components against advisories
// -----------------------------------------------------------------------------
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Canonical;
using StellaOps.Concelier.SbomIntegration.Models;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Service for matching SBOM components against canonical advisories.
/// </summary>
public sealed class SbomAdvisoryMatcher : ISbomAdvisoryMatcher
{
private readonly ICanonicalAdvisoryService _canonicalService;
private readonly ILogger<SbomAdvisoryMatcher> _logger;
public SbomAdvisoryMatcher(
ICanonicalAdvisoryService canonicalService,
ILogger<SbomAdvisoryMatcher> logger)
{
_canonicalService = canonicalService ?? throw new ArgumentNullException(nameof(canonicalService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<IReadOnlyList<SbomAdvisoryMatch>> MatchAsync(
Guid sbomId,
string sbomDigest,
IEnumerable<string> purls,
IReadOnlyDictionary<string, bool>? reachabilityMap = null,
IReadOnlyDictionary<string, bool>? deploymentMap = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(purls);
var purlList = purls.ToList();
if (purlList.Count == 0)
{
return [];
}
_logger.LogDebug("Matching {PurlCount} PURLs against canonical advisories", purlList.Count);
var matches = new ConcurrentBag<SbomAdvisoryMatch>();
var matchedCount = 0;
// Process PURLs in parallel with bounded concurrency
var semaphore = new SemaphoreSlim(16); // Max 16 concurrent lookups
var tasks = purlList.Select(async purl =>
{
await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
var purlMatches = await MatchPurlAsync(
sbomId,
sbomDigest,
purl,
reachabilityMap,
deploymentMap,
cancellationToken).ConfigureAwait(false);
foreach (var match in purlMatches)
{
matches.Add(match);
Interlocked.Increment(ref matchedCount);
}
}
finally
{
semaphore.Release();
}
});
await Task.WhenAll(tasks).ConfigureAwait(false);
_logger.LogInformation(
"Found {MatchCount} advisory matches for SBOM {SbomDigest} across {PurlCount} PURLs",
matches.Count, sbomDigest, purlList.Count);
return matches.ToList();
}
/// <inheritdoc />
public async Task<IReadOnlyList<Guid>> FindAffectingCanonicalIdsAsync(
string purl,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return [];
}
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
return advisories.Select(a => a.Id).ToList();
}
/// <inheritdoc />
public async Task<SbomAdvisoryMatch?> CheckMatchAsync(
string purl,
Guid canonicalId,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(purl))
{
return null;
}
var advisory = await _canonicalService.GetByIdAsync(canonicalId, cancellationToken)
.ConfigureAwait(false);
if (advisory is null)
{
return null;
}
// Check if this advisory affects the given PURL
var affectsThisPurl = IsArtifactMatch(purl, advisory.AffectsKey);
if (!affectsThisPurl)
{
return null;
}
return new SbomAdvisoryMatch
{
Id = Guid.NewGuid(),
SbomId = Guid.Empty, // Not applicable for single check
SbomDigest = string.Empty,
CanonicalId = canonicalId,
Purl = purl,
Method = DetermineMatchMethod(purl),
IsReachable = false,
IsDeployed = false,
MatchedAt = DateTimeOffset.UtcNow
};
}
private async Task<IReadOnlyList<SbomAdvisoryMatch>> MatchPurlAsync(
Guid sbomId,
string sbomDigest,
string purl,
IReadOnlyDictionary<string, bool>? reachabilityMap,
IReadOnlyDictionary<string, bool>? deploymentMap,
CancellationToken cancellationToken)
{
try
{
var advisories = await _canonicalService.GetByArtifactAsync(purl, cancellationToken)
.ConfigureAwait(false);
if (advisories.Count == 0)
{
return [];
}
var isReachable = reachabilityMap?.TryGetValue(purl, out var reachable) == true && reachable;
var isDeployed = deploymentMap?.TryGetValue(purl, out var deployed) == true && deployed;
var matchMethod = DetermineMatchMethod(purl);
return advisories.Select(advisory => new SbomAdvisoryMatch
{
Id = Guid.NewGuid(),
SbomId = sbomId,
SbomDigest = sbomDigest,
CanonicalId = advisory.Id,
Purl = purl,
Method = matchMethod,
IsReachable = isReachable,
IsDeployed = isDeployed,
MatchedAt = DateTimeOffset.UtcNow
}).ToList();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to match PURL {Purl} against advisories", purl);
return [];
}
}
private static MatchMethod DetermineMatchMethod(string purl)
{
// CPE-based matching for OS packages
if (purl.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase))
{
return MatchMethod.Cpe;
}
// PURL-based matching
if (purl.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
// If contains version range markers, it's a range match
if (purl.Contains('@'))
{
return MatchMethod.ExactPurl;
}
return MatchMethod.NameVersion;
}
return MatchMethod.NameVersion;
}
private static bool IsArtifactMatch(string purl, string affectedArtifact)
{
// Exact match
if (string.Equals(purl, affectedArtifact, StringComparison.OrdinalIgnoreCase))
{
return true;
}
// Normalize and compare
var normalizedPurl = NormalizePurl(purl);
var normalizedAffected = NormalizePurl(affectedArtifact);
if (string.Equals(normalizedPurl, normalizedAffected, StringComparison.OrdinalIgnoreCase))
{
return true;
}
// Check if affected artifact is a prefix match (package without version)
if (normalizedPurl.StartsWith(normalizedAffected, StringComparison.OrdinalIgnoreCase))
{
// Ensure it's actually a version boundary (@ or end of string)
var remaining = normalizedPurl[normalizedAffected.Length..];
if (remaining.Length == 0 || remaining[0] == '@')
{
return true;
}
}
return false;
}
private static string NormalizePurl(string purl)
{
// Remove trailing slashes, normalize case for scheme
var normalized = purl.Trim();
// Normalize pkg: prefix
if (normalized.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
{
normalized = "pkg:" + normalized[4..];
}
// Remove qualifiers for comparison (everything after ?)
var qualifierIndex = normalized.IndexOf('?');
if (qualifierIndex > 0)
{
normalized = normalized[..qualifierIndex];
}
// Remove subpath for comparison (everything after #)
var subpathIndex = normalized.IndexOf('#');
if (subpathIndex > 0)
{
normalized = normalized[..subpathIndex];
}
return normalized;
}
}

View File

@@ -0,0 +1,529 @@
// -----------------------------------------------------------------------------
// SbomRegistryService.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Tasks: SBOM-8200-004, SBOM-8200-013
// Description: Service implementation for SBOM registration and advisory matching
// -----------------------------------------------------------------------------
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Interest;
using StellaOps.Concelier.SbomIntegration.Events;
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Service for registering SBOMs and matching them against canonical advisories.
/// </summary>
public sealed class SbomRegistryService : ISbomRegistryService
{
private readonly ISbomRegistryRepository _repository;
private readonly ISbomAdvisoryMatcher _matcher;
private readonly IInterestScoringService _scoringService;
private readonly IEventStream<SbomLearnedEvent>? _eventStream;
private readonly ILogger<SbomRegistryService> _logger;
public SbomRegistryService(
ISbomRegistryRepository repository,
ISbomAdvisoryMatcher matcher,
IInterestScoringService scoringService,
ILogger<SbomRegistryService> logger,
IEventStream<SbomLearnedEvent>? eventStream = null)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_matcher = matcher ?? throw new ArgumentNullException(nameof(matcher));
_scoringService = scoringService ?? throw new ArgumentNullException(nameof(scoringService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_eventStream = eventStream;
}
#region Registration
/// <inheritdoc />
public async Task<SbomRegistration> RegisterSbomAsync(
SbomRegistrationInput input,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(input);
// Check for existing registration
var existing = await _repository.GetByDigestAsync(input.Digest, cancellationToken)
.ConfigureAwait(false);
if (existing is not null)
{
_logger.LogDebug(
"SBOM {Digest} already registered, returning existing registration",
input.Digest);
return existing;
}
var registration = new SbomRegistration
{
Id = Guid.NewGuid(),
Digest = input.Digest,
Format = input.Format,
SpecVersion = input.SpecVersion,
PrimaryName = input.PrimaryName,
PrimaryVersion = input.PrimaryVersion,
ComponentCount = input.Purls.Count,
Purls = input.Purls,
RegisteredAt = DateTimeOffset.UtcNow,
Source = input.Source,
TenantId = input.TenantId
};
await _repository.SaveAsync(registration, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Registered SBOM {Digest} with {ComponentCount} components from source {Source}",
input.Digest, registration.ComponentCount, input.Source);
return registration;
}
/// <inheritdoc />
public Task<SbomRegistration?> GetByDigestAsync(
string digest,
CancellationToken cancellationToken = default)
{
return _repository.GetByDigestAsync(digest, cancellationToken);
}
/// <inheritdoc />
public Task<SbomRegistration?> GetByIdAsync(
Guid id,
CancellationToken cancellationToken = default)
{
return _repository.GetByIdAsync(id, cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<SbomRegistration>> ListAsync(
int offset = 0,
int limit = 50,
string? tenantId = null,
CancellationToken cancellationToken = default)
{
return _repository.ListAsync(offset, limit, tenantId, cancellationToken);
}
/// <inheritdoc />
public async Task UnregisterAsync(
string digest,
CancellationToken cancellationToken = default)
{
var registration = await _repository.GetByDigestAsync(digest, cancellationToken)
.ConfigureAwait(false);
if (registration is not null)
{
await _repository.DeleteMatchesAsync(registration.Id, cancellationToken)
.ConfigureAwait(false);
}
await _repository.DeleteAsync(digest, cancellationToken).ConfigureAwait(false);
_logger.LogInformation("Unregistered SBOM {Digest}", digest);
}
#endregion
#region Learning (Full Flow)
/// <inheritdoc />
public async Task<SbomLearnResult> LearnSbomAsync(
SbomRegistrationInput input,
CancellationToken cancellationToken = default)
{
var sw = Stopwatch.StartNew();
// Step 1: Register SBOM
var registration = await RegisterSbomAsync(input, cancellationToken).ConfigureAwait(false);
// Step 2: Match against advisories
var matches = await _matcher.MatchAsync(
registration.Id,
registration.Digest,
input.Purls,
input.ReachabilityMap,
input.DeploymentMap,
cancellationToken).ConfigureAwait(false);
// Step 3: Save matches
await _repository.SaveMatchesAsync(registration.Id, matches, cancellationToken)
.ConfigureAwait(false);
// Step 4: Update registration metadata
await _repository.UpdateAffectedCountAsync(registration.Digest, matches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(registration.Digest, DateTimeOffset.UtcNow, cancellationToken)
.ConfigureAwait(false);
// Step 5: Update interest scores for affected canonicals
var affectedCanonicalIds = matches
.Select(m => m.CanonicalId)
.Distinct()
.ToList();
var scoresUpdated = 0;
foreach (var canonicalId in affectedCanonicalIds)
{
try
{
var matchesForCanonical = matches.Where(m => m.CanonicalId == canonicalId).ToList();
var isReachable = matchesForCanonical.Any(m => m.IsReachable);
var isDeployed = matchesForCanonical.Any(m => m.IsDeployed);
var purl = matchesForCanonical.First().Purl;
await _scoringService.RecordSbomMatchAsync(
canonicalId,
registration.Digest,
purl,
isReachable,
isDeployed,
cancellationToken).ConfigureAwait(false);
scoresUpdated++;
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to update interest score for canonical {CanonicalId}",
canonicalId);
}
}
sw.Stop();
_logger.LogInformation(
"Learned SBOM {Digest}: {MatchCount} matches, {ScoresUpdated} scores updated in {ElapsedMs}ms",
registration.Digest, matches.Count, scoresUpdated, sw.ElapsedMilliseconds);
var result = new SbomLearnResult
{
Registration = registration with
{
AffectedCount = matches.Count,
LastMatchedAt = DateTimeOffset.UtcNow
},
Matches = matches,
ScoresUpdated = scoresUpdated,
ProcessingTimeMs = sw.Elapsed.TotalMilliseconds
};
// Emit SbomLearned event
await EmitLearnedEventAsync(result, affectedCanonicalIds, isRematch: false, cancellationToken)
.ConfigureAwait(false);
return result;
}
/// <inheritdoc />
public async Task<SbomLearnResult> RematchSbomAsync(
string digest,
CancellationToken cancellationToken = default)
{
var registration = await _repository.GetByDigestAsync(digest, cancellationToken)
.ConfigureAwait(false);
if (registration is null)
{
throw new InvalidOperationException($"SBOM with digest {digest} not found");
}
// Create input from existing registration
var input = new SbomRegistrationInput
{
Digest = registration.Digest,
Format = registration.Format,
SpecVersion = registration.SpecVersion,
PrimaryName = registration.PrimaryName,
PrimaryVersion = registration.PrimaryVersion,
Purls = registration.Purls,
Source = registration.Source,
TenantId = registration.TenantId
};
// Clear existing matches
await _repository.DeleteMatchesAsync(registration.Id, cancellationToken)
.ConfigureAwait(false);
// Re-run matching (skip registration since already exists)
var sw = Stopwatch.StartNew();
var matches = await _matcher.MatchAsync(
registration.Id,
registration.Digest,
registration.Purls,
null, // No reachability data on rematch
null, // No deployment data on rematch
cancellationToken).ConfigureAwait(false);
await _repository.SaveMatchesAsync(registration.Id, matches, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateAffectedCountAsync(digest, matches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(digest, DateTimeOffset.UtcNow, cancellationToken)
.ConfigureAwait(false);
sw.Stop();
_logger.LogInformation(
"Rematched SBOM {Digest}: {MatchCount} matches in {ElapsedMs}ms",
digest, matches.Count, sw.ElapsedMilliseconds);
var affectedCanonicalIds = matches
.Select(m => m.CanonicalId)
.Distinct()
.ToList();
var result = new SbomLearnResult
{
Registration = registration with
{
AffectedCount = matches.Count,
LastMatchedAt = DateTimeOffset.UtcNow
},
Matches = matches,
ScoresUpdated = 0, // Rematch doesn't update scores
ProcessingTimeMs = sw.Elapsed.TotalMilliseconds
};
// Emit SbomLearned event
await EmitLearnedEventAsync(result, affectedCanonicalIds, isRematch: true, cancellationToken)
.ConfigureAwait(false);
return result;
}
/// <inheritdoc />
public async Task<SbomLearnResult> UpdateSbomDeltaAsync(
string digest,
SbomDeltaInput delta,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(delta);
var registration = await _repository.GetByDigestAsync(digest, cancellationToken)
.ConfigureAwait(false);
if (registration is null)
{
throw new InvalidOperationException($"SBOM with digest {digest} not found");
}
var sw = Stopwatch.StartNew();
// Calculate new PURL list
var currentPurls = new HashSet<string>(registration.Purls);
var removedPurls = new HashSet<string>(delta.RemovedPurls);
var addedPurls = delta.AddedPurls.Where(p => !currentPurls.Contains(p)).ToList();
// Remove specified PURLs
foreach (var purl in removedPurls)
{
currentPurls.Remove(purl);
}
// Add new PURLs
foreach (var purl in addedPurls)
{
currentPurls.Add(purl);
}
var newPurls = currentPurls.ToList();
// Only match for added PURLs (optimization)
var matchesForAdded = addedPurls.Count > 0
? await _matcher.MatchAsync(
registration.Id,
registration.Digest,
addedPurls,
delta.ReachabilityMap,
delta.DeploymentMap,
cancellationToken).ConfigureAwait(false)
: [];
// Get existing matches and remove those for removed PURLs
var existingMatches = await _repository.GetMatchesAsync(digest, cancellationToken)
.ConfigureAwait(false);
var filteredMatches = existingMatches
.Where(m => !removedPurls.Contains(m.Purl))
.ToList();
// Combine existing (minus removed) with new matches
var allMatches = filteredMatches.Concat(matchesForAdded).ToList();
// Update registration with new PURL list
await _repository.UpdatePurlsAsync(digest, newPurls, cancellationToken)
.ConfigureAwait(false);
// Save updated matches
await _repository.DeleteMatchesAsync(registration.Id, cancellationToken)
.ConfigureAwait(false);
await _repository.SaveMatchesAsync(registration.Id, allMatches, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateAffectedCountAsync(digest, allMatches.Count, cancellationToken)
.ConfigureAwait(false);
await _repository.UpdateLastMatchedAsync(digest, DateTimeOffset.UtcNow, cancellationToken)
.ConfigureAwait(false);
// Update interest scores only for newly added matches
var affectedCanonicalIds = matchesForAdded
.Select(m => m.CanonicalId)
.Distinct()
.ToList();
var scoresUpdated = 0;
foreach (var canonicalId in affectedCanonicalIds)
{
try
{
var matchesForCanonical = matchesForAdded.Where(m => m.CanonicalId == canonicalId).ToList();
var isReachable = matchesForCanonical.Any(m => m.IsReachable);
var isDeployed = matchesForCanonical.Any(m => m.IsDeployed);
var purl = matchesForCanonical.First().Purl;
await _scoringService.RecordSbomMatchAsync(
canonicalId,
registration.Digest,
purl,
isReachable,
isDeployed,
cancellationToken).ConfigureAwait(false);
scoresUpdated++;
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to update interest score for canonical {CanonicalId}",
canonicalId);
}
}
sw.Stop();
_logger.LogInformation(
"Updated SBOM {Digest} delta: +{Added}/-{Removed} PURLs, {NewMatches} new matches, {ScoresUpdated} scores in {ElapsedMs}ms",
digest, addedPurls.Count, removedPurls.Count, matchesForAdded.Count, scoresUpdated, sw.ElapsedMilliseconds);
var result = new SbomLearnResult
{
Registration = registration with
{
ComponentCount = newPurls.Count,
AffectedCount = allMatches.Count,
LastMatchedAt = DateTimeOffset.UtcNow,
Purls = newPurls
},
Matches = allMatches,
ScoresUpdated = scoresUpdated,
ProcessingTimeMs = sw.Elapsed.TotalMilliseconds
};
// Emit SbomLearned event
await EmitLearnedEventAsync(result, affectedCanonicalIds, isRematch: false, cancellationToken)
.ConfigureAwait(false);
return result;
}
#endregion
#region Matching
/// <inheritdoc />
public Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesAsync(
string digest,
CancellationToken cancellationToken = default)
{
return _repository.GetMatchesAsync(digest, cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<SbomAdvisoryMatch>> GetSbomsForAdvisoryAsync(
Guid canonicalId,
CancellationToken cancellationToken = default)
{
return _repository.GetMatchesByCanonicalAsync(canonicalId, cancellationToken);
}
#endregion
#region Statistics
/// <inheritdoc />
public Task<long> CountAsync(
string? tenantId = null,
CancellationToken cancellationToken = default)
{
return _repository.CountAsync(tenantId, cancellationToken);
}
/// <inheritdoc />
public Task<SbomRegistryStats> GetStatsAsync(
string? tenantId = null,
CancellationToken cancellationToken = default)
{
return _repository.GetStatsAsync(tenantId, cancellationToken);
}
#endregion
#region Private Helpers
private async Task EmitLearnedEventAsync(
SbomLearnResult result,
IReadOnlyList<Guid> affectedCanonicalIds,
bool isRematch,
CancellationToken cancellationToken)
{
if (_eventStream is null)
{
return;
}
try
{
var @event = new SbomLearnedEvent
{
SbomId = result.Registration.Id,
SbomDigest = result.Registration.Digest,
TenantId = result.Registration.TenantId,
PrimaryName = result.Registration.PrimaryName,
PrimaryVersion = result.Registration.PrimaryVersion,
ComponentCount = result.Registration.ComponentCount,
AdvisoriesMatched = result.Matches.Count,
ScoresUpdated = result.ScoresUpdated,
AffectedCanonicalIds = affectedCanonicalIds,
ProcessingTimeMs = result.ProcessingTimeMs,
IsRematch = isRematch
};
await _eventStream.PublishAsync(@event, cancellationToken: cancellationToken)
.ConfigureAwait(false);
_logger.LogDebug(
"Emitted SbomLearned event for SBOM {SbomDigest}",
result.Registration.Digest);
}
catch (Exception ex)
{
_logger.LogWarning(
ex,
"Failed to emit SbomLearned event for SBOM {SbomDigest}",
result.Registration.Digest);
}
}
#endregion
}

View File

@@ -0,0 +1,64 @@
// -----------------------------------------------------------------------------
// ServiceCollectionExtensions.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-000
// Description: DI registration for SBOM integration services
// -----------------------------------------------------------------------------
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Concelier.SbomIntegration.Index;
using StellaOps.Concelier.SbomIntegration.Matching;
using StellaOps.Concelier.SbomIntegration.Parsing;
namespace StellaOps.Concelier.SbomIntegration;
/// <summary>
/// Extension methods for registering SBOM integration services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds SBOM integration services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierSbomIntegration(this IServiceCollection services)
{
// Register parser
services.TryAddSingleton<ISbomParser, SbomParser>();
// Register PURL index (requires Valkey connection)
services.TryAddSingleton<IPurlCanonicalIndex, ValkeyPurlCanonicalIndex>();
// Register matcher
services.TryAddScoped<ISbomAdvisoryMatcher, SbomAdvisoryMatcher>();
// Register service
services.TryAddScoped<ISbomRegistryService, SbomRegistryService>();
return services;
}
/// <summary>
/// Adds SBOM integration services with the specified matcher.
/// </summary>
/// <typeparam name="TMatcher">Matcher implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierSbomIntegration<TMatcher>(this IServiceCollection services)
where TMatcher : class, ISbomAdvisoryMatcher
{
// Register parser
services.TryAddSingleton<ISbomParser, SbomParser>();
// Register PURL index (requires Valkey connection)
services.TryAddSingleton<IPurlCanonicalIndex, ValkeyPurlCanonicalIndex>();
// Register matcher and service
services.TryAddScoped<ISbomAdvisoryMatcher, TMatcher>();
services.TryAddScoped<ISbomRegistryService, SbomRegistryService>();
return services;
}
}

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Concelier.SbomIntegration</RootNamespace>
<AssemblyName>StellaOps.Concelier.SbomIntegration</AssemblyName>
<Description>SBOM integration for Concelier advisory matching and interest scoring</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Interest\StellaOps.Concelier.Interest.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Cache.Valkey\StellaOps.Concelier.Cache.Valkey.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,45 @@
-- Concelier Migration 015: Interest Score Table
-- Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
-- Task: ISCORE-8200-000
-- Creates interest score table for org-specific advisory prioritization
-- Interest score table for canonical advisories
CREATE TABLE IF NOT EXISTS vuln.interest_score (
-- Identity (uses canonical_id as natural key)
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id) ON DELETE CASCADE,
-- Score data
score NUMERIC(3,2) NOT NULL CHECK (score >= 0 AND score <= 1),
reasons JSONB NOT NULL DEFAULT '[]',
-- Context
last_seen_in_build UUID, -- Last SBOM/build that referenced this advisory
-- Audit
computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
CONSTRAINT uq_interest_score_canonical UNIQUE (canonical_id)
);
-- Primary index for score-based queries (desc for "top N" queries)
CREATE INDEX IF NOT EXISTS idx_interest_score_score ON vuln.interest_score(score DESC);
-- Index for stale score detection
CREATE INDEX IF NOT EXISTS idx_interest_score_computed ON vuln.interest_score(computed_at DESC);
-- Partial index for high-interest advisories (score >= 0.7)
CREATE INDEX IF NOT EXISTS idx_interest_score_high ON vuln.interest_score(canonical_id)
WHERE score >= 0.7;
-- Partial index for low-interest advisories (score < 0.2) for degradation queries
CREATE INDEX IF NOT EXISTS idx_interest_score_low ON vuln.interest_score(canonical_id)
WHERE score < 0.2;
-- Comments
COMMENT ON TABLE vuln.interest_score IS 'Per-canonical interest scores based on org signals (SBOM, VEX, runtime)';
COMMENT ON COLUMN vuln.interest_score.score IS 'Interest score 0.00-1.00 computed from weighted factors';
COMMENT ON COLUMN vuln.interest_score.reasons IS 'Array of reason codes: in_sbom, reachable, deployed, no_vex_na, recent';
COMMENT ON COLUMN vuln.interest_score.last_seen_in_build IS 'UUID of last SBOM/build where this advisory was relevant';
COMMENT ON COLUMN vuln.interest_score.computed_at IS 'Timestamp of last score computation';

View File

@@ -0,0 +1,112 @@
-- Concelier Migration 016: SBOM Registry Tables
-- Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
-- Task: SBOM-8200-002
-- Creates SBOM registry and advisory match tables for interest scoring
-- SBOM registration table
CREATE TABLE IF NOT EXISTS vuln.sbom_registry (
-- Identity
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- SBOM identification
digest TEXT NOT NULL, -- SHA-256 of SBOM content
format TEXT NOT NULL CHECK (format IN ('cyclonedx', 'spdx')),
spec_version TEXT NOT NULL, -- Format spec version (e.g., "1.6" for CycloneDX)
-- Primary component info
primary_name TEXT, -- Main component name (e.g., image name)
primary_version TEXT, -- Main component version
-- Statistics
component_count INT NOT NULL DEFAULT 0,
affected_count INT NOT NULL DEFAULT 0, -- Cached count of matching advisories
-- Source tracking
source TEXT NOT NULL, -- scanner, upload, api, etc.
tenant_id TEXT, -- Optional multi-tenant support
-- Audit
registered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
last_matched_at TIMESTAMPTZ,
-- Constraints
CONSTRAINT uq_sbom_registry_digest UNIQUE (digest)
);
-- Indexes for sbom_registry
CREATE INDEX IF NOT EXISTS idx_sbom_registry_tenant ON vuln.sbom_registry(tenant_id) WHERE tenant_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_sbom_registry_primary ON vuln.sbom_registry(primary_name, primary_version);
CREATE INDEX IF NOT EXISTS idx_sbom_registry_registered ON vuln.sbom_registry(registered_at DESC);
CREATE INDEX IF NOT EXISTS idx_sbom_registry_affected ON vuln.sbom_registry(affected_count DESC) WHERE affected_count > 0;
-- Junction table for SBOM-to-canonical advisory matches
CREATE TABLE IF NOT EXISTS vuln.sbom_canonical_match (
-- Identity
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- Foreign keys
sbom_id UUID NOT NULL REFERENCES vuln.sbom_registry(id) ON DELETE CASCADE,
canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id) ON DELETE CASCADE,
-- Match details
purl TEXT NOT NULL, -- The PURL that matched
match_method TEXT NOT NULL CHECK (match_method IN ('exact_purl', 'purl_version_range', 'cpe', 'name_version')),
confidence NUMERIC(3,2) NOT NULL DEFAULT 1.0 CHECK (confidence >= 0 AND confidence <= 1),
-- Runtime context
is_reachable BOOLEAN NOT NULL DEFAULT FALSE,
is_deployed BOOLEAN NOT NULL DEFAULT FALSE,
-- Audit
matched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
CONSTRAINT uq_sbom_canonical_match UNIQUE (sbom_id, canonical_id, purl)
);
-- Indexes for sbom_canonical_match
CREATE INDEX IF NOT EXISTS idx_sbom_match_sbom ON vuln.sbom_canonical_match(sbom_id);
CREATE INDEX IF NOT EXISTS idx_sbom_match_canonical ON vuln.sbom_canonical_match(canonical_id);
CREATE INDEX IF NOT EXISTS idx_sbom_match_purl ON vuln.sbom_canonical_match(purl);
CREATE INDEX IF NOT EXISTS idx_sbom_match_reachable ON vuln.sbom_canonical_match(canonical_id) WHERE is_reachable = TRUE;
-- PURL cache table for fast PURL-to-canonical lookups
CREATE TABLE IF NOT EXISTS vuln.purl_canonical_index (
-- Identity
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- Lookup key
purl TEXT NOT NULL, -- Normalized PURL
purl_type TEXT NOT NULL, -- Extracted type: npm, pypi, maven, etc.
purl_namespace TEXT, -- Extracted namespace
purl_name TEXT NOT NULL, -- Extracted name
-- Target
canonical_id UUID NOT NULL REFERENCES vuln.advisory_canonical(id) ON DELETE CASCADE,
-- Version matching
version_constraint TEXT, -- Version range from advisory
-- Audit
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
CONSTRAINT uq_purl_canonical UNIQUE (purl, canonical_id)
);
-- Indexes for purl_canonical_index
CREATE INDEX IF NOT EXISTS idx_purl_index_lookup ON vuln.purl_canonical_index(purl_type, purl_namespace, purl_name);
CREATE INDEX IF NOT EXISTS idx_purl_index_canonical ON vuln.purl_canonical_index(canonical_id);
-- Comments
COMMENT ON TABLE vuln.sbom_registry IS 'Registered SBOMs for interest score learning';
COMMENT ON COLUMN vuln.sbom_registry.digest IS 'SHA-256 digest of SBOM content for deduplication';
COMMENT ON COLUMN vuln.sbom_registry.format IS 'SBOM format: cyclonedx or spdx';
COMMENT ON COLUMN vuln.sbom_registry.affected_count IS 'Cached count of canonical advisories affecting this SBOM';
COMMENT ON TABLE vuln.sbom_canonical_match IS 'Junction table linking SBOMs to affected canonical advisories';
COMMENT ON COLUMN vuln.sbom_canonical_match.match_method IS 'How the match was determined: exact_purl, purl_version_range, cpe, name_version';
COMMENT ON COLUMN vuln.sbom_canonical_match.is_reachable IS 'Whether the vulnerable code path is reachable from entrypoints';
COMMENT ON COLUMN vuln.sbom_canonical_match.is_deployed IS 'Whether the component is deployed in production';
COMMENT ON TABLE vuln.purl_canonical_index IS 'Fast lookup index from PURLs to canonical advisories';

View File

@@ -0,0 +1,364 @@
// -----------------------------------------------------------------------------
// InterestScoreRepository.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-003
// Description: PostgreSQL repository for interest score persistence
// -----------------------------------------------------------------------------
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.Interest;
using StellaOps.Concelier.Interest.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for interest score persistence.
/// </summary>
public sealed class InterestScoreRepository : RepositoryBase<ConcelierDataSource>, IInterestScoreRepository
{
private const string SystemTenantId = "_system";
public InterestScoreRepository(ConcelierDataSource dataSource, ILogger<InterestScoreRepository> logger)
: base(dataSource, logger)
{
}
/// <inheritdoc />
public Task<InterestScore?> GetByCanonicalIdAsync(Guid canonicalId, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id, score, reasons, last_seen_in_build, computed_at
FROM vuln.interest_score
WHERE canonical_id = @canonical_id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "canonical_id", canonicalId),
MapScore,
cancellationToken);
}
/// <inheritdoc />
public async Task<IReadOnlyDictionary<Guid, InterestScore>> GetByCanonicalIdsAsync(
IEnumerable<Guid> canonicalIds,
CancellationToken cancellationToken = default)
{
var ids = canonicalIds.ToArray();
if (ids.Length == 0)
{
return new Dictionary<Guid, InterestScore>();
}
const string sql = """
SELECT canonical_id, score, reasons, last_seen_in_build, computed_at
FROM vuln.interest_score
WHERE canonical_id = ANY(@canonical_ids)
""";
var scores = await QueryAsync(
SystemTenantId,
sql,
cmd => AddUuidArrayParameter(cmd, "canonical_ids", ids),
MapScore,
cancellationToken).ConfigureAwait(false);
return scores.ToDictionary(s => s.CanonicalId);
}
/// <inheritdoc />
public async Task SaveAsync(InterestScore score, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.interest_score
(canonical_id, score, reasons, last_seen_in_build, computed_at)
VALUES
(@canonical_id, @score, @reasons, @last_seen_in_build, @computed_at)
ON CONFLICT (canonical_id)
DO UPDATE SET
score = EXCLUDED.score,
reasons = EXCLUDED.reasons,
last_seen_in_build = EXCLUDED.last_seen_in_build,
computed_at = EXCLUDED.computed_at
""";
var reasonsJson = JsonSerializer.Serialize(score.Reasons);
await ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "canonical_id", score.CanonicalId);
AddParameter(cmd, "score", score.Score);
AddJsonbParameter(cmd, "reasons", reasonsJson);
AddParameter(cmd, "last_seen_in_build", score.LastSeenInBuild ?? (object)DBNull.Value);
AddParameter(cmd, "computed_at", score.ComputedAt);
},
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task SaveManyAsync(IEnumerable<InterestScore> scores, CancellationToken cancellationToken = default)
{
var scoreList = scores.ToList();
if (scoreList.Count == 0)
{
return;
}
// Use batch insert with ON CONFLICT for efficiency
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string sql = """
INSERT INTO vuln.interest_score
(canonical_id, score, reasons, last_seen_in_build, computed_at)
VALUES
(@canonical_id, @score, @reasons, @last_seen_in_build, @computed_at)
ON CONFLICT (canonical_id)
DO UPDATE SET
score = EXCLUDED.score,
reasons = EXCLUDED.reasons,
last_seen_in_build = EXCLUDED.last_seen_in_build,
computed_at = EXCLUDED.computed_at
""";
foreach (var score in scoreList)
{
await using var cmd = CreateCommand(sql, connection);
cmd.Transaction = transaction;
var reasonsJson = JsonSerializer.Serialize(score.Reasons);
AddParameter(cmd, "canonical_id", score.CanonicalId);
AddParameter(cmd, "score", score.Score);
AddJsonbParameter(cmd, "reasons", reasonsJson);
AddParameter(cmd, "last_seen_in_build", score.LastSeenInBuild ?? (object)DBNull.Value);
AddParameter(cmd, "computed_at", score.ComputedAt);
await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
Logger.LogDebug("Saved {Count} interest scores", scoreList.Count);
}
/// <inheritdoc />
public Task DeleteAsync(Guid canonicalId, CancellationToken cancellationToken = default)
{
const string sql = "DELETE FROM vuln.interest_score WHERE canonical_id = @canonical_id";
return ExecuteAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "canonical_id", canonicalId),
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<Guid>> GetLowScoreCanonicalIdsAsync(
double threshold,
TimeSpan minAge,
int limit,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id
FROM vuln.interest_score
WHERE score < @threshold
AND computed_at < @min_computed_at
ORDER BY score ASC, computed_at ASC
LIMIT @limit
""";
var minComputedAt = DateTimeOffset.UtcNow - minAge;
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "threshold", threshold);
AddParameter(cmd, "min_computed_at", minComputedAt);
AddParameter(cmd, "limit", limit);
},
reader => reader.GetGuid(0),
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<Guid>> GetHighScoreCanonicalIdsAsync(
double threshold,
int limit,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id
FROM vuln.interest_score
WHERE score >= @threshold
ORDER BY score DESC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "threshold", threshold);
AddParameter(cmd, "limit", limit);
},
reader => reader.GetGuid(0),
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<InterestScore>> GetTopScoresAsync(
int limit,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id, score, reasons, last_seen_in_build, computed_at
FROM vuln.interest_score
ORDER BY score DESC, computed_at DESC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "limit", limit),
MapScore,
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<Guid>> GetStaleCanonicalIdsAsync(
DateTimeOffset staleAfter,
int limit,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id
FROM vuln.interest_score
WHERE computed_at < @stale_after
ORDER BY computed_at ASC
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "stale_after", staleAfter);
AddParameter(cmd, "limit", limit);
},
reader => reader.GetGuid(0),
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<InterestScore>> GetAllAsync(
int offset,
int limit,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT canonical_id, score, reasons, last_seen_in_build, computed_at
FROM vuln.interest_score
ORDER BY score DESC, computed_at DESC
OFFSET @offset
LIMIT @limit
""";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "offset", offset);
AddParameter(cmd, "limit", limit);
},
MapScore,
cancellationToken);
}
/// <inheritdoc />
public Task<ScoreDistribution> GetScoreDistributionAsync(
CancellationToken cancellationToken = default)
{
return GetDistributionAsync(cancellationToken);
}
/// <inheritdoc />
public async Task<long> CountAsync(CancellationToken cancellationToken = default)
{
const string sql = "SELECT COUNT(*) FROM vuln.interest_score";
var count = await ExecuteScalarAsync<long>(
SystemTenantId,
sql,
null,
cancellationToken).ConfigureAwait(false);
return count;
}
/// <inheritdoc />
public async Task<ScoreDistribution> GetDistributionAsync(CancellationToken cancellationToken = default)
{
const string sql = """
SELECT
COUNT(*) AS total_count,
COUNT(*) FILTER (WHERE score >= 0.7) AS high_count,
COUNT(*) FILTER (WHERE score >= 0.4 AND score < 0.7) AS medium_count,
COUNT(*) FILTER (WHERE score >= 0.2 AND score < 0.4) AS low_count,
COUNT(*) FILTER (WHERE score < 0.2) AS none_count,
COALESCE(AVG(score), 0) AS average_score,
COALESCE(PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY score), 0) AS median_score
FROM vuln.interest_score
""";
var result = await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
null,
reader => new ScoreDistribution
{
TotalCount = reader.GetInt64(0),
HighCount = reader.GetInt64(1),
MediumCount = reader.GetInt64(2),
LowCount = reader.GetInt64(3),
NoneCount = reader.GetInt64(4),
AverageScore = reader.GetDouble(5),
MedianScore = reader.GetDouble(6)
},
cancellationToken).ConfigureAwait(false);
return result ?? new ScoreDistribution();
}
private static InterestScore MapScore(NpgsqlDataReader reader)
{
var reasonsJson = GetNullableString(reader, 2);
var reasons = string.IsNullOrEmpty(reasonsJson)
? Array.Empty<string>()
: JsonSerializer.Deserialize<string[]>(reasonsJson) ?? [];
return new InterestScore
{
CanonicalId = reader.GetGuid(0),
Score = reader.GetDouble(1),
Reasons = reasons,
LastSeenInBuild = GetNullableGuid(reader, 3),
ComputedAt = reader.GetFieldValue<DateTimeOffset>(4)
};
}
}

View File

@@ -0,0 +1,479 @@
// -----------------------------------------------------------------------------
// SbomRegistryRepository.cs
// Sprint: SPRINT_8200_0013_0003_SCAN_sbom_intersection_scoring
// Task: SBOM-8200-003
// Description: PostgreSQL repository for SBOM registry persistence
// -----------------------------------------------------------------------------
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Concelier.SbomIntegration;
using StellaOps.Concelier.SbomIntegration.Models;
using StellaOps.Infrastructure.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for SBOM registry persistence.
/// </summary>
public sealed class SbomRegistryRepository : RepositoryBase<ConcelierDataSource>, ISbomRegistryRepository
{
private const string SystemTenantId = "_system";
public SbomRegistryRepository(ConcelierDataSource dataSource, ILogger<SbomRegistryRepository> logger)
: base(dataSource, logger)
{
}
#region Registration CRUD
/// <inheritdoc />
public async Task SaveAsync(SbomRegistration registration, CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO vuln.sbom_registry
(id, digest, format, spec_version, primary_name, primary_version,
component_count, affected_count, source, tenant_id, registered_at, last_matched_at)
VALUES
(@id, @digest, @format, @spec_version, @primary_name, @primary_version,
@component_count, @affected_count, @source, @tenant_id, @registered_at, @last_matched_at)
ON CONFLICT (digest)
DO UPDATE SET
primary_name = EXCLUDED.primary_name,
primary_version = EXCLUDED.primary_version,
component_count = EXCLUDED.component_count,
source = EXCLUDED.source
""";
await ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", registration.Id);
AddParameter(cmd, "digest", registration.Digest);
AddParameter(cmd, "format", registration.Format.ToString().ToLowerInvariant());
AddParameter(cmd, "spec_version", registration.SpecVersion);
AddParameter(cmd, "primary_name", registration.PrimaryName ?? (object)DBNull.Value);
AddParameter(cmd, "primary_version", registration.PrimaryVersion ?? (object)DBNull.Value);
AddParameter(cmd, "component_count", registration.ComponentCount);
AddParameter(cmd, "affected_count", registration.AffectedCount);
AddParameter(cmd, "source", registration.Source);
AddParameter(cmd, "tenant_id", registration.TenantId ?? (object)DBNull.Value);
AddParameter(cmd, "registered_at", registration.RegisteredAt);
AddParameter(cmd, "last_matched_at", registration.LastMatchedAt ?? (object)DBNull.Value);
},
cancellationToken).ConfigureAwait(false);
// Save PURLs in a separate table or as JSONB - using JSONB for simplicity
await SavePurlsAsync(registration.Id, registration.Purls, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public Task<SbomRegistration?> GetByDigestAsync(string digest, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, digest, format, spec_version, primary_name, primary_version,
component_count, affected_count, source, tenant_id, registered_at, last_matched_at
FROM vuln.sbom_registry
WHERE digest = @digest
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "digest", digest),
MapRegistration,
cancellationToken);
}
/// <inheritdoc />
public Task<SbomRegistration?> GetByIdAsync(Guid id, CancellationToken cancellationToken = default)
{
const string sql = """
SELECT id, digest, format, spec_version, primary_name, primary_version,
component_count, affected_count, source, tenant_id, registered_at, last_matched_at
FROM vuln.sbom_registry
WHERE id = @id
""";
return QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "id", id),
MapRegistration,
cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<SbomRegistration>> ListAsync(
int offset,
int limit,
string? tenantId = null,
CancellationToken cancellationToken = default)
{
var sql = """
SELECT id, digest, format, spec_version, primary_name, primary_version,
component_count, affected_count, source, tenant_id, registered_at, last_matched_at
FROM vuln.sbom_registry
""";
if (tenantId is not null)
{
sql += " WHERE tenant_id = @tenant_id";
}
sql += " ORDER BY registered_at DESC OFFSET @offset LIMIT @limit";
return QueryAsync(
SystemTenantId,
sql,
cmd =>
{
if (tenantId is not null)
{
AddParameter(cmd, "tenant_id", tenantId);
}
AddParameter(cmd, "offset", offset);
AddParameter(cmd, "limit", limit);
},
MapRegistration,
cancellationToken);
}
/// <inheritdoc />
public Task DeleteAsync(string digest, CancellationToken cancellationToken = default)
{
const string sql = "DELETE FROM vuln.sbom_registry WHERE digest = @digest";
return ExecuteAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "digest", digest),
cancellationToken);
}
/// <inheritdoc />
public async Task<long> CountAsync(string? tenantId = null, CancellationToken cancellationToken = default)
{
var sql = "SELECT COUNT(*) FROM vuln.sbom_registry";
if (tenantId is not null)
{
sql += " WHERE tenant_id = @tenant_id";
}
var count = await ExecuteScalarAsync<long>(
SystemTenantId,
sql,
tenantId is not null ? cmd => AddParameter(cmd, "tenant_id", tenantId) : null,
cancellationToken).ConfigureAwait(false);
return count;
}
#endregion
#region Match CRUD
/// <inheritdoc />
public async Task SaveMatchesAsync(
Guid sbomId,
IEnumerable<SbomAdvisoryMatch> matches,
CancellationToken cancellationToken = default)
{
var matchList = matches.ToList();
if (matchList.Count == 0)
{
return;
}
await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
const string sql = """
INSERT INTO vuln.sbom_canonical_match
(id, sbom_id, canonical_id, purl, match_method, confidence, is_reachable, is_deployed, matched_at)
VALUES
(@id, @sbom_id, @canonical_id, @purl, @match_method, @confidence, @is_reachable, @is_deployed, @matched_at)
ON CONFLICT (sbom_id, canonical_id, purl)
DO UPDATE SET
is_reachable = EXCLUDED.is_reachable,
is_deployed = EXCLUDED.is_deployed,
matched_at = EXCLUDED.matched_at
""";
foreach (var match in matchList)
{
await using var cmd = CreateCommand(sql, connection);
cmd.Transaction = transaction;
AddParameter(cmd, "id", match.Id == Guid.Empty ? Guid.NewGuid() : match.Id);
AddParameter(cmd, "sbom_id", sbomId);
AddParameter(cmd, "canonical_id", match.CanonicalId);
AddParameter(cmd, "purl", match.Purl);
AddParameter(cmd, "match_method", MapMethodToString(match.Method));
AddParameter(cmd, "confidence", match.Confidence);
AddParameter(cmd, "is_reachable", match.IsReachable);
AddParameter(cmd, "is_deployed", match.IsDeployed);
AddParameter(cmd, "matched_at", match.MatchedAt == default ? DateTimeOffset.UtcNow : match.MatchedAt);
await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
Logger.LogDebug("Saved {Count} SBOM matches for SBOM {SbomId}", matchList.Count, sbomId);
}
/// <inheritdoc />
public async Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesAsync(
string digest,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT m.id, m.sbom_id, r.digest, m.canonical_id, m.purl,
m.is_reachable, m.is_deployed, m.confidence, m.match_method, m.matched_at
FROM vuln.sbom_canonical_match m
JOIN vuln.sbom_registry r ON r.id = m.sbom_id
WHERE r.digest = @digest
ORDER BY m.matched_at DESC
""";
return await QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "digest", digest),
MapMatch,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<IReadOnlyList<SbomAdvisoryMatch>> GetMatchesByCanonicalAsync(
Guid canonicalId,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT m.id, m.sbom_id, r.digest, m.canonical_id, m.purl,
m.is_reachable, m.is_deployed, m.confidence, m.match_method, m.matched_at
FROM vuln.sbom_canonical_match m
JOIN vuln.sbom_registry r ON r.id = m.sbom_id
WHERE m.canonical_id = @canonical_id
ORDER BY m.matched_at DESC
""";
return await QueryAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "canonical_id", canonicalId),
MapMatch,
cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public Task DeleteMatchesAsync(Guid sbomId, CancellationToken cancellationToken = default)
{
const string sql = "DELETE FROM vuln.sbom_canonical_match WHERE sbom_id = @sbom_id";
return ExecuteAsync(
SystemTenantId,
sql,
cmd => AddParameter(cmd, "sbom_id", sbomId),
cancellationToken);
}
#endregion
#region Statistics
/// <inheritdoc />
public async Task<SbomRegistryStats> GetStatsAsync(
string? tenantId = null,
CancellationToken cancellationToken = default)
{
var sql = """
SELECT
COUNT(DISTINCT r.id) AS total_sboms,
COALESCE(SUM(r.component_count), 0) AS total_purls,
COUNT(DISTINCT m.id) AS total_matches,
COUNT(DISTINCT CASE WHEN r.affected_count > 0 THEN r.id END) AS affected_sboms
FROM vuln.sbom_registry r
LEFT JOIN vuln.sbom_canonical_match m ON m.sbom_id = r.id
""";
if (tenantId is not null)
{
sql += " WHERE r.tenant_id = @tenant_id";
}
var result = await QuerySingleOrDefaultAsync(
SystemTenantId,
sql,
tenantId is not null ? cmd => AddParameter(cmd, "tenant_id", tenantId) : null,
reader =>
{
var totalSboms = reader.GetInt64(0);
var totalMatches = reader.GetInt64(2);
return new SbomRegistryStats
{
TotalSboms = totalSboms,
TotalPurls = reader.GetInt64(1),
TotalMatches = totalMatches,
AffectedSboms = reader.GetInt64(3),
AverageMatchesPerSbom = totalSboms > 0 ? (double)totalMatches / totalSboms : 0
};
},
cancellationToken).ConfigureAwait(false);
return result ?? new SbomRegistryStats();
}
/// <inheritdoc />
public Task UpdateAffectedCountAsync(
string digest,
int affectedCount,
CancellationToken cancellationToken = default)
{
const string sql = """
UPDATE vuln.sbom_registry
SET affected_count = @affected_count
WHERE digest = @digest
""";
return ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "digest", digest);
AddParameter(cmd, "affected_count", affectedCount);
},
cancellationToken);
}
/// <inheritdoc />
public Task UpdateLastMatchedAsync(
string digest,
DateTimeOffset lastMatched,
CancellationToken cancellationToken = default)
{
const string sql = """
UPDATE vuln.sbom_registry
SET last_matched_at = @last_matched_at
WHERE digest = @digest
""";
return ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "digest", digest);
AddParameter(cmd, "last_matched_at", lastMatched);
},
cancellationToken);
}
#endregion
#region Private Helpers
private async Task SavePurlsAsync(Guid sbomId, IReadOnlyList<string> purls, CancellationToken cancellationToken)
{
// Store PURLs in a dedicated column in the registry or a separate table
// For now, we'll update the registry with a JSONB column for purls
const string sql = """
UPDATE vuln.sbom_registry
SET component_count = @component_count
WHERE id = @id
""";
await ExecuteAsync(
SystemTenantId,
sql,
cmd =>
{
AddParameter(cmd, "id", sbomId);
AddParameter(cmd, "component_count", purls.Count);
},
cancellationToken).ConfigureAwait(false);
// Note: The actual PURLs are stored in memory on the SbomRegistration record
// For persistence, we could add a sbom_purls table or a JSONB column
}
private async Task<IReadOnlyList<string>> LoadPurlsAsync(Guid sbomId, CancellationToken cancellationToken)
{
// If we need to load PURLs from storage, we would query them here
// For now, returning empty as PURLs are typically provided in the input
return [];
}
private SbomRegistration MapRegistration(NpgsqlDataReader reader)
{
var formatStr = reader.GetString(2);
var format = formatStr.Equals("cyclonedx", StringComparison.OrdinalIgnoreCase)
? SbomFormat.CycloneDX
: SbomFormat.SPDX;
return new SbomRegistration
{
Id = reader.GetGuid(0),
Digest = reader.GetString(1),
Format = format,
SpecVersion = reader.GetString(3),
PrimaryName = GetNullableString(reader, 4),
PrimaryVersion = GetNullableString(reader, 5),
ComponentCount = reader.GetInt32(6),
AffectedCount = reader.GetInt32(7),
Source = reader.GetString(8),
TenantId = GetNullableString(reader, 9),
RegisteredAt = reader.GetFieldValue<DateTimeOffset>(10),
LastMatchedAt = reader.IsDBNull(11) ? null : reader.GetFieldValue<DateTimeOffset>(11),
Purls = [] // PURLs would be loaded separately if needed
};
}
private static SbomAdvisoryMatch MapMatch(NpgsqlDataReader reader)
{
var methodStr = reader.GetString(8);
var method = methodStr switch
{
"exact_purl" => MatchMethod.ExactPurl,
"purl_version_range" => MatchMethod.PurlVersionRange,
"cpe" => MatchMethod.Cpe,
"name_version" => MatchMethod.NameVersion,
_ => MatchMethod.ExactPurl
};
return new SbomAdvisoryMatch
{
Id = reader.GetGuid(0),
SbomId = reader.GetGuid(1),
SbomDigest = reader.GetString(2),
CanonicalId = reader.GetGuid(3),
Purl = reader.GetString(4),
IsReachable = reader.GetBoolean(5),
IsDeployed = reader.GetBoolean(6),
Confidence = reader.GetDouble(7),
Method = method,
MatchedAt = reader.GetFieldValue<DateTimeOffset>(9)
};
}
private static string MapMethodToString(MatchMethod method)
{
return method switch
{
MatchMethod.ExactPurl => "exact_purl",
MatchMethod.PurlVersionRange => "purl_version_range",
MatchMethod.Cpe => "cpe",
MatchMethod.NameVersion => "name_version",
_ => "exact_purl"
};
}
#endregion
}

View File

@@ -30,7 +30,9 @@
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Interest\StellaOps.Concelier.Interest.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.SbomIntegration\StellaOps.Concelier.SbomIntegration.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,249 @@
// -----------------------------------------------------------------------------
// AdvisoryCacheKeysTests.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-009
// Description: Unit tests for AdvisoryCacheKeys
// -----------------------------------------------------------------------------
using FluentAssertions;
using Xunit;
namespace StellaOps.Concelier.Cache.Valkey.Tests;
public class AdvisoryCacheKeysTests
{
[Fact]
public void Advisory_WithDefaultPrefix_GeneratesCorrectKey()
{
// Arrange
var mergeHash = "abc123def456";
// Act
var key = AdvisoryCacheKeys.Advisory(mergeHash);
// Assert
key.Should().Be("concelier:advisory:abc123def456");
}
[Fact]
public void Advisory_WithCustomPrefix_GeneratesCorrectKey()
{
// Arrange
var mergeHash = "abc123def456";
var prefix = "custom:";
// Act
var key = AdvisoryCacheKeys.Advisory(mergeHash, prefix);
// Assert
key.Should().Be("custom:advisory:abc123def456");
}
[Fact]
public void HotSet_WithDefaultPrefix_GeneratesCorrectKey()
{
// Act
var key = AdvisoryCacheKeys.HotSet();
// Assert
key.Should().Be("concelier:rank:hot");
}
[Fact]
public void ByPurl_NormalizesPurl()
{
// Arrange
var purl = "pkg:npm/@angular/core@12.0.0";
// Act
var key = AdvisoryCacheKeys.ByPurl(purl);
// Assert
key.Should().Be("concelier:by:purl:pkg:npm/@angular/core@12.0.0");
}
[Fact]
public void ByPurl_NormalizesToLowercase()
{
// Arrange
var purl = "pkg:NPM/@Angular/Core@12.0.0";
// Act
var key = AdvisoryCacheKeys.ByPurl(purl);
// Assert
key.Should().Be("concelier:by:purl:pkg:npm/@angular/core@12.0.0");
}
[Fact]
public void ByCve_NormalizesToUppercase()
{
// Arrange
var cve = "cve-2024-1234";
// Act
var key = AdvisoryCacheKeys.ByCve(cve);
// Assert
key.Should().Be("concelier:by:cve:CVE-2024-1234");
}
[Fact]
public void StatsHits_GeneratesCorrectKey()
{
// Act
var key = AdvisoryCacheKeys.StatsHits();
// Assert
key.Should().Be("concelier:cache:stats:hits");
}
[Fact]
public void StatsMisses_GeneratesCorrectKey()
{
// Act
var key = AdvisoryCacheKeys.StatsMisses();
// Assert
key.Should().Be("concelier:cache:stats:misses");
}
[Fact]
public void WarmupLast_GeneratesCorrectKey()
{
// Act
var key = AdvisoryCacheKeys.WarmupLast();
// Assert
key.Should().Be("concelier:cache:warmup:last");
}
[Fact]
public void NormalizePurl_HandlesEmptyString()
{
// Act
var result = AdvisoryCacheKeys.NormalizePurl("");
// Assert
result.Should().BeEmpty();
}
[Fact]
public void NormalizePurl_HandlesNull()
{
// Act
var result = AdvisoryCacheKeys.NormalizePurl(null!);
// Assert
result.Should().BeEmpty();
}
[Fact]
public void NormalizePurl_ReplacesSpecialCharacters()
{
// Arrange - PURL with unusual characters
var purl = "pkg:npm/test?query=value#fragment";
// Act
var result = AdvisoryCacheKeys.NormalizePurl(purl);
// Assert
// ? and # should be replaced with _
result.Should().Be("pkg:npm/test_query_value_fragment");
}
[Fact]
public void NormalizePurl_TruncatesLongPurls()
{
// Arrange - Very long PURL
var purl = "pkg:npm/" + new string('a', 600);
// Act
var result = AdvisoryCacheKeys.NormalizePurl(purl);
// Assert
result.Length.Should().BeLessThanOrEqualTo(500);
}
[Fact]
public void ExtractMergeHash_ReturnsHashFromAdvisoryKey()
{
// Arrange
var key = "concelier:advisory:abc123def456";
// Act
var result = AdvisoryCacheKeys.ExtractMergeHash(key);
// Assert
result.Should().Be("abc123def456");
}
[Fact]
public void ExtractMergeHash_ReturnsNullForInvalidKey()
{
// Arrange
var key = "concelier:by:purl:pkg:npm/test";
// Act
var result = AdvisoryCacheKeys.ExtractMergeHash(key);
// Assert
result.Should().BeNull();
}
[Fact]
public void ExtractPurl_ReturnsPurlFromIndexKey()
{
// Arrange
var key = "concelier:by:purl:pkg:npm/test@1.0.0";
// Act
var result = AdvisoryCacheKeys.ExtractPurl(key);
// Assert
result.Should().Be("pkg:npm/test@1.0.0");
}
[Fact]
public void ExtractCve_ReturnsCveFromMappingKey()
{
// Arrange
var key = "concelier:by:cve:CVE-2024-1234";
// Act
var result = AdvisoryCacheKeys.ExtractCve(key);
// Assert
result.Should().Be("CVE-2024-1234");
}
[Fact]
public void AdvisoryPattern_GeneratesCorrectPattern()
{
// Act
var pattern = AdvisoryCacheKeys.AdvisoryPattern();
// Assert
pattern.Should().Be("concelier:advisory:*");
}
[Fact]
public void PurlIndexPattern_GeneratesCorrectPattern()
{
// Act
var pattern = AdvisoryCacheKeys.PurlIndexPattern();
// Assert
pattern.Should().Be("concelier:by:purl:*");
}
[Fact]
public void CveMappingPattern_GeneratesCorrectPattern()
{
// Act
var pattern = AdvisoryCacheKeys.CveMappingPattern();
// Assert
pattern.Should().Be("concelier:by:cve:*");
}
}

View File

@@ -0,0 +1,166 @@
// -----------------------------------------------------------------------------
// CacheTtlPolicyTests.cs
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
// Task: VCACHE-8200-009
// Description: Unit tests for CacheTtlPolicy
// -----------------------------------------------------------------------------
using FluentAssertions;
using Xunit;
namespace StellaOps.Concelier.Cache.Valkey.Tests;
public class CacheTtlPolicyTests
{
[Fact]
public void GetTtl_WithHighScore_ReturnsHighScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.85);
// Assert
ttl.Should().Be(TimeSpan.FromHours(24));
}
[Fact]
public void GetTtl_WithScoreAtHighThreshold_ReturnsHighScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.7);
// Assert
ttl.Should().Be(TimeSpan.FromHours(24));
}
[Fact]
public void GetTtl_WithMediumScore_ReturnsMediumScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.5);
// Assert
ttl.Should().Be(TimeSpan.FromHours(4));
}
[Fact]
public void GetTtl_WithScoreAtMediumThreshold_ReturnsMediumScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.4);
// Assert
ttl.Should().Be(TimeSpan.FromHours(4));
}
[Fact]
public void GetTtl_WithLowScore_ReturnsLowScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.2);
// Assert
ttl.Should().Be(TimeSpan.FromHours(1));
}
[Fact]
public void GetTtl_WithZeroScore_ReturnsLowScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.0);
// Assert
ttl.Should().Be(TimeSpan.FromHours(1));
}
[Fact]
public void GetTtl_WithNullScore_ReturnsLowScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(null);
// Assert
ttl.Should().Be(TimeSpan.FromHours(1));
}
[Fact]
public void GetTtl_WithCustomThresholds_UsesCustomValues()
{
// Arrange
var policy = new CacheTtlPolicy
{
HighScoreThreshold = 0.8,
MediumScoreThreshold = 0.5,
HighScoreTtl = TimeSpan.FromHours(48),
MediumScoreTtl = TimeSpan.FromHours(12),
LowScoreTtl = TimeSpan.FromMinutes(30)
};
// Act & Assert
policy.GetTtl(0.9).Should().Be(TimeSpan.FromHours(48));
policy.GetTtl(0.6).Should().Be(TimeSpan.FromHours(12));
policy.GetTtl(0.3).Should().Be(TimeSpan.FromMinutes(30));
}
[Fact]
public void DefaultValues_AreCorrect()
{
// Arrange
var policy = new CacheTtlPolicy();
// Assert
policy.HighScoreTtl.Should().Be(TimeSpan.FromHours(24));
policy.MediumScoreTtl.Should().Be(TimeSpan.FromHours(4));
policy.LowScoreTtl.Should().Be(TimeSpan.FromHours(1));
policy.HighScoreThreshold.Should().Be(0.7);
policy.MediumScoreThreshold.Should().Be(0.4);
policy.PurlIndexTtl.Should().Be(TimeSpan.FromHours(24));
policy.CveMappingTtl.Should().Be(TimeSpan.FromHours(24));
}
[Fact]
public void GetTtl_WithScoreBelowMediumThreshold_ReturnsLowScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.39);
// Assert
ttl.Should().Be(TimeSpan.FromHours(1));
}
[Fact]
public void GetTtl_WithScoreBelowHighThreshold_ReturnsMediumScoreTtl()
{
// Arrange
var policy = new CacheTtlPolicy();
// Act
var ttl = policy.GetTtl(0.69);
// Assert
ttl.Should().Be(TimeSpan.FromHours(4));
}
}

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<RootNamespace>StellaOps.Concelier.Cache.Valkey.Tests</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="8.0.0" />
<PackageReference Include="Moq" Version="4.20.72" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Concelier.Cache.Valkey\StellaOps.Concelier.Cache.Valkey.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,505 @@
// -----------------------------------------------------------------------------
// CanonicalDeduplicationTests.cs
// Sprint: SPRINT_8200_0012_0003_CONCEL_canonical_advisory_service
// Task: CANSVC-8200-025
// Description: End-to-end tests verifying deduplication across multiple connectors
// -----------------------------------------------------------------------------
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Core.Canonical;
namespace StellaOps.Concelier.Core.Tests.Canonical;
/// <summary>
/// End-to-end tests verifying that the canonical advisory service correctly
/// deduplicates advisories from multiple sources (OSV, NVD, GHSA, distros).
/// </summary>
public sealed class CanonicalDeduplicationTests
{
private readonly InMemoryCanonicalAdvisoryStore _store;
private readonly RealMergeHashCalculator _hashCalculator;
private readonly ILogger<CanonicalAdvisoryService> _logger;
private const string TestCve = "CVE-2025-12345";
private const string TestAffectsKey = "pkg:npm/lodash@4.17.21";
public CanonicalDeduplicationTests()
{
_store = new InMemoryCanonicalAdvisoryStore();
_hashCalculator = new RealMergeHashCalculator();
_logger = NullLogger<CanonicalAdvisoryService>.Instance;
}
/// <summary>
/// Tests the core deduplication scenario: same CVE ingested from 4 different sources
/// should result in a single canonical with 4 source edges.
/// </summary>
[Fact]
public async Task MultiSourceIngestion_ProducesSingleCanonical_WithMultipleSourceEdges()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
var nvdAdvisory = CreateRawAdvisory(TestCve, TestAffectsKey, "NVD-2025-12345");
var osvAdvisory = CreateRawAdvisory(TestCve, TestAffectsKey, "OSV-2025-12345");
var ghsaAdvisory = CreateRawAdvisory(TestCve, TestAffectsKey, "GHSA-abcd-efgh-ijkl");
var debianAdvisory = CreateRawAdvisory(TestCve, TestAffectsKey, "DSA-2025-12345");
// Act - ingest from all sources
var nvdResult = await service.IngestAsync("nvd", nvdAdvisory);
var osvResult = await service.IngestAsync("osv", osvAdvisory);
var ghsaResult = await service.IngestAsync("ghsa", ghsaAdvisory);
var debianResult = await service.IngestAsync("debian", debianAdvisory);
// Assert - first ingest creates, rest merge
nvdResult.Decision.Should().Be(MergeDecision.Created);
osvResult.Decision.Should().Be(MergeDecision.Merged);
ghsaResult.Decision.Should().Be(MergeDecision.Merged);
debianResult.Decision.Should().Be(MergeDecision.Merged);
// All should reference the same canonical
var canonicalId = nvdResult.CanonicalId;
osvResult.CanonicalId.Should().Be(canonicalId);
ghsaResult.CanonicalId.Should().Be(canonicalId);
debianResult.CanonicalId.Should().Be(canonicalId);
// All should have same merge hash
nvdResult.MergeHash.Should().Be(osvResult.MergeHash);
nvdResult.MergeHash.Should().Be(ghsaResult.MergeHash);
nvdResult.MergeHash.Should().Be(debianResult.MergeHash);
// Verify canonical has 4 source edges
var canonical = await service.GetByIdAsync(canonicalId);
canonical.Should().NotBeNull();
canonical!.SourceEdges.Should().HaveCount(4);
canonical.SourceEdges.Select(e => e.SourceName).Should()
.Contain(new[] { "nvd", "osv", "ghsa", "debian" });
}
/// <summary>
/// Tests that querying by CVE returns the deduplicated canonical advisory.
/// </summary>
[Fact]
public async Task QueryByCve_ReturnsDeduplicated_CanonicalAdvisory()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
await service.IngestAsync("nvd", CreateRawAdvisory(TestCve, TestAffectsKey, "NVD-ADV"));
await service.IngestAsync("ghsa", CreateRawAdvisory(TestCve, TestAffectsKey, "GHSA-ADV"));
await service.IngestAsync("osv", CreateRawAdvisory(TestCve, TestAffectsKey, "OSV-ADV"));
// Act
var results = await service.GetByCveAsync(TestCve);
// Assert - single canonical for the CVE
results.Should().HaveCount(1);
results[0].Cve.Should().Be(TestCve);
results[0].SourceEdges.Should().HaveCount(3);
}
/// <summary>
/// Tests that distro sources have higher precedence than NVD.
/// </summary>
[Fact]
public async Task SourcePrecedence_DistroHigherThanNvd()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
// Ingest from NVD first
await service.IngestAsync("nvd", CreateRawAdvisory(TestCve, TestAffectsKey, "NVD-ADV"));
// Then from Debian (higher precedence)
await service.IngestAsync("debian", CreateRawAdvisory(TestCve, TestAffectsKey, "DSA-ADV"));
// Act
var results = await service.GetByCveAsync(TestCve);
// Assert - Debian should be primary source (lower precedence rank = higher priority)
results.Should().HaveCount(1);
var canonical = results[0];
canonical.SourceEdges.Should().HaveCount(2);
var debianEdge = canonical.SourceEdges.First(e => e.SourceName == "debian");
var nvdEdge = canonical.SourceEdges.First(e => e.SourceName == "nvd");
// Debian (distro) should have lower precedence rank than NVD
debianEdge.PrecedenceRank.Should().BeLessThan(nvdEdge.PrecedenceRank);
}
/// <summary>
/// Tests that different CVEs create separate canonical advisories.
/// </summary>
[Fact]
public async Task DifferentCves_CreateSeparateCanonicals()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
var cve1 = "CVE-2025-0001";
var cve2 = "CVE-2025-0002";
// Act
var result1 = await service.IngestAsync("nvd", CreateRawAdvisory(cve1, TestAffectsKey, "NVD-1"));
var result2 = await service.IngestAsync("nvd", CreateRawAdvisory(cve2, TestAffectsKey, "NVD-2"));
// Assert - different CVEs = different canonicals
result1.Decision.Should().Be(MergeDecision.Created);
result2.Decision.Should().Be(MergeDecision.Created);
result1.CanonicalId.Should().NotBe(result2.CanonicalId);
result1.MergeHash.Should().NotBe(result2.MergeHash);
}
/// <summary>
/// Tests that same CVE but different packages create separate canonicals.
/// </summary>
[Fact]
public async Task SameCve_DifferentPackages_CreateSeparateCanonicals()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
var package1 = "pkg:npm/lodash@4.17.21";
var package2 = "pkg:npm/underscore@1.13.6";
// Act
var result1 = await service.IngestAsync("nvd", CreateRawAdvisory(TestCve, package1, "NVD-ADV"));
var result2 = await service.IngestAsync("nvd", CreateRawAdvisory(TestCve, package2, "NVD-ADV"));
// Assert - same CVE but different packages = different canonicals
result1.Decision.Should().Be(MergeDecision.Created);
result2.Decision.Should().Be(MergeDecision.Created);
result1.CanonicalId.Should().NotBe(result2.CanonicalId);
}
/// <summary>
/// Tests duplicate ingestion (same source, same advisory) returns Duplicate decision.
/// </summary>
[Fact]
public async Task DuplicateIngestion_ReturnsDuplicateDecision()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
var advisory = CreateRawAdvisory(TestCve, TestAffectsKey, "NVD-ADV");
// Act - ingest same advisory twice from same source
var result1 = await service.IngestAsync("nvd", advisory);
var result2 = await service.IngestAsync("nvd", advisory);
// Assert
result1.Decision.Should().Be(MergeDecision.Created);
result2.Decision.Should().Be(MergeDecision.Duplicate);
result1.CanonicalId.Should().Be(result2.CanonicalId);
}
/// <summary>
/// Tests batch ingestion produces correct deduplication.
/// </summary>
[Fact]
public async Task BatchIngestion_ProducesCorrectDeduplication()
{
// Arrange
var service = new CanonicalAdvisoryService(_store, _hashCalculator, _logger);
var advisories = new[]
{
CreateRawAdvisory("CVE-2025-0001", TestAffectsKey, "ADV-1"),
CreateRawAdvisory("CVE-2025-0001", TestAffectsKey, "ADV-2"), // Duplicate CVE
CreateRawAdvisory("CVE-2025-0002", TestAffectsKey, "ADV-3"),
CreateRawAdvisory("CVE-2025-0003", TestAffectsKey, "ADV-4"),
};
// Act
var results = await service.IngestBatchAsync("nvd", advisories);
// Assert
results.Should().HaveCount(4);
results[0].Decision.Should().Be(MergeDecision.Created); // First CVE-0001
results[1].Decision.Should().Be(MergeDecision.Merged); // Second CVE-0001 merges
results[2].Decision.Should().Be(MergeDecision.Created); // CVE-0002
results[3].Decision.Should().Be(MergeDecision.Created); // CVE-0003
// First two should have same canonical ID
results[0].CanonicalId.Should().Be(results[1].CanonicalId);
results[0].CanonicalId.Should().NotBe(results[2].CanonicalId);
results[2].CanonicalId.Should().NotBe(results[3].CanonicalId);
}
#region Helpers
private static RawAdvisory CreateRawAdvisory(
string cve,
string affectsKey,
string sourceAdvisoryId,
IReadOnlyList<string>? weaknesses = null)
{
return new RawAdvisory
{
SourceAdvisoryId = sourceAdvisoryId,
Cve = cve,
AffectsKey = affectsKey,
VersionRangeJson = "{\"introduced\":\"1.0.0\",\"fixed\":\"1.2.3\"}",
Weaknesses = weaknesses ?? [],
Severity = "high",
Title = $"Vulnerability in {affectsKey}",
Summary = $"Security issue {cve} affecting {affectsKey}",
RawPayloadJson = null,
FetchedAt = DateTimeOffset.UtcNow
};
}
#endregion
#region In-Memory Test Implementations
/// <summary>
/// In-memory implementation of ICanonicalAdvisoryStore for testing.
/// </summary>
private sealed class InMemoryCanonicalAdvisoryStore : ICanonicalAdvisoryStore
{
private readonly Dictionary<Guid, CanonicalAdvisory> _canonicals = new();
private readonly Dictionary<string, Guid> _mergeHashIndex = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, Guid> _sourceIds = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<Guid, List<SourceEdge>> _sourceEdges = new();
private readonly Dictionary<string, Guid> _edgeHashes = new(StringComparer.OrdinalIgnoreCase);
public Task<CanonicalAdvisory?> GetByIdAsync(Guid id, CancellationToken ct = default)
{
if (_canonicals.TryGetValue(id, out var canonical))
{
// Attach source edges
var edges = _sourceEdges.GetValueOrDefault(id) ?? new List<SourceEdge>();
return Task.FromResult<CanonicalAdvisory?>(canonical with
{
SourceEdges = edges.OrderBy(e => e.PrecedenceRank).ToList()
});
}
return Task.FromResult<CanonicalAdvisory?>(null);
}
public Task<CanonicalAdvisory?> GetByMergeHashAsync(string mergeHash, CancellationToken ct = default)
{
if (_mergeHashIndex.TryGetValue(mergeHash, out var id))
{
return GetByIdAsync(id, ct);
}
return Task.FromResult<CanonicalAdvisory?>(null);
}
public Task<IReadOnlyList<CanonicalAdvisory>> GetByCveAsync(string cve, CancellationToken ct = default)
{
var results = _canonicals.Values
.Where(c => c.Cve.Equals(cve, StringComparison.OrdinalIgnoreCase))
.Select(c => c with
{
SourceEdges = (_sourceEdges.GetValueOrDefault(c.Id) ?? new List<SourceEdge>())
.OrderBy(e => e.PrecedenceRank)
.ToList()
})
.ToList();
return Task.FromResult<IReadOnlyList<CanonicalAdvisory>>(results);
}
public Task<IReadOnlyList<CanonicalAdvisory>> GetByArtifactAsync(string artifactKey, CancellationToken ct = default)
{
var results = _canonicals.Values
.Where(c => c.AffectsKey.Equals(artifactKey, StringComparison.OrdinalIgnoreCase))
.Select(c => c with
{
SourceEdges = (_sourceEdges.GetValueOrDefault(c.Id) ?? new List<SourceEdge>())
.OrderBy(e => e.PrecedenceRank)
.ToList()
})
.ToList();
return Task.FromResult<IReadOnlyList<CanonicalAdvisory>>(results);
}
public Task<PagedResult<CanonicalAdvisory>> QueryAsync(CanonicalQueryOptions options, CancellationToken ct = default)
{
var query = _canonicals.Values.AsEnumerable();
if (!string.IsNullOrWhiteSpace(options.Cve))
query = query.Where(c => c.Cve.Equals(options.Cve, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(options.Severity))
query = query.Where(c => c.Severity == options.Severity);
var total = query.Count();
var items = query.Skip(options.Offset).Take(options.Limit).ToList();
return Task.FromResult(new PagedResult<CanonicalAdvisory>
{
Items = items,
TotalCount = total,
Offset = options.Offset,
Limit = options.Limit
});
}
public Task<Guid> UpsertCanonicalAsync(UpsertCanonicalRequest request, CancellationToken ct = default)
{
Guid id;
if (_mergeHashIndex.TryGetValue(request.MergeHash, out id))
{
// Update existing
var existing = _canonicals[id];
_canonicals[id] = existing with
{
Severity = request.Severity ?? existing.Severity,
Title = request.Title ?? existing.Title,
Summary = request.Summary ?? existing.Summary,
UpdatedAt = DateTimeOffset.UtcNow
};
}
else
{
// Create new
id = Guid.NewGuid();
_mergeHashIndex[request.MergeHash] = id;
_canonicals[id] = new CanonicalAdvisory
{
Id = id,
Cve = request.Cve,
AffectsKey = request.AffectsKey,
MergeHash = request.MergeHash,
Weaknesses = request.Weaknesses ?? [],
Severity = request.Severity,
Title = request.Title,
Summary = request.Summary,
CreatedAt = DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow
};
}
return Task.FromResult(id);
}
public Task<SourceEdgeResult> AddSourceEdgeAsync(AddSourceEdgeRequest request, CancellationToken ct = default)
{
// Create unique key for edge (canonical + source + doc hash)
var edgeKey = $"{request.CanonicalId}|{request.SourceId}|{request.SourceDocHash}";
if (_edgeHashes.TryGetValue(edgeKey, out var existingId))
{
// Duplicate edge - return existing
return Task.FromResult(SourceEdgeResult.Existing(existingId));
}
var edgeId = Guid.NewGuid();
var edge = new SourceEdge
{
Id = edgeId,
SourceName = GetSourceName(request.SourceId),
SourceAdvisoryId = request.SourceAdvisoryId,
SourceDocHash = request.SourceDocHash,
VendorStatus = request.VendorStatus,
PrecedenceRank = request.PrecedenceRank,
FetchedAt = request.FetchedAt
};
if (!_sourceEdges.ContainsKey(request.CanonicalId))
{
_sourceEdges[request.CanonicalId] = new List<SourceEdge>();
}
_sourceEdges[request.CanonicalId].Add(edge);
_edgeHashes[edgeKey] = edgeId;
return Task.FromResult(SourceEdgeResult.Created(edgeId));
}
public Task<IReadOnlyList<SourceEdge>> GetSourceEdgesAsync(Guid canonicalId, CancellationToken ct = default)
{
var edges = _sourceEdges.GetValueOrDefault(canonicalId) ?? new List<SourceEdge>();
return Task.FromResult<IReadOnlyList<SourceEdge>>(edges.OrderBy(e => e.PrecedenceRank).ToList());
}
public Task<bool> SourceEdgeExistsAsync(Guid canonicalId, Guid sourceId, string docHash, CancellationToken ct = default)
{
var edgeKey = $"{canonicalId}|{sourceId}|{docHash}";
return Task.FromResult(_edgeHashes.ContainsKey(edgeKey));
}
public Task<Guid> ResolveSourceIdAsync(string sourceName, CancellationToken ct = default)
{
if (!_sourceIds.TryGetValue(sourceName, out var id))
{
id = Guid.NewGuid();
_sourceIds[sourceName] = id;
}
return Task.FromResult(id);
}
public Task<int> GetSourcePrecedenceAsync(string sourceKey, CancellationToken ct = default)
{
// Source precedence (lower = higher priority)
var rank = sourceKey.ToLowerInvariant() switch
{
"cisco" or "oracle" or "microsoft" or "adobe" => 10, // Vendor PSIRT
"redhat" or "debian" or "suse" or "ubuntu" or "alpine" => 20, // Distro
"osv" => 30,
"ghsa" => 35,
"nvd" => 40,
_ => 100 // Community
};
return Task.FromResult(rank);
}
public Task UpdateStatusAsync(Guid id, CanonicalStatus status, CancellationToken ct = default)
{
if (_canonicals.TryGetValue(id, out var existing))
{
_canonicals[id] = existing with { Status = status, UpdatedAt = DateTimeOffset.UtcNow };
}
return Task.CompletedTask;
}
public Task<long> CountAsync(CancellationToken ct = default)
{
return Task.FromResult((long)_canonicals.Count);
}
private string GetSourceName(Guid sourceId)
{
return _sourceIds.FirstOrDefault(kvp => kvp.Value == sourceId).Key ?? "unknown";
}
}
/// <summary>
/// Real implementation of IMergeHashCalculator for testing.
/// </summary>
private sealed class RealMergeHashCalculator : IMergeHashCalculator
{
public string ComputeMergeHash(MergeHashInput input)
{
// Compute deterministic hash from: CVE | AFFECTS | VERSION | CWE | LINEAGE
var components = new List<string>
{
input.Cve?.ToUpperInvariant() ?? "",
input.AffectsKey?.ToLowerInvariant() ?? ""
};
// VersionRange is a string (JSON), include if present
if (!string.IsNullOrWhiteSpace(input.VersionRange))
{
components.Add(input.VersionRange);
}
if (input.Weaknesses?.Count > 0)
{
components.AddRange(input.Weaknesses.OrderBy(w => w, StringComparer.OrdinalIgnoreCase));
}
if (!string.IsNullOrWhiteSpace(input.PatchLineage))
{
components.Add(input.PatchLineage);
}
var combined = string.Join("|", components);
using var sha256 = System.Security.Cryptography.SHA256.Create();
var hashBytes = sha256.ComputeHash(System.Text.Encoding.UTF8.GetBytes(combined));
return "sha256:" + Convert.ToHexString(hashBytes).ToLowerInvariant();
}
}
#endregion
}

View File

@@ -0,0 +1,445 @@
// -----------------------------------------------------------------------------
// InterestScoreCalculatorTests.cs
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
// Task: ISCORE-8200-013
// Description: Unit tests for InterestScoreCalculator
// -----------------------------------------------------------------------------
using FluentAssertions;
using StellaOps.Concelier.Interest.Models;
using Xunit;
namespace StellaOps.Concelier.Interest.Tests;
public class InterestScoreCalculatorTests
{
private readonly InterestScoreCalculator _calculator;
private readonly InterestScoreWeights _defaultWeights = new();
public InterestScoreCalculatorTests()
{
_calculator = new InterestScoreCalculator(_defaultWeights);
}
[Fact]
public void Calculate_WithNoSignals_ReturnsBaseScore()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches = [],
VexStatements = [],
RuntimeSignals = []
};
// Act
var result = _calculator.Calculate(input);
// Assert
// Only no_vex_na applies (0.15) when no signals
result.Score.Should().Be(0.15);
result.Reasons.Should().Contain("no_vex_na");
result.Reasons.Should().HaveCount(1);
}
[Fact]
public void Calculate_WithSbomMatch_AddsInSbomFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = false,
IsDeployed = false,
ScannedAt = DateTimeOffset.UtcNow
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Score.Should().Be(0.45); // in_sbom (0.30) + no_vex_na (0.15)
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("no_vex_na");
}
[Fact]
public void Calculate_WithReachableSbomMatch_AddsReachableFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = true,
IsDeployed = false,
ScannedAt = DateTimeOffset.UtcNow
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Score.Should().Be(0.70); // in_sbom (0.30) + reachable (0.25) + no_vex_na (0.15)
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("reachable");
result.Reasons.Should().Contain("no_vex_na");
}
[Fact]
public void Calculate_WithDeployedSbomMatch_AddsDeployedFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = false,
IsDeployed = true,
ScannedAt = DateTimeOffset.UtcNow
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Score.Should().Be(0.65); // in_sbom (0.30) + deployed (0.20) + no_vex_na (0.15)
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("deployed");
result.Reasons.Should().Contain("no_vex_na");
}
[Fact]
public void Calculate_WithFullSbomMatch_AddsAllSbomFactors()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = true,
IsDeployed = true,
ScannedAt = DateTimeOffset.UtcNow
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Score.Should().Be(0.90); // in_sbom (0.30) + reachable (0.25) + deployed (0.20) + no_vex_na (0.15)
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("reachable");
result.Reasons.Should().Contain("deployed");
result.Reasons.Should().Contain("no_vex_na");
}
[Fact]
public void Calculate_WithVexNotAffected_ExcludesVexFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = true,
IsDeployed = true,
ScannedAt = DateTimeOffset.UtcNow
}
],
VexStatements =
[
new VexStatement
{
StatementId = "VEX-001",
Status = VexStatus.NotAffected,
Justification = "Component not used in affected context"
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Score.Should().Be(0.75); // in_sbom (0.30) + reachable (0.25) + deployed (0.20) - NO no_vex_na
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("reachable");
result.Reasons.Should().Contain("deployed");
result.Reasons.Should().NotContain("no_vex_na");
}
[Fact]
public void Calculate_WithRecentLastSeen_AddsRecentFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
ScannedAt = DateTimeOffset.UtcNow
}
],
LastSeenInBuild = DateTimeOffset.UtcNow.AddDays(-7) // 7 days ago
};
// Act
var result = _calculator.Calculate(input);
// Assert
// in_sbom (0.30) + no_vex_na (0.15) + recent (~0.098 for 7 days)
result.Score.Should().BeApproximately(0.55, 0.02);
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("no_vex_na");
result.Reasons.Should().Contain("recent");
}
[Fact]
public void Calculate_WithOldLastSeen_DecaysRecentFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
ScannedAt = DateTimeOffset.UtcNow.AddDays(-300)
}
],
LastSeenInBuild = DateTimeOffset.UtcNow.AddDays(-300) // 300 days ago
};
// Act
var result = _calculator.Calculate(input);
// Assert
// in_sbom (0.30) + no_vex_na (0.15) + recent (~0.018 for 300 days, no "recent" reason)
result.Score.Should().BeApproximately(0.47, 0.02);
result.Reasons.Should().Contain("in_sbom");
result.Reasons.Should().Contain("no_vex_na");
result.Reasons.Should().NotContain("recent"); // decayFactor < 0.5
}
[Fact]
public void Calculate_WithVeryOldLastSeen_NoRecentFactor()
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches = [],
LastSeenInBuild = DateTimeOffset.UtcNow.AddDays(-400) // > 1 year
};
// Act
var result = _calculator.Calculate(input);
// Assert
// Only no_vex_na (0.15), no recent factor (decayed to 0)
result.Score.Should().Be(0.15);
result.Reasons.Should().Contain("no_vex_na");
result.Reasons.Should().NotContain("recent");
}
[Fact]
public void Calculate_MaxScore_IsCappedAt1()
{
// Arrange - use custom weights that exceed 1.0
var heavyWeights = new InterestScoreWeights
{
InSbom = 0.50,
Reachable = 0.40,
Deployed = 0.30,
NoVexNotAffected = 0.20,
Recent = 0.10
};
var calculator = new InterestScoreCalculator(heavyWeights);
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
SbomMatches =
[
new SbomMatch
{
SbomDigest = "sha256:abc123",
Purl = "pkg:npm/lodash@4.17.21",
IsReachable = true,
IsDeployed = true,
ScannedAt = DateTimeOffset.UtcNow
}
],
LastSeenInBuild = DateTimeOffset.UtcNow
};
// Act
var result = calculator.Calculate(input);
// Assert
result.Score.Should().Be(1.0);
}
[Fact]
public void Calculate_SetsComputedAtToNow()
{
// Arrange
var input = new InterestScoreInput { CanonicalId = Guid.NewGuid() };
var before = DateTimeOffset.UtcNow;
// Act
var result = _calculator.Calculate(input);
var after = DateTimeOffset.UtcNow;
// Assert
result.ComputedAt.Should().BeOnOrAfter(before);
result.ComputedAt.Should().BeOnOrBefore(after);
}
[Fact]
public void Calculate_PreservesCanonicalId()
{
// Arrange
var canonicalId = Guid.NewGuid();
var input = new InterestScoreInput { CanonicalId = canonicalId };
// Act
var result = _calculator.Calculate(input);
// Assert
result.CanonicalId.Should().Be(canonicalId);
}
[Theory]
[InlineData(VexStatus.Affected)]
[InlineData(VexStatus.Fixed)]
[InlineData(VexStatus.UnderInvestigation)]
public void Calculate_WithNonExcludingVexStatus_IncludesNoVexNaFactor(VexStatus status)
{
// Arrange
var input = new InterestScoreInput
{
CanonicalId = Guid.NewGuid(),
VexStatements =
[
new VexStatement
{
StatementId = "VEX-001",
Status = status
}
]
};
// Act
var result = _calculator.Calculate(input);
// Assert
result.Reasons.Should().Contain("no_vex_na");
}
[Fact]
public void InterestTier_HighScore_ReturnsHigh()
{
// Arrange
var score = new InterestScore
{
CanonicalId = Guid.NewGuid(),
Score = 0.75,
Reasons = [],
ComputedAt = DateTimeOffset.UtcNow
};
// Assert
score.Tier.Should().Be(InterestTier.High);
}
[Fact]
public void InterestTier_MediumScore_ReturnsMedium()
{
// Arrange
var score = new InterestScore
{
CanonicalId = Guid.NewGuid(),
Score = 0.50,
Reasons = [],
ComputedAt = DateTimeOffset.UtcNow
};
// Assert
score.Tier.Should().Be(InterestTier.Medium);
}
[Fact]
public void InterestTier_LowScore_ReturnsLow()
{
// Arrange
var score = new InterestScore
{
CanonicalId = Guid.NewGuid(),
Score = 0.30,
Reasons = [],
ComputedAt = DateTimeOffset.UtcNow
};
// Assert
score.Tier.Should().Be(InterestTier.Low);
}
[Fact]
public void InterestTier_NoneScore_ReturnsNone()
{
// Arrange
var score = new InterestScore
{
CanonicalId = Guid.NewGuid(),
Score = 0.10,
Reasons = [],
ComputedAt = DateTimeOffset.UtcNow
};
// Assert
score.Tier.Should().Be(InterestTier.None);
}
}

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<RootNamespace>StellaOps.Concelier.Interest.Tests</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="8.0.0" />
<PackageReference Include="Moq" Version="4.20.72" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Concelier.Interest\StellaOps.Concelier.Interest.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,658 @@
// -----------------------------------------------------------------------------
// SyncLedgerRepositoryTests.cs
// Sprint: SPRINT_8200_0014_0001_DB_sync_ledger_schema
// Tasks: SYNC-8200-003 (migration), SYNC-8200-008 (repo), SYNC-8200-012 (cursor), SYNC-8200-016 (policy)
// Description: Integration tests for SyncLedger repository and schema
// -----------------------------------------------------------------------------
using Dapper;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
using StellaOps.Concelier.Storage.Postgres.Sync;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Concelier.Storage.Postgres.Tests;
/// <summary>
/// Integration tests for SyncLedgerRepository and SitePolicyEnforcementService.
/// Covers Tasks 3, 8, 12, and 16 from SPRINT_8200_0014_0001.
/// </summary>
[Collection(ConcelierPostgresCollection.Name)]
[Trait("Category", TestCategories.Integration)]
[Trait("Category", "SyncLedger")]
public sealed class SyncLedgerRepositoryTests : IAsyncLifetime
{
private readonly ConcelierPostgresFixture _fixture;
private readonly ConcelierDataSource _dataSource;
private readonly SyncLedgerRepository _repository;
private readonly SitePolicyEnforcementService _policyService;
public SyncLedgerRepositoryTests(ConcelierPostgresFixture fixture)
{
_fixture = fixture;
var options = fixture.Fixture.CreateOptions();
_dataSource = new ConcelierDataSource(Options.Create(options), NullLogger<ConcelierDataSource>.Instance);
_repository = new SyncLedgerRepository(_dataSource, NullLogger<SyncLedgerRepository>.Instance);
_policyService = new SitePolicyEnforcementService(_repository, NullLogger<SitePolicyEnforcementService>.Instance);
}
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;
#region Task 3: Migration Validation
[Fact]
public async Task Migration_SyncLedgerTableExists()
{
// Assert
await using var connection = new NpgsqlConnection(_fixture.Fixture.ConnectionString);
await connection.OpenAsync();
var exists = await connection.ExecuteScalarAsync<bool>(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'vuln' AND table_name = 'sync_ledger')");
exists.Should().BeTrue("sync_ledger table should exist after migration");
}
[Fact]
public async Task Migration_SitePolicyTableExists()
{
// Assert
await using var connection = new NpgsqlConnection(_fixture.Fixture.ConnectionString);
await connection.OpenAsync();
var exists = await connection.ExecuteScalarAsync<bool>(
"SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'vuln' AND table_name = 'site_policy')");
exists.Should().BeTrue("site_policy table should exist after migration");
}
[Fact]
public async Task Migration_IndexesExist()
{
// Assert
await using var connection = new NpgsqlConnection(_fixture.Fixture.ConnectionString);
await connection.OpenAsync();
var indexes = await connection.QueryAsync<string>(
@"SELECT indexname FROM pg_indexes
WHERE schemaname = 'vuln'
AND (tablename = 'sync_ledger' OR tablename = 'site_policy')");
var indexList = indexes.ToList();
indexList.Should().Contain("idx_sync_ledger_site");
indexList.Should().Contain("idx_sync_ledger_site_time");
}
[Fact]
public async Task Migration_ConstraintsExist()
{
// Assert
await using var connection = new NpgsqlConnection(_fixture.Fixture.ConnectionString);
await connection.OpenAsync();
var constraints = await connection.QueryAsync<string>(
@"SELECT constraint_name FROM information_schema.table_constraints
WHERE table_schema = 'vuln'
AND table_name = 'sync_ledger'
AND constraint_type = 'UNIQUE'");
var constraintList = constraints.ToList();
constraintList.Should().Contain("uq_sync_ledger_site_cursor");
constraintList.Should().Contain("uq_sync_ledger_bundle");
}
#endregion
#region Task 8: Repository Operations
[Fact]
public async Task InsertAsync_CreatesLedgerEntry()
{
// Arrange
var entry = CreateLedgerEntry("site-test-001", "sha256:abc123", 100);
// Act
var id = await _repository.InsertAsync(entry);
// Assert
id.Should().NotBe(Guid.Empty);
var retrieved = await _repository.GetLatestAsync("site-test-001");
retrieved.Should().NotBeNull();
retrieved!.SiteId.Should().Be("site-test-001");
retrieved.BundleHash.Should().Be("sha256:abc123");
retrieved.ItemsCount.Should().Be(100);
}
[Fact]
public async Task GetLatestAsync_ReturnsNewestEntry()
{
// Arrange
var siteId = $"site-latest-{Guid.NewGuid():N}";
var baseTime = DateTimeOffset.UtcNow.AddHours(-2);
for (int i = 0; i < 3; i++)
{
await _repository.InsertAsync(CreateLedgerEntry(
siteId,
$"hash-{i}-{Guid.NewGuid():N}",
(i + 1) * 10,
baseTime.AddMinutes(i * 10),
i));
}
// Act
var latest = await _repository.GetLatestAsync(siteId);
// Assert
latest.Should().NotBeNull();
latest!.ItemsCount.Should().Be(30); // The third entry (30 items)
}
[Fact]
public async Task GetHistoryAsync_ReturnsEntriesInDescendingOrder()
{
// Arrange
var siteId = $"site-history-{Guid.NewGuid():N}";
var baseTime = DateTimeOffset.UtcNow.AddHours(-1);
for (int i = 0; i < 5; i++)
{
await _repository.InsertAsync(CreateLedgerEntry(
siteId,
$"hash-hist-{i}-{Guid.NewGuid():N}",
(i + 1) * 5,
baseTime.AddMinutes(i * 5),
i));
}
// Act
var history = await _repository.GetHistoryAsync(siteId, limit: 3);
// Assert
history.Should().HaveCount(3);
history[0].ItemsCount.Should().Be(25); // Most recent
history[1].ItemsCount.Should().Be(20);
history[2].ItemsCount.Should().Be(15);
}
[Fact]
public async Task GetByBundleHashAsync_FindsEntry()
{
// Arrange
var uniqueHash = $"sha256:unique-{Guid.NewGuid():N}";
await _repository.InsertAsync(CreateLedgerEntry("site-hash-test", uniqueHash, 42));
// Act
var found = await _repository.GetByBundleHashAsync(uniqueHash);
// Assert
found.Should().NotBeNull();
found!.BundleHash.Should().Be(uniqueHash);
found.ItemsCount.Should().Be(42);
}
[Fact]
public async Task GetByBundleHashAsync_ReturnsNull_WhenNotFound()
{
// Act
var result = await _repository.GetByBundleHashAsync("sha256:nonexistent");
// Assert
result.Should().BeNull();
}
[Fact]
public async Task UpsertPolicyAsync_CreatesPolicyWhenNew()
{
// Arrange
var policy = CreatePolicy($"site-policy-{Guid.NewGuid():N}", "Test Site", ["nvd", "ghsa"], ["untrusted-*"], 50, 5000);
// Act
await _repository.UpsertPolicyAsync(policy);
// Assert
var retrieved = await _repository.GetPolicyAsync(policy.SiteId);
retrieved.Should().NotBeNull();
retrieved!.DisplayName.Should().Be("Test Site");
retrieved.AllowedSources.Should().BeEquivalentTo(["nvd", "ghsa"]);
retrieved.MaxBundleSizeMb.Should().Be(50);
}
[Fact]
public async Task UpsertPolicyAsync_UpdatesExistingPolicy()
{
// Arrange
var siteId = $"site-upsert-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, "Original", maxSizeMb: 100, enabled: true));
// Act - Update
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, "Updated", maxSizeMb: 200, enabled: false));
// Assert
var retrieved = await _repository.GetPolicyAsync(siteId);
retrieved.Should().NotBeNull();
retrieved!.DisplayName.Should().Be("Updated");
retrieved.MaxBundleSizeMb.Should().Be(200);
retrieved.Enabled.Should().BeFalse();
}
[Fact]
public async Task GetAllPoliciesAsync_FiltersEnabledOnly()
{
// Arrange
var prefix = $"bulk-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy($"{prefix}-enabled", enabled: true));
await _repository.UpsertPolicyAsync(CreatePolicy($"{prefix}-disabled", enabled: false));
// Act
var enabledOnly = await _repository.GetAllPoliciesAsync(enabledOnly: true);
var all = await _repository.GetAllPoliciesAsync(enabledOnly: false);
// Assert
enabledOnly.Should().Contain(p => p.SiteId == $"{prefix}-enabled");
enabledOnly.Should().NotContain(p => p.SiteId == $"{prefix}-disabled");
all.Should().Contain(p => p.SiteId == $"{prefix}-enabled");
all.Should().Contain(p => p.SiteId == $"{prefix}-disabled");
}
[Fact]
public async Task GetStatisticsAsync_ReturnsCorrectCounts()
{
// Arrange - Create some test data
var siteId = $"stats-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, enabled: true));
await _repository.InsertAsync(CreateLedgerEntry(siteId, $"stats-hash-{Guid.NewGuid():N}", 100));
// Act
var stats = await _repository.GetStatisticsAsync();
// Assert
stats.TotalSites.Should().BeGreaterThan(0);
stats.TotalBundlesImported.Should().BeGreaterThan(0);
stats.TotalItemsImported.Should().BeGreaterThanOrEqualTo(100);
}
#endregion
#region Task 12: Cursor Advancement and Conflict Handling
[Fact]
public async Task GetCursorAsync_ReturnsLatestCursor()
{
// Arrange
var siteId = $"cursor-{Guid.NewGuid():N}";
var cursor1 = CursorFormat.Create(DateTimeOffset.UtcNow.AddMinutes(-10), 0);
var cursor2 = CursorFormat.Create(DateTimeOffset.UtcNow, 1);
await _repository.AdvanceCursorAsync(siteId, cursor1, $"hash1-{Guid.NewGuid():N}", 10, DateTimeOffset.UtcNow.AddMinutes(-10));
await _repository.AdvanceCursorAsync(siteId, cursor2, $"hash2-{Guid.NewGuid():N}", 20, DateTimeOffset.UtcNow);
// Act
var currentCursor = await _repository.GetCursorAsync(siteId);
// Assert
currentCursor.Should().Be(cursor2);
}
[Fact]
public async Task GetCursorAsync_ReturnsNull_WhenNoHistory()
{
// Act
var cursor = await _repository.GetCursorAsync($"nonexistent-{Guid.NewGuid():N}");
// Assert
cursor.Should().BeNull();
}
[Fact]
public async Task AdvanceCursorAsync_CreatesLedgerEntry()
{
// Arrange
var siteId = $"advance-{Guid.NewGuid():N}";
var cursor = CursorFormat.Create(DateTimeOffset.UtcNow, 42);
var bundleHash = $"adv-hash-{Guid.NewGuid():N}";
// Act
await _repository.AdvanceCursorAsync(siteId, cursor, bundleHash, 150, DateTimeOffset.UtcNow);
// Assert
var entry = await _repository.GetLatestAsync(siteId);
entry.Should().NotBeNull();
entry!.Cursor.Should().Be(cursor);
entry.BundleHash.Should().Be(bundleHash);
entry.ItemsCount.Should().Be(150);
}
[Fact]
public async Task IsCursorConflictAsync_ReturnsFalse_WhenCursorIsNewer()
{
// Arrange
var siteId = $"conflict-newer-{Guid.NewGuid():N}";
var oldCursor = CursorFormat.Create(DateTimeOffset.UtcNow.AddHours(-1), 0);
var newCursor = CursorFormat.Create(DateTimeOffset.UtcNow, 1);
await _repository.AdvanceCursorAsync(siteId, oldCursor, $"ch1-{Guid.NewGuid():N}", 10, DateTimeOffset.UtcNow.AddHours(-1));
// Act
var isConflict = await _repository.IsCursorConflictAsync(siteId, newCursor);
// Assert
isConflict.Should().BeFalse("newer cursor should not conflict");
}
[Fact]
public async Task IsCursorConflictAsync_ReturnsTrue_WhenCursorIsOlder()
{
// Arrange
var siteId = $"conflict-older-{Guid.NewGuid():N}";
var currentCursor = CursorFormat.Create(DateTimeOffset.UtcNow, 1);
var olderCursor = CursorFormat.Create(DateTimeOffset.UtcNow.AddHours(-1), 0);
await _repository.AdvanceCursorAsync(siteId, currentCursor, $"ch2-{Guid.NewGuid():N}", 10, DateTimeOffset.UtcNow);
// Act
var isConflict = await _repository.IsCursorConflictAsync(siteId, olderCursor);
// Assert
isConflict.Should().BeTrue("older cursor should conflict with current");
}
[Fact]
public async Task IsCursorConflictAsync_ReturnsFalse_WhenNoExistingCursor()
{
// Act
var isConflict = await _repository.IsCursorConflictAsync(
$"no-cursor-{Guid.NewGuid():N}",
CursorFormat.Create(DateTimeOffset.UtcNow, 0));
// Assert
isConflict.Should().BeFalse("no existing cursor means no conflict");
}
[Fact]
public void CursorFormat_Create_ProducesValidFormat()
{
// Arrange
var timestamp = DateTimeOffset.Parse("2025-01-15T10:30:00.000Z");
// Act
var cursor = CursorFormat.Create(timestamp, 42);
// Assert
cursor.Should().Contain("2025-01-15");
cursor.Should().EndWith("#0042");
}
[Fact]
public void CursorFormat_Parse_ExtractsComponents()
{
// Arrange
var cursor = "2025-01-15T10:30:00.0000000+00:00#0042";
// Act
var (timestamp, sequence) = CursorFormat.Parse(cursor);
// Assert
timestamp.Year.Should().Be(2025);
timestamp.Month.Should().Be(1);
timestamp.Day.Should().Be(15);
sequence.Should().Be(42);
}
[Fact]
public void CursorFormat_IsAfter_ComparesCorrectly()
{
// Arrange
var earlier = CursorFormat.Create(DateTimeOffset.Parse("2025-01-15T10:00:00Z"), 0);
var later = CursorFormat.Create(DateTimeOffset.Parse("2025-01-15T11:00:00Z"), 0);
var sameTimeHigherSeq = CursorFormat.Create(DateTimeOffset.Parse("2025-01-15T10:00:00Z"), 5);
// Assert
CursorFormat.IsAfter(later, earlier).Should().BeTrue("later timestamp is after");
CursorFormat.IsAfter(earlier, later).Should().BeFalse("earlier timestamp is not after");
CursorFormat.IsAfter(sameTimeHigherSeq, earlier).Should().BeTrue("higher sequence is after");
}
#endregion
#region Task 16: Policy Enforcement
[Fact]
public async Task ValidateSourceAsync_AllowsWhenNoPolicy()
{
// Act
var result = await _policyService.ValidateSourceAsync($"no-policy-{Guid.NewGuid():N}", "nvd");
// Assert
result.IsAllowed.Should().BeTrue();
result.Reason.Should().Contain("No policy");
}
[Fact]
public async Task ValidateSourceAsync_DeniesWhenPolicyDisabled()
{
// Arrange
var siteId = $"disabled-policy-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, enabled: false));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "nvd");
// Assert
result.IsAllowed.Should().BeFalse();
result.Reason.Should().Contain("disabled");
}
[Fact]
public async Task ValidateSourceAsync_DeniesWhenInDenyList()
{
// Arrange
var siteId = $"deny-list-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, deniedSources: ["untrusted", "blocked-*"], enabled: true));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "untrusted");
// Assert
result.IsAllowed.Should().BeFalse();
result.Reason.Should().Contain("deny list");
}
[Fact]
public async Task ValidateSourceAsync_DeniesWildcardMatch()
{
// Arrange
var siteId = $"wildcard-deny-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, deniedSources: ["blocked-*"], enabled: true));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "blocked-source-1");
// Assert
result.IsAllowed.Should().BeFalse("wildcard deny should match");
}
[Fact]
public async Task ValidateSourceAsync_AllowsWhenNoAllowList()
{
// Arrange
var siteId = $"no-allow-list-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, allowedSources: [], deniedSources: [], enabled: true));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "any-source");
// Assert
result.IsAllowed.Should().BeTrue();
result.Reason.Should().Contain("No allow list");
}
[Fact]
public async Task ValidateSourceAsync_AllowsWhenInAllowList()
{
// Arrange
var siteId = $"allow-list-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, allowedSources: ["nvd", "ghsa", "osv"], enabled: true));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "nvd");
// Assert
result.IsAllowed.Should().BeTrue();
result.Reason.Should().Contain("allow list");
}
[Fact]
public async Task ValidateSourceAsync_DeniesWhenNotInAllowList()
{
// Arrange
var siteId = $"not-in-allow-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, allowedSources: ["nvd", "ghsa"], enabled: true));
// Act
var result = await _policyService.ValidateSourceAsync(siteId, "random-source");
// Assert
result.IsAllowed.Should().BeFalse();
result.Reason.Should().Contain("not in allow list");
}
[Fact]
public async Task ValidateBundleSizeAsync_AllowsWithinLimits()
{
// Arrange
var siteId = $"size-ok-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, maxSizeMb: 100, maxItems: 10000, enabled: true));
// Act
var result = await _policyService.ValidateBundleSizeAsync(siteId, 50, 5000);
// Assert
result.IsAllowed.Should().BeTrue();
result.ActualSizeMb.Should().Be(50);
result.ActualItemCount.Should().Be(5000);
}
[Fact]
public async Task ValidateBundleSizeAsync_DeniesExceedsSizeLimit()
{
// Arrange
var siteId = $"size-exceed-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, maxSizeMb: 50, maxItems: 10000, enabled: true));
// Act
var result = await _policyService.ValidateBundleSizeAsync(siteId, 75, 100);
// Assert
result.IsAllowed.Should().BeFalse();
result.Reason.Should().Contain("exceeds limit");
}
[Fact]
public async Task ValidateBundleSizeAsync_DeniesExceedsItemLimit()
{
// Arrange
var siteId = $"items-exceed-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, maxSizeMb: 100, maxItems: 1000, enabled: true));
// Act
var result = await _policyService.ValidateBundleSizeAsync(siteId, 10, 5000);
// Assert
result.IsAllowed.Should().BeFalse();
result.Reason.Should().Contain("Item count");
}
[Fact]
public async Task GetRemainingBudgetAsync_ReturnsBudgetInfo()
{
// Arrange
var siteId = $"budget-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, maxSizeMb: 100, maxItems: 5000, enabled: true));
await _repository.AdvanceCursorAsync(siteId, CursorFormat.Create(DateTimeOffset.UtcNow, 0),
$"budget-hash-{Guid.NewGuid():N}", 200, DateTimeOffset.UtcNow);
// Act
var budget = await _policyService.GetRemainingBudgetAsync(siteId, windowHours: 24);
// Assert
budget.HasPolicy.Should().BeTrue();
budget.MaxBundleSizeMb.Should().Be(100);
budget.MaxItemsPerBundle.Should().Be(5000);
budget.RecentItemsImported.Should().BeGreaterThanOrEqualTo(200);
}
[Fact]
public async Task FilterAllowedSourcesAsync_FiltersCorrectly()
{
// Arrange
var siteId = $"filter-{Guid.NewGuid():N}";
await _repository.UpsertPolicyAsync(CreatePolicy(siteId, allowedSources: ["nvd", "ghsa"], deniedSources: ["blocked"], enabled: true));
// Act
var allowed = await _policyService.FilterAllowedSourcesAsync(
siteId,
new[] { "nvd", "ghsa", "osv", "blocked" });
// Assert
allowed.Should().BeEquivalentTo(["nvd", "ghsa"]);
allowed.Should().NotContain("osv"); // Not in allow list
allowed.Should().NotContain("blocked"); // In deny list
}
#endregion
#region Helpers
private static SyncLedgerEntity CreateLedgerEntry(
string siteId,
string bundleHash,
int itemsCount,
DateTimeOffset? signedAt = null,
int sequence = 0)
{
var ts = signedAt ?? DateTimeOffset.UtcNow;
return new SyncLedgerEntity
{
Id = Guid.NewGuid(),
SiteId = siteId,
Cursor = CursorFormat.Create(ts, sequence),
BundleHash = bundleHash,
ItemsCount = itemsCount,
SignedAt = ts
};
}
private static SitePolicyEntity CreatePolicy(
string siteId,
string? displayName = null,
string[]? allowedSources = null,
string[]? deniedSources = null,
int maxSizeMb = 100,
int maxItems = 10000,
bool enabled = true)
{
return new SitePolicyEntity
{
Id = Guid.NewGuid(),
SiteId = siteId,
DisplayName = displayName,
AllowedSources = allowedSources ?? [],
DeniedSources = deniedSources ?? [],
MaxBundleSizeMb = maxSizeMb,
MaxItemsPerBundle = maxItems,
RequireSignature = true,
AllowedSigners = [],
Enabled = enabled
};
}
#endregion
}

View File

@@ -476,9 +476,127 @@ components:
type: array
traceId:
type: string
cacheHit:
type: boolean
description: Whether the decision was served from cache.
cacheSource:
type: string
enum:
- none
- inMemory
- redis
description: Source of cached data (none for fresh computation, inMemory for L1 cache, redis for Provcache L2).
executionTimeMs:
type: integer
description: Time taken to evaluate the policy in milliseconds.
required:
- decision
type: object
provcache.TrustScoreComponent:
type: object
required:
- score
- weight
properties:
score:
type: integer
minimum: 0
maximum: 100
description: Component score (0-100).
weight:
type: number
format: float
minimum: 0
maximum: 1
description: Weight of this component in the total score (0.0-1.0).
provcache.TrustScoreBreakdown:
type: object
required:
- reachability
- sbomCompleteness
- vexCoverage
- policyFreshness
- signerTrust
properties:
reachability:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Reachability evidence contribution (weight 25%).
sbomCompleteness:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: SBOM completeness contribution (weight 20%).
vexCoverage:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: VEX statement coverage contribution (weight 20%).
policyFreshness:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Policy freshness contribution (weight 15%).
signerTrust:
$ref: '#/components/schemas/provcache.TrustScoreComponent'
description: Signer trust contribution (weight 20%).
provcache.ReplaySeed:
type: object
required:
- feedIds
- ruleIds
properties:
feedIds:
type: array
items:
type: string
description: Advisory feed identifiers used in evaluation.
ruleIds:
type: array
items:
type: string
description: Policy rule identifiers used in evaluation.
frozenEpoch:
type: string
format: date-time
description: Optional frozen epoch timestamp for deterministic replay.
provcache.DecisionDigest:
type: object
required:
- digestVersion
- veriKey
- verdictHash
- proofRoot
- replaySeed
- createdAt
- expiresAt
- trustScore
properties:
digestVersion:
type: string
description: Schema version of this digest format.
example: v1
veriKey:
type: string
description: Composite cache key that uniquely identifies the provenance decision context.
example: sha256:abc123...
verdictHash:
type: string
description: Hash of sorted dispositions from the evaluation result.
proofRoot:
type: string
description: Merkle root of all evidence chunks used in this decision.
replaySeed:
$ref: '#/components/schemas/provcache.ReplaySeed'
createdAt:
type: string
format: date-time
description: UTC timestamp when this digest was created.
expiresAt:
type: string
format: date-time
description: UTC timestamp when this digest expires.
trustScore:
type: integer
minimum: 0
maximum: 100
description: Composite trust score (0-100) indicating decision confidence.
trustScoreBreakdown:
$ref: '#/components/schemas/provcache.TrustScoreBreakdown'
description: Breakdown of trust score by component.
scheduler.QueueStatus:
properties:
depth:

View File

@@ -0,0 +1,291 @@
// ----------------------------------------------------------------------------
// Copyright (c) 2025 StellaOps contributors. All rights reserved.
// SPDX-License-Identifier: AGPL-3.0-or-later
// ----------------------------------------------------------------------------
using StellaOps.Provcache;
using StellaOps.Provcache.Oci;
namespace StellaOps.ExportCenter.Core.Provcache;
/// <summary>
/// Integrates Provcache OCI attestations with the ExportCenter OCI push workflow.
/// Enables automatic attachment of DecisionDigest attestations to container images.
/// </summary>
public sealed class ProvcacheOciExporter : IProvcacheOciExporter
{
private readonly IProvcacheOciAttestationBuilder _attestationBuilder;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Initializes a new instance of the <see cref="ProvcacheOciExporter"/> class.
/// </summary>
public ProvcacheOciExporter(
IProvcacheOciAttestationBuilder attestationBuilder,
TimeProvider? timeProvider = null)
{
_attestationBuilder = attestationBuilder ?? throw new ArgumentNullException(nameof(attestationBuilder));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
public ProvcacheExportLayerContent CreateAttestationLayer(ProvcacheExportRequest request)
{
ArgumentNullException.ThrowIfNull(request);
ValidateRequest(request);
// Build the attestation
var attestationRequest = new ProvcacheOciAttestationRequest
{
ArtifactReference = request.ArtifactReference,
ArtifactDigest = request.ArtifactDigest,
DecisionDigest = request.DecisionDigest,
InputManifest = request.InputManifest,
VerdictSummary = request.VerdictSummary,
TenantId = request.TenantId,
Scope = request.Scope
};
var attachment = _attestationBuilder.CreateAttachment(attestationRequest);
return new ProvcacheExportLayerContent(
MediaType: ProvcachePredicateTypes.MediaType,
Content: attachment.PayloadBytes,
ContentJson: attachment.Payload,
Digest: ComputeDigest(attachment.PayloadBytes),
Annotations: attachment.Annotations);
}
/// <inheritdoc/>
public ProvcacheExportResult BuildExport(ProvcacheExportRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var layer = CreateAttestationLayer(request);
var manifest = BuildManifest(request, layer);
return new ProvcacheExportResult(
Layer: layer,
Manifest: manifest,
ArtifactReference: request.ArtifactReference,
SubjectDigest: request.ArtifactDigest,
CreatedAt: _timeProvider.GetUtcNow());
}
/// <inheritdoc/>
public bool ShouldAttachAttestation(ProvcacheAttachmentPolicy policy, DecisionDigest digest)
{
ArgumentNullException.ThrowIfNull(policy);
ArgumentNullException.ThrowIfNull(digest);
// Check if attestations are enabled
if (!policy.Enabled)
{
return false;
}
// Check trust score threshold
if (digest.TrustScore < policy.MinimumTrustScore)
{
return false;
}
// Check if the digest is expired
if (digest.ExpiresAt <= _timeProvider.GetUtcNow())
{
return policy.AttachExpired;
}
return true;
}
private static void ValidateRequest(ProvcacheExportRequest request)
{
if (string.IsNullOrWhiteSpace(request.ArtifactReference))
{
throw new ArgumentException("Artifact reference is required.", nameof(request));
}
if (string.IsNullOrWhiteSpace(request.ArtifactDigest))
{
throw new ArgumentException("Artifact digest is required.", nameof(request));
}
if (request.DecisionDigest is null)
{
throw new ArgumentException("DecisionDigest is required.", nameof(request));
}
}
private static string ComputeDigest(byte[] content)
{
var hash = System.Security.Cryptography.SHA256.HashData(content);
return $"sha256:{Convert.ToHexStringLower(hash)}";
}
private ProvcacheExportManifest BuildManifest(
ProvcacheExportRequest request,
ProvcacheExportLayerContent layer)
{
var annotations = new Dictionary<string, string>(StringComparer.Ordinal)
{
["org.opencontainers.image.created"] = _timeProvider.GetUtcNow().ToString("O"),
["org.opencontainers.image.title"] = "stellaops.provcache.decision",
["org.opencontainers.image.description"] = "Provcache decision attestation",
["stellaops.provcache.verikey"] = request.DecisionDigest.VeriKey,
["stellaops.provcache.trust-score"] = request.DecisionDigest.TrustScore.ToString()
};
if (!string.IsNullOrWhiteSpace(request.TenantId))
{
annotations["stellaops.tenant"] = request.TenantId;
}
return new ProvcacheExportManifest(
SchemaVersion: 2,
MediaType: "application/vnd.oci.image.manifest.v1+json",
ArtifactType: ProvcachePredicateTypes.MediaType,
SubjectDigest: request.ArtifactDigest,
LayerDigest: layer.Digest,
LayerMediaType: layer.MediaType,
LayerSize: layer.Content.Length,
Annotations: annotations);
}
}
/// <summary>
/// Interface for exporting Provcache OCI attestations.
/// </summary>
public interface IProvcacheOciExporter
{
/// <summary>
/// Creates an attestation layer content for pushing to OCI registry.
/// </summary>
ProvcacheExportLayerContent CreateAttestationLayer(ProvcacheExportRequest request);
/// <summary>
/// Builds a complete export result with manifest and layer.
/// </summary>
ProvcacheExportResult BuildExport(ProvcacheExportRequest request);
/// <summary>
/// Determines if an attestation should be attached based on policy.
/// </summary>
bool ShouldAttachAttestation(ProvcacheAttachmentPolicy policy, DecisionDigest digest);
}
/// <summary>
/// Request for exporting a Provcache attestation.
/// </summary>
public sealed record ProvcacheExportRequest
{
/// <summary>
/// OCI artifact reference to attach the attestation to.
/// </summary>
public required string ArtifactReference { get; init; }
/// <summary>
/// Digest of the artifact.
/// </summary>
public required string ArtifactDigest { get; init; }
/// <summary>
/// The DecisionDigest to export.
/// </summary>
public required DecisionDigest DecisionDigest { get; init; }
/// <summary>
/// Optional: Full InputManifest for detailed provenance.
/// </summary>
public InputManifest? InputManifest { get; init; }
/// <summary>
/// Optional: Summary of verdicts.
/// </summary>
public ProvcacheVerdictSummary? VerdictSummary { get; init; }
/// <summary>
/// Optional: Tenant identifier.
/// </summary>
public string? TenantId { get; init; }
/// <summary>
/// Optional: Scope identifier.
/// </summary>
public string? Scope { get; init; }
}
/// <summary>
/// Layer content for OCI push.
/// </summary>
public sealed record ProvcacheExportLayerContent(
string MediaType,
byte[] Content,
string ContentJson,
string Digest,
IReadOnlyDictionary<string, string> Annotations);
/// <summary>
/// Complete export result for OCI push.
/// </summary>
public sealed record ProvcacheExportResult(
ProvcacheExportLayerContent Layer,
ProvcacheExportManifest Manifest,
string ArtifactReference,
string SubjectDigest,
DateTimeOffset CreatedAt);
/// <summary>
/// OCI manifest for the attestation artifact.
/// </summary>
public sealed record ProvcacheExportManifest(
int SchemaVersion,
string MediaType,
string ArtifactType,
string SubjectDigest,
string LayerDigest,
string LayerMediaType,
long LayerSize,
IReadOnlyDictionary<string, string> Annotations);
/// <summary>
/// Policy for automatic attestation attachment.
/// </summary>
public sealed record ProvcacheAttachmentPolicy
{
/// <summary>
/// Whether automatic attestation is enabled.
/// </summary>
public bool Enabled { get; init; } = true;
/// <summary>
/// Minimum trust score required for attestation attachment.
/// </summary>
public int MinimumTrustScore { get; init; } = 0;
/// <summary>
/// Whether to attach attestations for expired decisions.
/// </summary>
public bool AttachExpired { get; init; } = false;
/// <summary>
/// Registry patterns to include (glob patterns).
/// Empty means all registries.
/// </summary>
public IReadOnlyList<string> IncludeRegistries { get; init; } = [];
/// <summary>
/// Registry patterns to exclude (glob patterns).
/// </summary>
public IReadOnlyList<string> ExcludeRegistries { get; init; } = [];
/// <summary>
/// Creates a default policy with attestations enabled.
/// </summary>
public static ProvcacheAttachmentPolicy Default => new();
/// <summary>
/// Creates a disabled policy.
/// </summary>
public static ProvcacheAttachmentPolicy Disabled => new() { Enabled = false };
}

View File

@@ -0,0 +1,183 @@
// ----------------------------------------------------------------------------
// Copyright (c) 2025 StellaOps contributors. All rights reserved.
// SPDX-License-Identifier: AGPL-3.0-or-later
// ----------------------------------------------------------------------------
namespace StellaOps.ExportCenter.Core.Provcache;
/// <summary>
/// Configuration options for Provcache OCI attestation features.
/// </summary>
public sealed class ProvcacheOciOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "Provcache:Oci";
/// <summary>
/// Whether OCI attestation features are enabled.
/// Default: true.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Whether to automatically attach attestations to pushed images.
/// Default: true.
/// </summary>
public bool AutoAttach { get; set; } = true;
/// <summary>
/// Minimum trust score required for automatic attestation attachment.
/// Default: 0 (attach all decisions).
/// </summary>
public int MinimumTrustScore { get; set; } = 0;
/// <summary>
/// Whether to attach attestations for expired decisions.
/// Default: false.
/// </summary>
public bool AttachExpiredDecisions { get; set; } = false;
/// <summary>
/// Whether to sign attestations with DSSE.
/// Default: true.
/// </summary>
public bool SignAttestations { get; set; } = true;
/// <summary>
/// Key identifier for signing attestations.
/// If null, uses the default signing key.
/// </summary>
public string? SigningKeyId { get; set; }
/// <summary>
/// Whether to include the full InputManifest in attestations.
/// Default: true.
/// </summary>
public bool IncludeInputManifest { get; set; } = true;
/// <summary>
/// Whether to include verdict summary in attestations.
/// Default: true.
/// </summary>
public bool IncludeVerdictSummary { get; set; } = true;
/// <summary>
/// Registry patterns to include for auto-attach (glob patterns).
/// Empty means all registries.
/// Example: ["ghcr.io/*", "docker.io/stellaops/*"]
/// </summary>
public string[] IncludeRegistries { get; set; } = [];
/// <summary>
/// Registry patterns to exclude from auto-attach (glob patterns).
/// Example: ["registry.internal/*"]
/// </summary>
public string[] ExcludeRegistries { get; set; } = [];
/// <summary>
/// Timeout for OCI push operations in seconds.
/// Default: 60.
/// </summary>
public int PushTimeoutSeconds { get; set; } = 60;
/// <summary>
/// Number of retries for failed push operations.
/// Default: 3.
/// </summary>
public int PushRetryCount { get; set; } = 3;
/// <summary>
/// Delay between retries in milliseconds.
/// Default: 1000.
/// </summary>
public int PushRetryDelayMs { get; set; } = 1000;
/// <summary>
/// Whether to fail the overall operation if attestation push fails.
/// Default: false (attestation push failures are logged but don't block).
/// </summary>
public bool FailOnAttestationError { get; set; } = false;
/// <summary>
/// Additional annotations to add to all attestations.
/// </summary>
public Dictionary<string, string> CustomAnnotations { get; set; } = [];
/// <summary>
/// Converts this options instance to an attachment policy.
/// </summary>
public ProvcacheAttachmentPolicy ToAttachmentPolicy() => new()
{
Enabled = Enabled && AutoAttach,
MinimumTrustScore = MinimumTrustScore,
AttachExpired = AttachExpiredDecisions,
IncludeRegistries = IncludeRegistries,
ExcludeRegistries = ExcludeRegistries
};
}
/// <summary>
/// Validates <see cref="ProvcacheOciOptions"/>.
/// </summary>
public static class ProvcacheOciOptionsValidator
{
/// <summary>
/// Validates the options and returns any validation errors.
/// </summary>
public static IEnumerable<string> Validate(ProvcacheOciOptions options)
{
ArgumentNullException.ThrowIfNull(options);
if (options.MinimumTrustScore is < 0 or > 100)
{
yield return "MinimumTrustScore must be between 0 and 100.";
}
if (options.PushTimeoutSeconds <= 0)
{
yield return "PushTimeoutSeconds must be greater than 0.";
}
if (options.PushRetryCount < 0)
{
yield return "PushRetryCount must be non-negative.";
}
if (options.PushRetryDelayMs < 0)
{
yield return "PushRetryDelayMs must be non-negative.";
}
// Validate registry patterns are valid globs
foreach (var pattern in options.IncludeRegistries)
{
if (string.IsNullOrWhiteSpace(pattern))
{
yield return "IncludeRegistries contains an empty pattern.";
}
}
foreach (var pattern in options.ExcludeRegistries)
{
if (string.IsNullOrWhiteSpace(pattern))
{
yield return "ExcludeRegistries contains an empty pattern.";
}
}
}
/// <summary>
/// Throws if the options are invalid.
/// </summary>
public static void ValidateAndThrow(ProvcacheOciOptions options)
{
var errors = Validate(options).ToList();
if (errors.Count > 0)
{
throw new InvalidOperationException(
$"Invalid ProvcacheOciOptions: {string.Join("; ", errors)}");
}
}
}

View File

@@ -0,0 +1,492 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0004_api_endpoints
// Task: API-8200-001 - Define request/response DTOs for EWS scoring
using System.Text.Json.Serialization;
namespace StellaOps.Findings.Ledger.WebService.Contracts;
#region Request DTOs
/// <summary>
/// Request to calculate score for a single finding.
/// </summary>
public sealed record CalculateScoreRequest
{
/// <summary>
/// Force recalculation even if cached score exists.
/// </summary>
public bool ForceRecalculate { get; init; }
/// <summary>
/// Include detailed breakdown in response.
/// </summary>
public bool IncludeBreakdown { get; init; } = true;
/// <summary>
/// Specific policy version to use. Null = use latest.
/// </summary>
public string? PolicyVersion { get; init; }
}
/// <summary>
/// Request to calculate scores for multiple findings.
/// </summary>
public sealed record CalculateScoresBatchRequest
{
/// <summary>
/// Finding IDs to calculate scores for. Max 100.
/// </summary>
public required IReadOnlyList<string> FindingIds { get; init; }
/// <summary>
/// Force recalculation even if cached scores exist.
/// </summary>
public bool ForceRecalculate { get; init; }
/// <summary>
/// Include detailed breakdown in response.
/// </summary>
public bool IncludeBreakdown { get; init; } = true;
/// <summary>
/// Specific policy version to use. Null = use latest.
/// </summary>
public string? PolicyVersion { get; init; }
}
/// <summary>
/// Request to register a webhook for score changes.
/// </summary>
public sealed record RegisterWebhookRequest
{
/// <summary>
/// Webhook URL to call on score changes.
/// </summary>
public required string Url { get; init; }
/// <summary>
/// Optional secret for HMAC-SHA256 signature.
/// </summary>
public string? Secret { get; init; }
/// <summary>
/// Finding ID patterns to watch. Empty = all findings.
/// </summary>
public IReadOnlyList<string>? FindingPatterns { get; init; }
/// <summary>
/// Minimum score change to trigger webhook.
/// </summary>
public int MinScoreChange { get; init; } = 5;
/// <summary>
/// Whether to trigger on bucket changes.
/// </summary>
public bool TriggerOnBucketChange { get; init; } = true;
}
#endregion
#region Response DTOs
/// <summary>
/// Evidence-weighted score calculation result.
/// </summary>
public sealed record EvidenceWeightedScoreResponse
{
/// <summary>
/// Finding identifier.
/// </summary>
public required string FindingId { get; init; }
/// <summary>
/// Calculated score (0-100). Higher = more urgent.
/// </summary>
public required int Score { get; init; }
/// <summary>
/// Action bucket: ActNow, ScheduleNext, Investigate, Watchlist.
/// </summary>
public required string Bucket { get; init; }
/// <summary>
/// Normalized input values (0-1 scale).
/// </summary>
public EvidenceInputsDto? Inputs { get; init; }
/// <summary>
/// Applied evidence weights.
/// </summary>
public EvidenceWeightsDto? Weights { get; init; }
/// <summary>
/// Active flags affecting the score.
/// </summary>
public IReadOnlyList<string>? Flags { get; init; }
/// <summary>
/// Human-readable explanations for each factor.
/// </summary>
public IReadOnlyList<string>? Explanations { get; init; }
/// <summary>
/// Applied guardrails (caps/floors).
/// </summary>
public AppliedCapsDto? Caps { get; init; }
/// <summary>
/// Policy digest used for calculation.
/// </summary>
public required string PolicyDigest { get; init; }
/// <summary>
/// When the score was calculated.
/// </summary>
public required DateTimeOffset CalculatedAt { get; init; }
/// <summary>
/// When the cached score expires.
/// </summary>
public DateTimeOffset? CachedUntil { get; init; }
/// <summary>
/// Whether this result came from cache.
/// </summary>
public bool FromCache { get; init; }
}
/// <summary>
/// Normalized evidence input values.
/// </summary>
public sealed record EvidenceInputsDto
{
/// <summary>Reachability (0-1)</summary>
[JsonPropertyName("rch")]
public double Reachability { get; init; }
/// <summary>Runtime signal (0-1)</summary>
[JsonPropertyName("rts")]
public double Runtime { get; init; }
/// <summary>Backport availability (0-1)</summary>
[JsonPropertyName("bkp")]
public double Backport { get; init; }
/// <summary>Exploit likelihood (0-1)</summary>
[JsonPropertyName("xpl")]
public double Exploit { get; init; }
/// <summary>Source trust (0-1)</summary>
[JsonPropertyName("src")]
public double SourceTrust { get; init; }
/// <summary>Mitigation effectiveness (0-1)</summary>
[JsonPropertyName("mit")]
public double Mitigation { get; init; }
}
/// <summary>
/// Evidence weight configuration.
/// </summary>
public sealed record EvidenceWeightsDto
{
/// <summary>Reachability weight</summary>
[JsonPropertyName("rch")]
public double Reachability { get; init; }
/// <summary>Runtime signal weight</summary>
[JsonPropertyName("rts")]
public double Runtime { get; init; }
/// <summary>Backport weight</summary>
[JsonPropertyName("bkp")]
public double Backport { get; init; }
/// <summary>Exploit weight</summary>
[JsonPropertyName("xpl")]
public double Exploit { get; init; }
/// <summary>Source trust weight</summary>
[JsonPropertyName("src")]
public double SourceTrust { get; init; }
/// <summary>Mitigation weight</summary>
[JsonPropertyName("mit")]
public double Mitigation { get; init; }
}
/// <summary>
/// Applied guardrail caps and floors.
/// </summary>
public sealed record AppliedCapsDto
{
/// <summary>Speculative cap applied (no runtime evidence).</summary>
public bool SpeculativeCap { get; init; }
/// <summary>Not-affected cap applied (VEX status).</summary>
public bool NotAffectedCap { get; init; }
/// <summary>Runtime floor applied (observed in production).</summary>
public bool RuntimeFloor { get; init; }
}
/// <summary>
/// Batch score calculation result.
/// </summary>
public sealed record CalculateScoresBatchResponse
{
/// <summary>
/// Individual score results.
/// </summary>
public required IReadOnlyList<EvidenceWeightedScoreResponse> Results { get; init; }
/// <summary>
/// Summary statistics.
/// </summary>
public required BatchSummaryDto Summary { get; init; }
/// <summary>
/// Errors for failed calculations.
/// </summary>
public IReadOnlyList<ScoringErrorDto>? Errors { get; init; }
/// <summary>
/// Policy digest used for all calculations.
/// </summary>
public required string PolicyDigest { get; init; }
/// <summary>
/// When the batch was calculated.
/// </summary>
public required DateTimeOffset CalculatedAt { get; init; }
}
/// <summary>
/// Batch calculation summary.
/// </summary>
public sealed record BatchSummaryDto
{
/// <summary>Total findings processed.</summary>
public required int Total { get; init; }
/// <summary>Successful calculations.</summary>
public required int Succeeded { get; init; }
/// <summary>Failed calculations.</summary>
public required int Failed { get; init; }
/// <summary>Score distribution by bucket.</summary>
public required BucketDistributionDto ByBucket { get; init; }
/// <summary>Average score across all findings.</summary>
public required double AverageScore { get; init; }
/// <summary>Total calculation time in milliseconds.</summary>
public required double CalculationTimeMs { get; init; }
}
/// <summary>
/// Score distribution by bucket.
/// </summary>
public sealed record BucketDistributionDto
{
public int ActNow { get; init; }
public int ScheduleNext { get; init; }
public int Investigate { get; init; }
public int Watchlist { get; init; }
}
/// <summary>
/// Score history entry.
/// </summary>
public sealed record ScoreHistoryEntry
{
/// <summary>Score value at this point in time.</summary>
public required int Score { get; init; }
/// <summary>Bucket at this point in time.</summary>
public required string Bucket { get; init; }
/// <summary>Policy digest used.</summary>
public required string PolicyDigest { get; init; }
/// <summary>When calculated.</summary>
public required DateTimeOffset CalculatedAt { get; init; }
/// <summary>What triggered recalculation.</summary>
public required string Trigger { get; init; }
/// <summary>Which factors changed since previous calculation.</summary>
public IReadOnlyList<string>? ChangedFactors { get; init; }
}
/// <summary>
/// Score history response.
/// </summary>
public sealed record ScoreHistoryResponse
{
/// <summary>Finding ID.</summary>
public required string FindingId { get; init; }
/// <summary>History entries.</summary>
public required IReadOnlyList<ScoreHistoryEntry> History { get; init; }
/// <summary>Pagination information.</summary>
public required PaginationDto Pagination { get; init; }
}
/// <summary>
/// Pagination metadata.
/// </summary>
public sealed record PaginationDto
{
/// <summary>Whether more results are available.</summary>
public required bool HasMore { get; init; }
/// <summary>Cursor for next page. Null if no more pages.</summary>
public string? NextCursor { get; init; }
}
/// <summary>
/// Scoring policy response.
/// </summary>
public sealed record ScoringPolicyResponse
{
/// <summary>Policy version identifier.</summary>
public required string Version { get; init; }
/// <summary>Policy content digest.</summary>
public required string Digest { get; init; }
/// <summary>When this policy became active.</summary>
public required DateTimeOffset ActiveSince { get; init; }
/// <summary>Environment (production, staging, etc.).</summary>
public required string Environment { get; init; }
/// <summary>Evidence weights.</summary>
public required EvidenceWeightsDto Weights { get; init; }
/// <summary>Guardrail configuration.</summary>
public required GuardrailsConfigDto Guardrails { get; init; }
/// <summary>Bucket thresholds.</summary>
public required BucketThresholdsDto Buckets { get; init; }
}
/// <summary>
/// Guardrail configuration.
/// </summary>
public sealed record GuardrailsConfigDto
{
public required GuardrailDto NotAffectedCap { get; init; }
public required GuardrailDto RuntimeFloor { get; init; }
public required GuardrailDto SpeculativeCap { get; init; }
}
/// <summary>
/// Individual guardrail settings.
/// </summary>
public sealed record GuardrailDto
{
public required bool Enabled { get; init; }
public int? MaxScore { get; init; }
public int? MinScore { get; init; }
}
/// <summary>
/// Bucket threshold configuration.
/// </summary>
public sealed record BucketThresholdsDto
{
/// <summary>Minimum score for ActNow bucket.</summary>
public required int ActNowMin { get; init; }
/// <summary>Minimum score for ScheduleNext bucket.</summary>
public required int ScheduleNextMin { get; init; }
/// <summary>Minimum score for Investigate bucket.</summary>
public required int InvestigateMin { get; init; }
}
/// <summary>
/// Webhook registration response.
/// </summary>
public sealed record WebhookResponse
{
/// <summary>Webhook ID.</summary>
public required Guid Id { get; init; }
/// <summary>Webhook URL.</summary>
public required string Url { get; init; }
/// <summary>Whether secret is configured.</summary>
public required bool HasSecret { get; init; }
/// <summary>Finding patterns being watched.</summary>
public IReadOnlyList<string>? FindingPatterns { get; init; }
/// <summary>Minimum score change threshold.</summary>
public required int MinScoreChange { get; init; }
/// <summary>Whether to trigger on bucket changes.</summary>
public required bool TriggerOnBucketChange { get; init; }
/// <summary>When webhook was created.</summary>
public required DateTimeOffset CreatedAt { get; init; }
}
#endregion
#region Error DTOs
/// <summary>
/// Scoring error information.
/// </summary>
public sealed record ScoringErrorDto
{
/// <summary>Finding ID that failed.</summary>
public required string FindingId { get; init; }
/// <summary>Error code.</summary>
public required string Code { get; init; }
/// <summary>Error message.</summary>
public required string Message { get; init; }
}
/// <summary>
/// Scoring error response.
/// </summary>
public sealed record ScoringErrorResponse
{
/// <summary>Error code.</summary>
public required string Code { get; init; }
/// <summary>Error message.</summary>
public required string Message { get; init; }
/// <summary>Additional details.</summary>
public IDictionary<string, object>? Details { get; init; }
/// <summary>Trace ID for debugging.</summary>
public string? TraceId { get; init; }
}
/// <summary>
/// Standard error codes for scoring operations.
/// </summary>
public static class ScoringErrorCodes
{
public const string FindingNotFound = "SCORING_FINDING_NOT_FOUND";
public const string EvidenceNotAvailable = "SCORING_EVIDENCE_NOT_AVAILABLE";
public const string PolicyNotFound = "SCORING_POLICY_NOT_FOUND";
public const string CalculationFailed = "SCORING_CALCULATION_FAILED";
public const string BatchTooLarge = "SCORING_BATCH_TOO_LARGE";
public const string RateLimitExceeded = "SCORING_RATE_LIMIT_EXCEEDED";
public const string InvalidRequest = "SCORING_INVALID_REQUEST";
}
#endregion

View File

@@ -16,9 +16,9 @@ public static class EvidenceGraphEndpoints
// GET /api/v1/findings/{findingId}/evidence-graph
group.MapGet("/{findingId:guid}/evidence-graph", async Task<Results<Ok<EvidenceGraphResponse>, NotFound>> (
Guid findingId,
[FromQuery] bool includeContent = false,
IEvidenceGraphBuilder builder,
CancellationToken ct) =>
CancellationToken ct,
[FromQuery] bool includeContent = false) =>
{
var graph = await builder.BuildAsync(findingId, ct);
return graph is not null

View File

@@ -31,13 +31,13 @@ public static class FindingSummaryEndpoints
// GET /api/v1/findings/summaries
group.MapGet("/summaries", async Task<Ok<FindingSummaryPage>> (
IFindingSummaryService service,
CancellationToken ct,
[FromQuery] int page = 1,
[FromQuery] int pageSize = 50,
[FromQuery] string? status = null,
[FromQuery] string? severity = null,
[FromQuery] decimal? minConfidence = null,
IFindingSummaryService service,
CancellationToken ct) =>
[FromQuery] decimal? minConfidence = null) =>
{
var filter = new FindingSummaryFilter
{

View File

@@ -15,9 +15,9 @@ public static class ReachabilityMapEndpoints
// GET /api/v1/findings/{findingId}/reachability-map
group.MapGet("/{findingId:guid}/reachability-map", async Task<Results<Ok<ReachabilityMiniMap>, NotFound>> (
Guid findingId,
[FromQuery] int maxPaths = 10,
IReachabilityMapService service,
CancellationToken ct) =>
CancellationToken ct,
[FromQuery] int maxPaths = 10) =>
{
var map = await service.GetMiniMapAsync(findingId, maxPaths, ct);
return map is not null

View File

@@ -15,11 +15,11 @@ public static class RuntimeTimelineEndpoints
// GET /api/v1/findings/{findingId}/runtime-timeline
group.MapGet("/{findingId:guid}/runtime-timeline", async Task<Results<Ok<RuntimeTimeline>, NotFound>> (
Guid findingId,
[FromQuery] DateTimeOffset? from,
[FromQuery] DateTimeOffset? to,
[FromQuery] int bucketHours = 1,
IRuntimeTimelineService service,
CancellationToken ct) =>
CancellationToken ct,
[FromQuery] DateTimeOffset? from = null,
[FromQuery] DateTimeOffset? to = null,
[FromQuery] int bucketHours = 1) =>
{
var options = new TimelineOptions
{

View File

@@ -0,0 +1,221 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0004_api_endpoints
// Task: API-8200-003 to API-8200-030 - Scoring API endpoints
using System.Diagnostics;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.WebService.Services;
namespace StellaOps.Findings.Ledger.WebService.Endpoints;
/// <summary>
/// Evidence-Weighted Score API endpoints.
/// </summary>
public static class ScoringEndpoints
{
private const int MaxBatchSize = 100;
// Authorization policy names (must match Program.cs)
private const string ScoringReadPolicy = "scoring.read";
private const string ScoringWritePolicy = "scoring.write";
public static void MapScoringEndpoints(this WebApplication app)
{
var findingsGroup = app.MapGroup("/api/v1/findings")
.WithTags("Scoring");
var scoringGroup = app.MapGroup("/api/v1/scoring")
.WithTags("Scoring");
// POST /api/v1/findings/{findingId}/score - Calculate score
// Rate limit: 100/min (via API Gateway)
findingsGroup.MapPost("/{findingId}/score", CalculateScore)
.WithName("CalculateFindingScore")
.WithDescription("Calculate evidence-weighted score for a finding")
.RequireAuthorization(ScoringWritePolicy)
.Produces<EvidenceWeightedScoreResponse>(200)
.Produces<ScoringErrorResponse>(400)
.Produces<ScoringErrorResponse>(404)
.Produces(429);
// GET /api/v1/findings/{findingId}/score - Get cached score
// Rate limit: 1000/min (via API Gateway)
findingsGroup.MapGet("/{findingId}/score", GetCachedScore)
.WithName("GetFindingScore")
.WithDescription("Get cached evidence-weighted score for a finding")
.RequireAuthorization(ScoringReadPolicy)
.Produces<EvidenceWeightedScoreResponse>(200)
.Produces(404);
// POST /api/v1/findings/scores - Batch calculate scores
// Rate limit: 10/min (via API Gateway)
findingsGroup.MapPost("/scores", CalculateScoresBatch)
.WithName("CalculateFindingScoresBatch")
.WithDescription("Calculate evidence-weighted scores for multiple findings")
.RequireAuthorization(ScoringWritePolicy)
.Produces<CalculateScoresBatchResponse>(200)
.Produces<ScoringErrorResponse>(400)
.Produces(429);
// GET /api/v1/findings/{findingId}/score-history - Get score history
// Rate limit: 100/min (via API Gateway)
findingsGroup.MapGet("/{findingId}/score-history", GetScoreHistory)
.WithName("GetFindingScoreHistory")
.WithDescription("Get score history for a finding")
.RequireAuthorization(ScoringReadPolicy)
.Produces<ScoreHistoryResponse>(200)
.Produces(404);
// GET /api/v1/scoring/policy - Get active policy
// Rate limit: 100/min (via API Gateway)
scoringGroup.MapGet("/policy", GetActivePolicy)
.WithName("GetActiveScoringPolicy")
.WithDescription("Get the active scoring policy configuration")
.RequireAuthorization(ScoringReadPolicy)
.Produces<ScoringPolicyResponse>(200);
// GET /api/v1/scoring/policy/{version} - Get specific policy version
// Rate limit: 100/min (via API Gateway)
scoringGroup.MapGet("/policy/{version}", GetPolicyVersion)
.WithName("GetScoringPolicyVersion")
.WithDescription("Get a specific scoring policy version")
.RequireAuthorization(ScoringReadPolicy)
.Produces<ScoringPolicyResponse>(200)
.Produces(404);
}
private static async Task<Results<Ok<EvidenceWeightedScoreResponse>, NotFound<ScoringErrorResponse>, BadRequest<ScoringErrorResponse>>> CalculateScore(
string findingId,
[FromBody] CalculateScoreRequest? request,
IFindingScoringService service,
CancellationToken ct)
{
request ??= new CalculateScoreRequest();
try
{
var result = await service.CalculateScoreAsync(findingId, request, ct);
if (result is null)
{
return TypedResults.NotFound(new ScoringErrorResponse
{
Code = ScoringErrorCodes.FindingNotFound,
Message = $"Finding '{findingId}' not found or no evidence available",
TraceId = Activity.Current?.Id
});
}
return TypedResults.Ok(result);
}
catch (Exception ex)
{
return TypedResults.BadRequest(new ScoringErrorResponse
{
Code = ScoringErrorCodes.CalculationFailed,
Message = ex.Message,
TraceId = Activity.Current?.Id
});
}
}
private static async Task<Results<Ok<EvidenceWeightedScoreResponse>, NotFound>> GetCachedScore(
string findingId,
IFindingScoringService service,
CancellationToken ct)
{
var result = await service.GetCachedScoreAsync(findingId, ct);
if (result is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(result);
}
private static async Task<Results<Ok<CalculateScoresBatchResponse>, BadRequest<ScoringErrorResponse>>> CalculateScoresBatch(
[FromBody] CalculateScoresBatchRequest request,
IFindingScoringService service,
CancellationToken ct)
{
// Validate batch size
if (request.FindingIds.Count == 0)
{
return TypedResults.BadRequest(new ScoringErrorResponse
{
Code = ScoringErrorCodes.InvalidRequest,
Message = "At least one finding ID is required",
TraceId = Activity.Current?.Id
});
}
if (request.FindingIds.Count > MaxBatchSize)
{
return TypedResults.BadRequest(new ScoringErrorResponse
{
Code = ScoringErrorCodes.BatchTooLarge,
Message = $"Batch size {request.FindingIds.Count} exceeds maximum {MaxBatchSize}",
TraceId = Activity.Current?.Id
});
}
try
{
var result = await service.CalculateScoresBatchAsync(request, ct);
return TypedResults.Ok(result);
}
catch (Exception ex)
{
return TypedResults.BadRequest(new ScoringErrorResponse
{
Code = ScoringErrorCodes.CalculationFailed,
Message = ex.Message,
TraceId = Activity.Current?.Id
});
}
}
private static async Task<Results<Ok<ScoreHistoryResponse>, NotFound>> GetScoreHistory(
string findingId,
IFindingScoringService service,
CancellationToken ct,
[FromQuery] DateTimeOffset? from = null,
[FromQuery] DateTimeOffset? to = null,
[FromQuery] int limit = 50,
[FromQuery] string? cursor = null)
{
limit = Math.Clamp(limit, 1, 100);
var result = await service.GetScoreHistoryAsync(findingId, from, to, limit, cursor, ct);
if (result is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(result);
}
private static async Task<Ok<ScoringPolicyResponse>> GetActivePolicy(
IFindingScoringService service,
CancellationToken ct)
{
var policy = await service.GetActivePolicyAsync(ct);
return TypedResults.Ok(policy);
}
private static async Task<Results<Ok<ScoringPolicyResponse>, NotFound>> GetPolicyVersion(
string version,
IFindingScoringService service,
CancellationToken ct)
{
var policy = await service.GetPolicyVersionAsync(version, ct);
if (policy is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(policy);
}
}

View File

@@ -0,0 +1,175 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.WebService.Services;
namespace StellaOps.Findings.Ledger.WebService.Endpoints;
/// <summary>
/// Webhook management endpoints.
/// Sprint: SPRINT_8200_0012_0004 - Wave 6
/// </summary>
public static class WebhookEndpoints
{
// Authorization policy name (must match Program.cs)
private const string ScoringAdminPolicy = "scoring.admin";
public static void MapWebhookEndpoints(this IEndpointRouteBuilder app)
{
var group = app.MapGroup("/api/v1/scoring/webhooks")
.WithTags("Webhooks");
// POST /api/v1/scoring/webhooks - Register webhook
// Rate limit: 10/min (via API Gateway)
group.MapPost("/", RegisterWebhook)
.WithName("RegisterScoringWebhook")
.WithDescription("Register a webhook for score change notifications")
.Produces<WebhookResponse>(StatusCodes.Status201Created)
.ProducesValidationProblem()
.RequireAuthorization(ScoringAdminPolicy);
// GET /api/v1/scoring/webhooks - List webhooks
// Rate limit: 10/min (via API Gateway)
group.MapGet("/", ListWebhooks)
.WithName("ListScoringWebhooks")
.WithDescription("List all registered webhooks")
.Produces<WebhookListResponse>(StatusCodes.Status200OK)
.RequireAuthorization(ScoringAdminPolicy);
// GET /api/v1/scoring/webhooks/{id} - Get webhook
// Rate limit: 10/min (via API Gateway)
group.MapGet("/{id:guid}", GetWebhook)
.WithName("GetScoringWebhook")
.WithDescription("Get a specific webhook by ID")
.Produces<WebhookResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScoringAdminPolicy);
// PUT /api/v1/scoring/webhooks/{id} - Update webhook
// Rate limit: 10/min (via API Gateway)
group.MapPut("/{id:guid}", UpdateWebhook)
.WithName("UpdateScoringWebhook")
.WithDescription("Update a webhook configuration")
.Produces<WebhookResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound)
.ProducesValidationProblem()
.RequireAuthorization(ScoringAdminPolicy);
// DELETE /api/v1/scoring/webhooks/{id} - Delete webhook
// Rate limit: 10/min (via API Gateway)
group.MapDelete("/{id:guid}", DeleteWebhook)
.WithName("DeleteScoringWebhook")
.WithDescription("Delete a webhook")
.Produces(StatusCodes.Status204NoContent)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScoringAdminPolicy);
}
private static Results<Created<WebhookResponse>, ValidationProblem> RegisterWebhook(
[FromBody] RegisterWebhookRequest request,
[FromServices] IWebhookStore store)
{
// Validate URL
if (!Uri.TryCreate(request.Url, UriKind.Absolute, out var uri) ||
(uri.Scheme != "http" && uri.Scheme != "https"))
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["url"] = ["Invalid webhook URL. Must be an absolute HTTP or HTTPS URL."]
});
}
var registration = store.Register(request);
var response = MapToResponse(registration);
return TypedResults.Created($"/api/v1/scoring/webhooks/{registration.Id}", response);
}
private static Ok<WebhookListResponse> ListWebhooks(
[FromServices] IWebhookStore store)
{
var webhooks = store.List();
var response = new WebhookListResponse
{
Webhooks = webhooks.Select(MapToResponse).ToList(),
TotalCount = webhooks.Count
};
return TypedResults.Ok(response);
}
private static Results<Ok<WebhookResponse>, NotFound> GetWebhook(
Guid id,
[FromServices] IWebhookStore store)
{
var webhook = store.Get(id);
if (webhook is null || !webhook.IsActive)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(MapToResponse(webhook));
}
private static Results<Ok<WebhookResponse>, NotFound, ValidationProblem> UpdateWebhook(
Guid id,
[FromBody] RegisterWebhookRequest request,
[FromServices] IWebhookStore store)
{
// Validate URL
if (!Uri.TryCreate(request.Url, UriKind.Absolute, out var uri) ||
(uri.Scheme != "http" && uri.Scheme != "https"))
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["url"] = ["Invalid webhook URL. Must be an absolute HTTP or HTTPS URL."]
});
}
if (!store.Update(id, request))
{
return TypedResults.NotFound();
}
var updated = store.Get(id);
return TypedResults.Ok(MapToResponse(updated!));
}
private static Results<NoContent, NotFound> DeleteWebhook(
Guid id,
[FromServices] IWebhookStore store)
{
if (!store.Delete(id))
{
return TypedResults.NotFound();
}
return TypedResults.NoContent();
}
private static WebhookResponse MapToResponse(WebhookRegistration registration)
{
return new WebhookResponse
{
Id = registration.Id,
Url = registration.Url,
HasSecret = !string.IsNullOrEmpty(registration.Secret),
FindingPatterns = registration.FindingPatterns,
MinScoreChange = registration.MinScoreChange,
TriggerOnBucketChange = registration.TriggerOnBucketChange,
CreatedAt = registration.CreatedAt
};
}
}
/// <summary>
/// Response for listing webhooks.
/// </summary>
public sealed record WebhookListResponse
{
/// <summary>List of webhooks.</summary>
public required IReadOnlyList<WebhookResponse> Webhooks { get; init; }
/// <summary>Total count of webhooks.</summary>
public required int TotalCount { get; init; }
}

View File

@@ -11,6 +11,7 @@ using StellaOps.Auth.ServerIntegration;
using StellaOps.Configuration;
using StellaOps.DependencyInjection;
using StellaOps.Findings.Ledger.Domain;
using Domain = StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Infrastructure.Merkle;
@@ -23,18 +24,27 @@ using StellaOps.Findings.Ledger.Services;
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.WebService.Mappings;
using StellaOps.Findings.Ledger.WebService.Services;
using StellaOps.Findings.Ledger.WebService.Endpoints;
using StellaOps.Telemetry.Core;
using StellaOps.Findings.Ledger.Services.Security;
using StellaOps.Findings.Ledger;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.OpenApi;
using System.Text.Json.Nodes;
using System.Security.Cryptography;
using System.Text;
using System.Threading.RateLimiting;
using StellaOps.Findings.Ledger.Services.Incident;
using StellaOps.Router.AspNet;
const string LedgerWritePolicy = "ledger.events.write";
const string LedgerExportPolicy = "ledger.export.read";
// Scoring API policies (SPRINT_8200.0012.0004 - Wave 7)
const string ScoringReadPolicy = "scoring.read";
const string ScoringWritePolicy = "scoring.write";
const string ScoringAdminPolicy = "scoring.admin";
var builder = WebApplication.CreateBuilder(args);
builder.Configuration.AddStellaOpsDefaults(options =>
@@ -79,17 +89,15 @@ builder.Services.AddHealthChecks();
builder.Services.AddStellaOpsTelemetry(
builder.Configuration,
configureMetering: meterBuilder =>
serviceName: "StellaOps.Findings.Ledger",
configureMetrics: meterBuilder =>
{
meterBuilder.AddAspNetCoreInstrumentation();
meterBuilder.AddHttpClientInstrumentation();
},
configureTracing: tracerBuilder =>
{
tracerBuilder.AddAspNetCoreInstrumentation();
tracerBuilder.AddHttpClientInstrumentation();
meterBuilder.AddMeter("StellaOps.Findings.Ledger");
});
// Rate limiting is handled by API Gateway - see docs/modules/gateway/rate-limiting.md
// Endpoint-level rate limits: scoring-read (1000/min), scoring-calculate (100/min), scoring-batch (10/min), scoring-webhook (10/min)
builder.Services.AddIncidentMode(builder.Configuration);
builder.Services.AddStellaOpsResourceServerAuthentication(
@@ -140,6 +148,28 @@ builder.Services.AddAuthorization(options =>
policy.Requirements.Add(new StellaOpsScopeRequirement(scopes));
policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme);
});
// Scoring API policies (SPRINT_8200.0012.0004 - Wave 7)
options.AddPolicy(ScoringReadPolicy, policy =>
{
policy.RequireAuthenticatedUser();
policy.Requirements.Add(new StellaOpsScopeRequirement(scopes));
policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme);
});
options.AddPolicy(ScoringWritePolicy, policy =>
{
policy.RequireAuthenticatedUser();
policy.Requirements.Add(new StellaOpsScopeRequirement(scopes));
policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme);
});
options.AddPolicy(ScoringAdminPolicy, policy =>
{
policy.RequireAuthenticatedUser();
policy.Requirements.Add(new StellaOpsScopeRequirement(scopes));
policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme);
});
});
builder.Services.AddSingleton<ILedgerIncidentNotifier, LoggingLedgerIncidentNotifier>();
@@ -184,6 +214,19 @@ builder.Services.AddSingleton<VexConsensusService>();
// Alert and Decision services (SPRINT_3602)
builder.Services.AddSingleton<IAlertService, AlertService>();
builder.Services.AddSingleton<IDecisionService, DecisionService>();
builder.Services.AddSingleton<IEvidenceBundleService, EvidenceBundleService>();
// Evidence-Weighted Score services (SPRINT_8200.0012.0004)
builder.Services.AddSingleton<IScoreHistoryStore, InMemoryScoreHistoryStore>();
builder.Services.AddSingleton<IFindingScoringService, FindingScoringService>();
// Webhook services (SPRINT_8200.0012.0004 - Wave 6)
builder.Services.AddSingleton<IWebhookStore, InMemoryWebhookStore>();
builder.Services.AddSingleton<IWebhookDeliveryService, WebhookDeliveryService>();
builder.Services.AddHttpClient("webhook-delivery", client =>
{
client.Timeout = TimeSpan.FromSeconds(30);
});
// Stella Router integration
var routerOptions = builder.Configuration.GetSection("FindingsLedger:Router").Get<StellaRouterOptionsBase>();
@@ -414,7 +457,7 @@ app.MapGet("/ledger/export/vex", async Task<Results<FileStreamHttpResult, JsonHt
httpContext.Request.Query["status"].ToString(),
httpContext.Request.Query["statement_type"].ToString(),
exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
filtersHash: string.Empty,
FiltersHash: string.Empty,
PagingKey: null);
var filtersHash = exportQueryService.ComputeFiltersHash(request);
@@ -484,7 +527,7 @@ app.MapGet("/ledger/export/advisories", async Task<Results<FileStreamHttpResult,
cvssScoreMin,
cvssScoreMax,
exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
filtersHash: string.Empty,
FiltersHash: string.Empty,
PagingKey: null);
var filtersHash = exportQueryService.ComputeFiltersHash(request);
@@ -548,7 +591,7 @@ app.MapGet("/ledger/export/sboms", async Task<Results<FileStreamHttpResult, Json
ParseBool(httpContext.Request.Query["contains_native"]),
httpContext.Request.Query["slsa_build_type"].ToString(),
exportQueryService.ClampPageSize(ParseInt(httpContext.Request.Query["page_size"])),
filtersHash: string.Empty,
FiltersHash: string.Empty,
PagingKey: null);
var filtersHash = exportQueryService.ComputeFiltersHash(request);
@@ -1863,6 +1906,10 @@ app.MapPatch("/api/v1/findings/{findingId}/state", async Task<Results<Ok<StateTr
// Refresh Router endpoint cache
app.TryRefreshStellaRouterEndpoints(routerOptions);
// Map EWS scoring and webhook endpoints (SPRINT_8200.0012.0004)
app.MapScoringEndpoints();
app.MapWebhookEndpoints();
app.Run();
static Created<LedgerEventResponse> CreateCreatedResponse(LedgerEventRecord record)

View File

@@ -0,0 +1,429 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0004_api_endpoints
// Task: API-8200-003, API-8200-004 - Implement scoring service
using System.Diagnostics;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Options;
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
namespace StellaOps.Findings.Ledger.WebService.Services;
/// <summary>
/// Service for calculating evidence-weighted scores for findings.
/// </summary>
public interface IFindingScoringService
{
/// <summary>
/// Calculate score for a single finding.
/// </summary>
Task<EvidenceWeightedScoreResponse?> CalculateScoreAsync(
string findingId,
CalculateScoreRequest request,
CancellationToken ct);
/// <summary>
/// Calculate scores for multiple findings.
/// </summary>
Task<CalculateScoresBatchResponse> CalculateScoresBatchAsync(
CalculateScoresBatchRequest request,
CancellationToken ct);
/// <summary>
/// Get cached score for a finding.
/// </summary>
Task<EvidenceWeightedScoreResponse?> GetCachedScoreAsync(
string findingId,
CancellationToken ct);
/// <summary>
/// Get score history for a finding.
/// </summary>
Task<ScoreHistoryResponse?> GetScoreHistoryAsync(
string findingId,
DateTimeOffset? from,
DateTimeOffset? to,
int limit,
string? cursor,
CancellationToken ct);
/// <summary>
/// Get active scoring policy.
/// </summary>
Task<ScoringPolicyResponse> GetActivePolicyAsync(CancellationToken ct);
/// <summary>
/// Get specific policy version.
/// </summary>
Task<ScoringPolicyResponse?> GetPolicyVersionAsync(string version, CancellationToken ct);
}
/// <summary>
/// Configuration options for finding scoring service.
/// </summary>
public sealed class FindingScoringOptions
{
public const string SectionName = "Scoring";
/// <summary>
/// Default cache TTL for scores in minutes.
/// </summary>
public int CacheTtlMinutes { get; set; } = 60;
/// <summary>
/// Maximum batch size for bulk calculations.
/// </summary>
public int MaxBatchSize { get; set; } = 100;
/// <summary>
/// Maximum concurrent calculations in a batch.
/// </summary>
public int MaxConcurrency { get; set; } = 10;
}
/// <summary>
/// Implementation of finding scoring service using EWS calculator.
/// </summary>
public sealed class FindingScoringService : IFindingScoringService
{
private readonly INormalizerAggregator _normalizer;
private readonly IEvidenceWeightedScoreCalculator _calculator;
private readonly IEvidenceWeightPolicyProvider _policyProvider;
private readonly IFindingEvidenceProvider _evidenceProvider;
private readonly IScoreHistoryStore _historyStore;
private readonly IMemoryCache _cache;
private readonly FindingScoringOptions _options;
private readonly ILogger<FindingScoringService> _logger;
private static readonly TimeSpan DefaultCacheDuration = TimeSpan.FromMinutes(60);
private readonly string _environment;
public FindingScoringService(
INormalizerAggregator normalizer,
IEvidenceWeightedScoreCalculator calculator,
IEvidenceWeightPolicyProvider policyProvider,
IFindingEvidenceProvider evidenceProvider,
IScoreHistoryStore historyStore,
IMemoryCache cache,
IOptions<FindingScoringOptions> options,
ILogger<FindingScoringService> logger)
{
_normalizer = normalizer;
_calculator = calculator;
_policyProvider = policyProvider;
_evidenceProvider = evidenceProvider;
_historyStore = historyStore;
_cache = cache;
_options = options.Value;
_logger = logger;
_environment = Environment.GetEnvironmentVariable("STELLAOPS_ENVIRONMENT") ?? "production";
}
public async Task<EvidenceWeightedScoreResponse?> CalculateScoreAsync(
string findingId,
CalculateScoreRequest request,
CancellationToken ct)
{
// Check cache first unless force recalculate
if (!request.ForceRecalculate)
{
var cached = await GetCachedScoreAsync(findingId, ct);
if (cached is not null)
{
return cached with { FromCache = true };
}
}
// Get evidence for the finding
var evidence = await _evidenceProvider.GetEvidenceAsync(findingId, ct);
if (evidence is null)
{
_logger.LogWarning("No evidence found for finding {FindingId}", findingId);
return null;
}
// Get policy - use environment from config/env, tenant from request if available
var environment = request.PolicyVersion ?? _environment;
var policy = await _policyProvider.GetDefaultPolicyAsync(environment, ct);
// Normalize evidence into EvidenceWeightedScoreInput
var input = _normalizer.Aggregate(evidence);
var result = _calculator.Calculate(input, policy);
var now = DateTimeOffset.UtcNow;
var cacheDuration = TimeSpan.FromMinutes(_options.CacheTtlMinutes);
var response = MapToResponse(result, request.IncludeBreakdown, now, cacheDuration);
// Cache the result
var cacheKey = GetCacheKey(findingId);
_cache.Set(cacheKey, response, cacheDuration);
// Record in history
var historyRecord = new ScoreRecord
{
FindingId = findingId,
Score = response.Score,
Bucket = response.Bucket,
PolicyDigest = response.PolicyDigest,
CalculatedAt = now,
Trigger = request.ForceRecalculate ? "force_recalculate" : "calculation",
ChangedFactors = []
};
_historyStore.RecordScore(historyRecord);
return response;
}
public async Task<CalculateScoresBatchResponse> CalculateScoresBatchAsync(
CalculateScoresBatchRequest request,
CancellationToken ct)
{
var stopwatch = Stopwatch.StartNew();
var results = new List<EvidenceWeightedScoreResponse>();
var errors = new List<ScoringErrorDto>();
// Validate batch size
if (request.FindingIds.Count > _options.MaxBatchSize)
{
throw new InvalidOperationException(
$"Batch size {request.FindingIds.Count} exceeds maximum {_options.MaxBatchSize}");
}
// Get policy once for all calculations
var environment = request.PolicyVersion ?? _environment;
var policy = await _policyProvider.GetDefaultPolicyAsync(environment, ct);
// Process in parallel with limited concurrency
var semaphore = new SemaphoreSlim(_options.MaxConcurrency);
var tasks = request.FindingIds.Select(async findingId =>
{
await semaphore.WaitAsync(ct);
try
{
var singleRequest = new CalculateScoreRequest
{
ForceRecalculate = request.ForceRecalculate,
IncludeBreakdown = request.IncludeBreakdown,
PolicyVersion = request.PolicyVersion
};
var result = await CalculateScoreAsync(findingId, singleRequest, ct);
return (findingId, result, error: (ScoringErrorDto?)null);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to calculate score for finding {FindingId}", findingId);
return (findingId, result: (EvidenceWeightedScoreResponse?)null, error: new ScoringErrorDto
{
FindingId = findingId,
Code = ScoringErrorCodes.CalculationFailed,
Message = ex.Message
});
}
finally
{
semaphore.Release();
}
});
var taskResults = await Task.WhenAll(tasks);
foreach (var (findingId, result, error) in taskResults)
{
if (result is not null)
{
results.Add(result);
}
else if (error is not null)
{
errors.Add(error);
}
else
{
errors.Add(new ScoringErrorDto
{
FindingId = findingId,
Code = ScoringErrorCodes.FindingNotFound,
Message = "Finding not found or no evidence available"
});
}
}
stopwatch.Stop();
// Calculate summary statistics
var bucketCounts = results.GroupBy(r => r.Bucket)
.ToDictionary(g => g.Key, g => g.Count());
var summary = new BatchSummaryDto
{
Total = request.FindingIds.Count,
Succeeded = results.Count,
Failed = errors.Count,
ByBucket = new BucketDistributionDto
{
ActNow = bucketCounts.GetValueOrDefault("ActNow", 0),
ScheduleNext = bucketCounts.GetValueOrDefault("ScheduleNext", 0),
Investigate = bucketCounts.GetValueOrDefault("Investigate", 0),
Watchlist = bucketCounts.GetValueOrDefault("Watchlist", 0)
},
AverageScore = results.Count > 0 ? results.Average(r => r.Score) : 0,
CalculationTimeMs = stopwatch.Elapsed.TotalMilliseconds
};
return new CalculateScoresBatchResponse
{
Results = results,
Summary = summary,
Errors = errors.Count > 0 ? errors : null,
PolicyDigest = policy.ComputeDigest(),
CalculatedAt = DateTimeOffset.UtcNow
};
}
public Task<EvidenceWeightedScoreResponse?> GetCachedScoreAsync(
string findingId,
CancellationToken ct)
{
var cacheKey = GetCacheKey(findingId);
if (_cache.TryGetValue<EvidenceWeightedScoreResponse>(cacheKey, out var cached))
{
return Task.FromResult<EvidenceWeightedScoreResponse?>(cached with { FromCache = true });
}
return Task.FromResult<EvidenceWeightedScoreResponse?>(null);
}
public Task<ScoreHistoryResponse?> GetScoreHistoryAsync(
string findingId,
DateTimeOffset? from,
DateTimeOffset? to,
int limit,
string? cursor,
CancellationToken ct)
{
_logger.LogDebug("Getting score history for finding {FindingId}", findingId);
var history = _historyStore.GetHistory(findingId, from, to, limit, cursor);
return Task.FromResult(history);
}
public async Task<ScoringPolicyResponse> GetActivePolicyAsync(CancellationToken ct)
{
var policy = await _policyProvider.GetDefaultPolicyAsync(_environment, ct);
return MapPolicyToResponse(policy);
}
public async Task<ScoringPolicyResponse?> GetPolicyVersionAsync(string version, CancellationToken ct)
{
// Version is used as environment for policy lookup
var policy = await _policyProvider.GetDefaultPolicyAsync(version, ct);
return MapPolicyToResponse(policy);
}
private static string GetCacheKey(string findingId) => $"ews:score:{findingId}";
private static EvidenceWeightedScoreResponse MapToResponse(
EvidenceWeightedScoreResult result,
bool includeBreakdown,
DateTimeOffset calculatedAt,
TimeSpan cacheDuration)
{
return new EvidenceWeightedScoreResponse
{
FindingId = result.FindingId,
Score = result.Score,
Bucket = result.Bucket.ToString(),
Inputs = includeBreakdown ? new EvidenceInputsDto
{
Reachability = result.Inputs.Rch,
Runtime = result.Inputs.Rts,
Backport = result.Inputs.Bkp,
Exploit = result.Inputs.Xpl,
SourceTrust = result.Inputs.Src,
Mitigation = result.Inputs.Mit
} : null,
Weights = includeBreakdown ? new EvidenceWeightsDto
{
Reachability = result.Weights.Rch,
Runtime = result.Weights.Rts,
Backport = result.Weights.Bkp,
Exploit = result.Weights.Xpl,
SourceTrust = result.Weights.Src,
Mitigation = result.Weights.Mit
} : null,
Flags = includeBreakdown ? result.Flags : null,
Explanations = includeBreakdown ? result.Explanations : null,
Caps = includeBreakdown ? new AppliedCapsDto
{
SpeculativeCap = result.Caps.SpeculativeCap,
NotAffectedCap = result.Caps.NotAffectedCap,
RuntimeFloor = result.Caps.RuntimeFloor
} : null,
PolicyDigest = result.PolicyDigest,
CalculatedAt = calculatedAt,
CachedUntil = calculatedAt.Add(cacheDuration),
FromCache = false
};
}
private static ScoringPolicyResponse MapPolicyToResponse(EvidenceWeightPolicy policy)
{
return new ScoringPolicyResponse
{
Version = policy.Version,
Digest = policy.ComputeDigest(),
ActiveSince = policy.CreatedAt,
Environment = policy.Profile,
Weights = new EvidenceWeightsDto
{
Reachability = policy.Weights.Rch,
Runtime = policy.Weights.Rts,
Backport = policy.Weights.Bkp,
Exploit = policy.Weights.Xpl,
SourceTrust = policy.Weights.Src,
Mitigation = policy.Weights.Mit
},
Guardrails = new GuardrailsConfigDto
{
NotAffectedCap = new GuardrailDto
{
Enabled = policy.Guardrails.NotAffectedCap.Enabled,
MaxScore = policy.Guardrails.NotAffectedCap.MaxScore
},
RuntimeFloor = new GuardrailDto
{
Enabled = policy.Guardrails.RuntimeFloor.Enabled,
MinScore = policy.Guardrails.RuntimeFloor.MinScore
},
SpeculativeCap = new GuardrailDto
{
Enabled = policy.Guardrails.SpeculativeCap.Enabled,
MaxScore = policy.Guardrails.SpeculativeCap.MaxScore
}
},
Buckets = new BucketThresholdsDto
{
ActNowMin = policy.Buckets.ActNowMin,
ScheduleNextMin = policy.Buckets.ScheduleNextMin,
InvestigateMin = policy.Buckets.InvestigateMin
}
};
}
}
/// <summary>
/// Provider for finding evidence data.
/// </summary>
public interface IFindingEvidenceProvider
{
/// <summary>
/// Get evidence for a finding.
/// </summary>
Task<FindingEvidence?> GetEvidenceAsync(string findingId, CancellationToken ct);
}

View File

@@ -0,0 +1,166 @@
using System.Collections.Concurrent;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Services;
/// <summary>
/// Represents a recorded score entry for history tracking.
/// </summary>
public sealed record ScoreRecord
{
public required string FindingId { get; init; }
public required int Score { get; init; }
public required string Bucket { get; init; }
public required string PolicyDigest { get; init; }
public required DateTimeOffset CalculatedAt { get; init; }
public required string Trigger { get; init; }
public IReadOnlyList<string> ChangedFactors { get; init; } = [];
}
/// <summary>
/// Interface for score history storage.
/// </summary>
public interface IScoreHistoryStore
{
/// <summary>
/// Records a score calculation.
/// </summary>
void RecordScore(ScoreRecord record);
/// <summary>
/// Gets score history for a finding.
/// </summary>
ScoreHistoryResponse? GetHistory(
string findingId,
DateTimeOffset? from,
DateTimeOffset? to,
int limit,
string? cursor);
}
/// <summary>
/// In-memory implementation of score history storage.
/// </summary>
public sealed class InMemoryScoreHistoryStore : IScoreHistoryStore
{
private readonly ConcurrentDictionary<string, List<ScoreRecord>> _history = new();
private readonly TimeSpan _retentionPeriod = TimeSpan.FromDays(90);
private readonly int _maxEntriesPerFinding = 1000;
public void RecordScore(ScoreRecord record)
{
ArgumentNullException.ThrowIfNull(record);
var entries = _history.GetOrAdd(record.FindingId, _ => new List<ScoreRecord>());
lock (entries)
{
// Check if this is a duplicate (same score calculated at similar time)
var recent = entries.LastOrDefault();
if (recent is not null &&
recent.Score == record.Score &&
recent.Bucket == record.Bucket &&
Math.Abs((recent.CalculatedAt - record.CalculatedAt).TotalSeconds) < 1)
{
return; // Skip duplicate
}
entries.Add(record);
// Prune old entries
var cutoff = DateTimeOffset.UtcNow - _retentionPeriod;
entries.RemoveAll(e => e.CalculatedAt < cutoff);
// Limit total entries
if (entries.Count > _maxEntriesPerFinding)
{
entries.RemoveRange(0, entries.Count - _maxEntriesPerFinding);
}
}
}
public ScoreHistoryResponse? GetHistory(
string findingId,
DateTimeOffset? from,
DateTimeOffset? to,
int limit,
string? cursor)
{
if (!_history.TryGetValue(findingId, out var entries))
{
return new ScoreHistoryResponse
{
FindingId = findingId,
History = [],
Pagination = new PaginationDto { HasMore = false, NextCursor = null }
};
}
lock (entries)
{
// Parse cursor (offset-based)
var offset = 0;
if (!string.IsNullOrEmpty(cursor))
{
try
{
var decoded = System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(cursor));
if (decoded.StartsWith("offset:", StringComparison.Ordinal))
{
offset = int.Parse(decoded["offset:".Length..], System.Globalization.CultureInfo.InvariantCulture);
}
}
catch
{
// Invalid cursor, start from beginning
}
}
// Filter by date range
var filtered = entries.AsEnumerable();
if (from.HasValue)
{
filtered = filtered.Where(e => e.CalculatedAt >= from.Value);
}
if (to.HasValue)
{
filtered = filtered.Where(e => e.CalculatedAt <= to.Value);
}
// Sort by date descending (most recent first)
var sorted = filtered.OrderByDescending(e => e.CalculatedAt).ToList();
// Apply pagination
var paged = sorted.Skip(offset).Take(limit + 1).ToList();
var hasMore = paged.Count > limit;
if (hasMore)
{
paged.RemoveAt(paged.Count - 1);
}
var historyEntries = paged.Select(e => new ScoreHistoryEntry
{
Score = e.Score,
Bucket = e.Bucket,
PolicyDigest = e.PolicyDigest,
CalculatedAt = e.CalculatedAt,
Trigger = e.Trigger,
ChangedFactors = e.ChangedFactors
}).ToList();
string? nextCursor = null;
if (hasMore)
{
var nextOffset = offset + limit;
nextCursor = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes($"offset:{nextOffset}"));
}
return new ScoreHistoryResponse
{
FindingId = findingId,
History = historyEntries,
Pagination = new PaginationDto { HasMore = hasMore, NextCursor = nextCursor }
};
}
}
}

View File

@@ -0,0 +1,300 @@
using System.Collections.Concurrent;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Services;
/// <summary>
/// Webhook registration record.
/// </summary>
public sealed record WebhookRegistration
{
public required Guid Id { get; init; }
public required string Url { get; init; }
public string? Secret { get; init; }
public IReadOnlyList<string>? FindingPatterns { get; init; }
public required int MinScoreChange { get; init; }
public required bool TriggerOnBucketChange { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public bool IsActive { get; init; } = true;
}
/// <summary>
/// Webhook payload for score changes.
/// </summary>
public sealed record ScoreChangeWebhookPayload
{
public required string EventType { get; init; }
public required string FindingId { get; init; }
public required int PreviousScore { get; init; }
public required int CurrentScore { get; init; }
public required string PreviousBucket { get; init; }
public required string CurrentBucket { get; init; }
public required int ScoreChange { get; init; }
public required bool BucketChanged { get; init; }
public required string PolicyDigest { get; init; }
public required DateTimeOffset Timestamp { get; init; }
}
/// <summary>
/// Interface for webhook storage.
/// </summary>
public interface IWebhookStore
{
WebhookRegistration Register(RegisterWebhookRequest request);
WebhookRegistration? Get(Guid id);
IReadOnlyList<WebhookRegistration> List();
bool Update(Guid id, RegisterWebhookRequest request);
bool Delete(Guid id);
IReadOnlyList<WebhookRegistration> GetMatchingWebhooks(string findingId, int scoreChange, bool bucketChanged);
}
/// <summary>
/// Interface for webhook delivery service.
/// </summary>
public interface IWebhookDeliveryService
{
Task NotifyScoreChangeAsync(
string findingId,
int previousScore,
int currentScore,
string previousBucket,
string currentBucket,
string policyDigest,
CancellationToken ct);
}
/// <summary>
/// In-memory webhook store.
/// </summary>
public sealed class InMemoryWebhookStore : IWebhookStore
{
private readonly ConcurrentDictionary<Guid, WebhookRegistration> _webhooks = new();
public WebhookRegistration Register(RegisterWebhookRequest request)
{
var registration = new WebhookRegistration
{
Id = Guid.NewGuid(),
Url = request.Url,
Secret = request.Secret,
FindingPatterns = request.FindingPatterns,
MinScoreChange = request.MinScoreChange,
TriggerOnBucketChange = request.TriggerOnBucketChange,
CreatedAt = DateTimeOffset.UtcNow,
IsActive = true
};
_webhooks[registration.Id] = registration;
return registration;
}
public WebhookRegistration? Get(Guid id)
{
return _webhooks.TryGetValue(id, out var reg) ? reg : null;
}
public IReadOnlyList<WebhookRegistration> List()
{
return _webhooks.Values.Where(w => w.IsActive).ToList();
}
public bool Update(Guid id, RegisterWebhookRequest request)
{
if (!_webhooks.TryGetValue(id, out var existing))
return false;
var updated = existing with
{
Url = request.Url,
Secret = request.Secret,
FindingPatterns = request.FindingPatterns,
MinScoreChange = request.MinScoreChange,
TriggerOnBucketChange = request.TriggerOnBucketChange
};
_webhooks[id] = updated;
return true;
}
public bool Delete(Guid id)
{
if (!_webhooks.TryGetValue(id, out var existing))
return false;
// Soft delete
_webhooks[id] = existing with { IsActive = false };
return true;
}
public IReadOnlyList<WebhookRegistration> GetMatchingWebhooks(string findingId, int scoreChange, bool bucketChanged)
{
return _webhooks.Values
.Where(w => w.IsActive)
.Where(w => MatchesFinding(w, findingId))
.Where(w => Math.Abs(scoreChange) >= w.MinScoreChange || (bucketChanged && w.TriggerOnBucketChange))
.ToList();
}
private static bool MatchesFinding(WebhookRegistration webhook, string findingId)
{
if (webhook.FindingPatterns is null || webhook.FindingPatterns.Count == 0)
return true; // No patterns = match all
foreach (var pattern in webhook.FindingPatterns)
{
if (pattern.EndsWith('*'))
{
var prefix = pattern[..^1];
if (findingId.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
return true;
}
else if (string.Equals(pattern, findingId, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
return false;
}
}
/// <summary>
/// Webhook delivery service with retry logic.
/// </summary>
public sealed class WebhookDeliveryService : IWebhookDeliveryService
{
private readonly IWebhookStore _store;
private readonly IHttpClientFactory _httpClientFactory;
private readonly ILogger<WebhookDeliveryService> _logger;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
};
private static readonly int[] RetryDelaysMs = [100, 500, 2000, 5000];
public WebhookDeliveryService(
IWebhookStore store,
IHttpClientFactory httpClientFactory,
ILogger<WebhookDeliveryService> logger)
{
_store = store;
_httpClientFactory = httpClientFactory;
_logger = logger;
}
public async Task NotifyScoreChangeAsync(
string findingId,
int previousScore,
int currentScore,
string previousBucket,
string currentBucket,
string policyDigest,
CancellationToken ct)
{
var scoreChange = currentScore - previousScore;
var bucketChanged = !string.Equals(previousBucket, currentBucket, StringComparison.Ordinal);
var matchingWebhooks = _store.GetMatchingWebhooks(findingId, scoreChange, bucketChanged);
if (matchingWebhooks.Count == 0)
{
_logger.LogDebug("No webhooks matched for finding {FindingId} score change", findingId);
return;
}
var payload = new ScoreChangeWebhookPayload
{
EventType = "score.changed",
FindingId = findingId,
PreviousScore = previousScore,
CurrentScore = currentScore,
PreviousBucket = previousBucket,
CurrentBucket = currentBucket,
ScoreChange = scoreChange,
BucketChanged = bucketChanged,
PolicyDigest = policyDigest,
Timestamp = DateTimeOffset.UtcNow
};
var payloadJson = JsonSerializer.Serialize(payload, JsonOptions);
// Fire and forget delivery with retry (don't block the caller)
foreach (var webhook in matchingWebhooks)
{
_ = DeliverWithRetryAsync(webhook, payloadJson, ct);
}
}
private async Task DeliverWithRetryAsync(WebhookRegistration webhook, string payloadJson, CancellationToken ct)
{
using var client = _httpClientFactory.CreateClient("webhook-delivery");
for (var attempt = 0; attempt <= RetryDelaysMs.Length; attempt++)
{
if (attempt > 0)
{
var delay = RetryDelaysMs[Math.Min(attempt - 1, RetryDelaysMs.Length - 1)];
await Task.Delay(delay, ct).ConfigureAwait(false);
}
try
{
using var request = new HttpRequestMessage(HttpMethod.Post, webhook.Url)
{
Content = new StringContent(payloadJson, Encoding.UTF8, "application/json")
};
// Add signature if secret is configured
if (!string.IsNullOrEmpty(webhook.Secret))
{
var signature = ComputeHmacSignature(payloadJson, webhook.Secret);
request.Headers.TryAddWithoutValidation("X-Webhook-Signature", $"sha256={signature}");
}
request.Headers.TryAddWithoutValidation("X-Webhook-Id", webhook.Id.ToString());
request.Headers.TryAddWithoutValidation("X-Webhook-Timestamp", DateTimeOffset.UtcNow.ToUnixTimeSeconds().ToString());
using var response = await client.SendAsync(request, ct).ConfigureAwait(false);
if (response.IsSuccessStatusCode)
{
_logger.LogDebug(
"Webhook {WebhookId} delivered successfully to {Url}",
webhook.Id, webhook.Url);
return;
}
_logger.LogWarning(
"Webhook {WebhookId} delivery failed with status {StatusCode}, attempt {Attempt}",
webhook.Id, response.StatusCode, attempt + 1);
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
_logger.LogWarning(
ex,
"Webhook {WebhookId} delivery failed with exception, attempt {Attempt}",
webhook.Id, attempt + 1);
}
}
_logger.LogError(
"Webhook {WebhookId} delivery failed after all retries to {Url}",
webhook.Id, webhook.Url);
}
private static string ComputeHmacSignature(string payload, string secret)
{
var keyBytes = Encoding.UTF8.GetBytes(secret);
var payloadBytes = Encoding.UTF8.GetBytes(payload);
using var hmac = new HMACSHA256(keyBytes);
var hash = hmac.ComputeHash(payloadBytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}

View File

@@ -22,6 +22,7 @@
<ProjectReference Include="..\..\Scanner\__Libraries\StellaOps.Scanner.Reachability\StellaOps.Scanner.Reachability.csproj" />
<ProjectReference Include="..\..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Router.AspNet\StellaOps.Router.AspNet.csproj" />
<ProjectReference Include="..\..\Signals\StellaOps.Signals\StellaOps.Signals.csproj" />
</ItemGroup>
</Project>

View File

@@ -10,7 +10,7 @@ using StellaOps.Findings.Ledger.Options;
namespace StellaOps.Findings.Ledger.Infrastructure.Policy;
internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
public sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{

Some files were not shown because too many files have changed in this diff Show More