feat(api): Implement Console Export Client and Models

- Added ConsoleExportClient for managing export requests and responses.
- Introduced ConsoleExportRequest and ConsoleExportResponse models.
- Implemented methods for creating and retrieving exports with appropriate headers.
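
As an illustration of the intended call pattern, a minimal sketch follows; only ConsoleExportClient, ConsoleExportRequest, and ConsoleExportResponse come from this change, while the member names used here (CreateExportAsync, GetExportAsync, Id, Status, Format, Scope) are hypothetical placeholders.

// Hypothetical usage sketch; the real constructor and member names may differ.
using var http = new HttpClient { BaseAddress = new Uri("https://console.example.internal/") };
var exportClient = new ConsoleExportClient(http);

var request = new ConsoleExportRequest
{
    Format = "csv",        // illustrative format value
    Scope = "findings",    // illustrative scope value
};

// Create the export, then fetch its status by id.
ConsoleExportResponse created = await exportClient.CreateExportAsync(request, CancellationToken.None);
ConsoleExportResponse current = await exportClient.GetExportAsync(created.Id, CancellationToken.None);
Console.WriteLine($"export {current.Id}: {current.Status}");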

feat(crypto): Add Software SM2/SM3 Cryptography Provider

- Implemented SmSoftCryptoProvider for software-only SM2/SM3 cryptography.
- Added support for signing and verification using SM2 algorithm.
- Included hashing functionality with SM3 algorithm.
- Configured options for loading keys from files and for enforcing environment gate checks.
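
As background for the hashing piece, SM3 produces a 256-bit digest; below is a self-contained sketch using BouncyCastle's SM3Digest (BouncyCastle is an assumption here — SmSoftCryptoProvider may wrap a different primitive implementation).

using System.Text;
using Org.BouncyCastle.Crypto.Digests;

// Compute an SM3 digest (32 bytes) over a byte payload.
static byte[] ComputeSm3(byte[] payload)
{
    var digest = new SM3Digest();
    digest.BlockUpdate(payload, 0, payload.Length);
    var output = new byte[digest.GetDigestSize()]; // 32 for SM3
    digest.DoFinal(output, 0);
    return output;
}

var hash = ComputeSm3(Encoding.UTF8.GetBytes("hello sm3"));
Console.WriteLine(Convert.ToHexString(hash).ToLowerInvariant());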

test(crypto): Add unit tests for SmSoftCryptoProvider

- Created comprehensive tests for the signing, verification, and hashing paths.
- Ensured correct behavior for key management and error handling.

feat(api): Enhance Console Export Models

- Expanded ConsoleExport models to include detailed status and event types.
- Added support for various export formats and notification options.

test(time): Implement TimeAnchorPolicyService tests

- Developed tests for TimeAnchorPolicyService to validate time anchors.
- Covered scenarios for anchor validation, drift calculation, and policy enforcement.
StellaOps Bot committed 2025-12-07 00:27:33 +02:00
parent 9bd6a73926 · commit 0de92144d2
229 changed files with 32,351 additions and 1,481 deletions

View File

@@ -19,6 +19,8 @@
- `docs/provenance/inline-dsse.md` (for provenance anchors/DSSE notes)
- `docs/modules/concelier/prep/2025-11-22-oas-obs-prep.md` (OAS + observability prep)
- `docs/modules/concelier/prep/2025-11-20-orchestrator-registry-prep.md` (orchestrator registry/control contracts)
- `docs/modules/policy/cvss-v4.md` (CVSS receipts model & hashing)
- `docs/product-advisories/25-Nov-2025 - Add CVSS v4.0 Score Receipts for Transparency.md` (vector provenance, DSSE expectations)
- Any sprint-specific ADRs/notes linked from `docs/implplan/SPRINT_0112_0001_0001_concelier_i.md`, `SPRINT_0113_0001_0002_concelier_ii.md`, or `SPRINT_0114_0001_0003_concelier_iii.md`.
## Working Agreements
@@ -28,6 +30,7 @@
- **Tenant safety:** every API/job must enforce tenant headers/guards; no cross-tenant leaks.
- **Schema gates:** LNM schema changes require docs + tests; update `link-not-merge-schema.md` and samples together.
- **Cross-module edits:** none without sprint note; if needed, log in sprint Execution Log and Decisions & Risks.
- **CVSS v4.0 ingest:** when vendor advisories ship CVSS v4.0 vectors, parse without mutation, store provenance (source id + observation path), and emit vectors unchanged to Policy receipts. Do not derive fields; attach DSSE/observation refs for Policy reuse.
## Coding & Observability Standards
- Target **.NET 10**; prefer latest C# preview features already enabled in repo.
@@ -49,4 +52,3 @@
- Update sprint tracker status (`TODO → DOING → DONE/BLOCKED`) when you start/finish/block work; mirror decisions in Execution Log and Decisions & Risks.
- If a design decision is needed, mark the task `BLOCKED` in the sprint doc and record the decision ask—do not pause the codebase.
- When changing contracts (APIs, schemas, telemetry, exports), update corresponding docs and link them from the sprint Decisions & Risks section.

View File

@@ -1,5 +1,5 @@
using System.ComponentModel.DataAnnotations;
using StellaOps.Concelier.Storage.Mongo.Orchestrator;
using StellaOps.Concelier.Core.Orchestration;
namespace StellaOps.Concelier.WebService.Contracts;

View File

@@ -62,8 +62,9 @@ using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Postgres;
using StellaOps.Provenance.Mongo;
using StellaOps.Concelier.Core.Attestation;
using StellaOps.Concelier.Core.Signals;
using AttestationClaims = StellaOps.Concelier.Core.Attestation.AttestationClaims;
using StellaOps.Concelier.Storage.Mongo.Orchestrator;
using StellaOps.Concelier.Core.Orchestration;
using System.Diagnostics.Metrics;
using StellaOps.Concelier.Models.Observations;
@@ -261,6 +262,12 @@ builder.Services.AddSingleton<IAdvisoryChunkCache, AdvisoryChunkCache>();
builder.Services.AddSingleton<IAdvisoryAiTelemetry, AdvisoryAiTelemetry>();
builder.Services.AddSingleton<EvidenceBundleAttestationBuilder>();
// Register signals services (CONCELIER-SIG-26-001)
builder.Services.AddConcelierSignalsServices();
// Register orchestration services (CONCELIER-ORCH-32-001)
builder.Services.AddConcelierOrchestrationServices();
var features = concelierOptions.Features ?? new ConcelierOptions.FeaturesOptions();
if (!features.NoMergeEnabled)
@@ -3698,6 +3705,220 @@ var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
return Results.Empty;
});
// ==========================================
// Signals Endpoints (CONCELIER-SIG-26-001)
// Expose affected symbol/function lists for reachability scoring
// ==========================================
app.MapGet("/v1/signals/symbols", async (
HttpContext context,
[FromQuery(Name = "advisoryId")] string? advisoryId,
[FromQuery(Name = "purl")] string? purl,
[FromQuery(Name = "symbolType")] string? symbolType,
[FromQuery(Name = "source")] string? source,
[FromQuery(Name = "withLocation")] bool? withLocation,
[FromQuery(Name = "limit")] int? limit,
[FromQuery(Name = "offset")] int? offset,
[FromServices] IAffectedSymbolProvider symbolProvider,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
// Parse symbol types if provided
ImmutableArray<AffectedSymbolType>? symbolTypes = null;
if (!string.IsNullOrWhiteSpace(symbolType))
{
var types = symbolType.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
var parsed = new List<AffectedSymbolType>();
foreach (var t in types)
{
if (Enum.TryParse<AffectedSymbolType>(t, ignoreCase: true, out var parsedType))
{
parsed.Add(parsedType);
}
}
if (parsed.Count > 0)
{
symbolTypes = parsed.ToImmutableArray();
}
}
// Parse sources if provided
ImmutableArray<string>? sources = null;
if (!string.IsNullOrWhiteSpace(source))
{
sources = source.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
.ToImmutableArray();
}
var options = new AffectedSymbolQueryOptions(
TenantId: tenant!,
AdvisoryId: advisoryId?.Trim(),
Purl: purl?.Trim(),
SymbolTypes: symbolTypes,
Sources: sources,
WithLocationOnly: withLocation,
Limit: Math.Clamp(limit ?? 100, 1, 500),
Offset: Math.Max(offset ?? 0, 0));
var result = await symbolProvider.QueryAsync(options, cancellationToken);
return Results.Ok(new SignalsSymbolQueryResponse(
Symbols: result.Symbols.Select(s => ToSymbolResponse(s)).ToList(),
TotalCount: result.TotalCount,
HasMore: result.HasMore,
ComputedAt: result.ComputedAt.ToString("O", CultureInfo.InvariantCulture)));
}).WithName("QueryAffectedSymbols");
app.MapGet("/v1/signals/symbols/advisory/{advisoryId}", async (
HttpContext context,
string advisoryId,
[FromServices] IAffectedSymbolProvider symbolProvider,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
if (string.IsNullOrWhiteSpace(advisoryId))
{
return ConcelierProblemResultFactory.AdvisoryIdRequired(context);
}
var symbolSet = await symbolProvider.GetByAdvisoryAsync(tenant!, advisoryId.Trim(), cancellationToken);
return Results.Ok(ToSymbolSetResponse(symbolSet));
}).WithName("GetAffectedSymbolsByAdvisory");
app.MapGet("/v1/signals/symbols/package/{*purl}", async (
HttpContext context,
string purl,
[FromServices] IAffectedSymbolProvider symbolProvider,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
if (string.IsNullOrWhiteSpace(purl))
{
return Problem(
statusCode: StatusCodes.Status400BadRequest,
title: "Package URL required",
detail: "The purl parameter is required.",
type: "https://stellaops.org/problems/validation");
}
var symbolSet = await symbolProvider.GetByPackageAsync(tenant!, purl.Trim(), cancellationToken);
return Results.Ok(ToSymbolSetResponse(symbolSet));
}).WithName("GetAffectedSymbolsByPackage");
app.MapPost("/v1/signals/symbols/batch", async (
HttpContext context,
[FromBody] SignalsSymbolBatchRequest request,
[FromServices] IAffectedSymbolProvider symbolProvider,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
if (request.AdvisoryIds is not { Count: > 0 })
{
return Problem(
statusCode: StatusCodes.Status400BadRequest,
title: "Advisory IDs required",
detail: "At least one advisoryId is required in the batch request.",
type: "https://stellaops.org/problems/validation");
}
if (request.AdvisoryIds.Count > 100)
{
return Problem(
statusCode: StatusCodes.Status400BadRequest,
title: "Batch size exceeded",
detail: "Maximum batch size is 100 advisory IDs.",
type: "https://stellaops.org/problems/validation");
}
var results = await symbolProvider.GetByAdvisoriesBatchAsync(tenant!, request.AdvisoryIds, cancellationToken);
var response = new SignalsSymbolBatchResponse(
Results: results.ToDictionary(
kvp => kvp.Key,
kvp => ToSymbolSetResponse(kvp.Value)));
return Results.Ok(response);
}).WithName("GetAffectedSymbolsBatch");
app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
HttpContext context,
string advisoryId,
[FromServices] IAffectedSymbolProvider symbolProvider,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
if (string.IsNullOrWhiteSpace(advisoryId))
{
return ConcelierProblemResultFactory.AdvisoryIdRequired(context);
}
var exists = await symbolProvider.HasSymbolsAsync(tenant!, advisoryId.Trim(), cancellationToken);
return Results.Ok(new SignalsSymbolExistsResponse(Exists: exists, AdvisoryId: advisoryId.Trim()));
}).WithName("CheckAffectedSymbolsExist");
await app.RunAsync();
}
@@ -3718,6 +3939,112 @@ private readonly record struct LinksetObservationSummary(
public static LinksetObservationSummary Empty { get; } = new(null, null, null, null);
}
// ==========================================
// Signals API Response Types (CONCELIER-SIG-26-001)
// ==========================================
record SignalsSymbolQueryResponse(
List<SignalsSymbolResponse> Symbols,
int TotalCount,
bool HasMore,
string ComputedAt);
record SignalsSymbolResponse(
string AdvisoryId,
string ObservationId,
string Symbol,
string SymbolType,
string? Purl,
string? Module,
string? ClassName,
string? FilePath,
int? LineNumber,
string? VersionRange,
string CanonicalId,
bool HasSourceLocation,
SignalsSymbolProvenanceResponse Provenance);
record SignalsSymbolProvenanceResponse(
string Source,
string Vendor,
string ObservationHash,
string FetchedAt,
string? IngestJobId,
string? UpstreamId,
string? UpstreamUrl);
record SignalsSymbolSetResponse(
string TenantId,
string AdvisoryId,
List<SignalsSymbolResponse> Symbols,
List<SignalsSymbolSourceSummaryResponse> SourceSummaries,
int UniqueSymbolCount,
bool HasSourceLocations,
string ComputedAt);
record SignalsSymbolSourceSummaryResponse(
string Source,
int SymbolCount,
int WithLocationCount,
Dictionary<string, int> CountByType,
string LatestFetchAt);
record SignalsSymbolBatchRequest(
List<string> AdvisoryIds);
record SignalsSymbolBatchResponse(
Dictionary<string, SignalsSymbolSetResponse> Results);
record SignalsSymbolExistsResponse(
bool Exists,
string AdvisoryId);
// ==========================================
// Signals API Helper Methods
// ==========================================
static SignalsSymbolResponse ToSymbolResponse(AffectedSymbol symbol)
{
return new SignalsSymbolResponse(
AdvisoryId: symbol.AdvisoryId,
ObservationId: symbol.ObservationId,
Symbol: symbol.Symbol,
SymbolType: symbol.SymbolType.ToString(),
Purl: symbol.Purl,
Module: symbol.Module,
ClassName: symbol.ClassName,
FilePath: symbol.FilePath,
LineNumber: symbol.LineNumber,
VersionRange: symbol.VersionRange,
CanonicalId: symbol.CanonicalId,
HasSourceLocation: symbol.HasSourceLocation,
Provenance: new SignalsSymbolProvenanceResponse(
Source: symbol.Provenance.Source,
Vendor: symbol.Provenance.Vendor,
ObservationHash: symbol.Provenance.ObservationHash,
FetchedAt: symbol.Provenance.FetchedAt.ToString("O", CultureInfo.InvariantCulture),
IngestJobId: symbol.Provenance.IngestJobId,
UpstreamId: symbol.Provenance.UpstreamId,
UpstreamUrl: symbol.Provenance.UpstreamUrl));
}
static SignalsSymbolSetResponse ToSymbolSetResponse(AffectedSymbolSet symbolSet)
{
return new SignalsSymbolSetResponse(
TenantId: symbolSet.TenantId,
AdvisoryId: symbolSet.AdvisoryId,
Symbols: symbolSet.Symbols.Select(ToSymbolResponse).ToList(),
SourceSummaries: symbolSet.SourceSummaries.Select(s => new SignalsSymbolSourceSummaryResponse(
Source: s.Source,
SymbolCount: s.SymbolCount,
WithLocationCount: s.WithLocationCount,
CountByType: s.CountByType.ToDictionary(kvp => kvp.Key.ToString(), kvp => kvp.Value),
LatestFetchAt: s.LatestFetchAt.ToString("O", CultureInfo.InvariantCulture))).ToList(),
UniqueSymbolCount: symbolSet.UniqueSymbolCount,
HasSourceLocations: symbolSet.HasSourceLocations,
ComputedAt: symbolSet.ComputedAt.ToString("O", CultureInfo.InvariantCulture));
}
static PluginHostOptions BuildPluginOptions(ConcelierOptions options, string contentRoot)
{
var pluginOptions = new PluginHostOptions

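For the signals endpoints added above, a client-side sketch of one query follows; the tenant header name and the symbolType value are assumptions, since tenant resolution happens inside TryResolveTenant and the AffectedSymbolType enum members are not shown here.

using var http = new HttpClient { BaseAddress = new Uri("https://concelier.example.internal/") };
http.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-a"); // assumed header name

// Function-level symbols for one advisory, locations required, first 50 results.
var uri = "v1/signals/symbols?advisoryId=CVE-2025-0001&symbolType=function&withLocation=true&limit=50";
using var response = await http.GetAsync(uri);
response.EnsureSuccessStatusCode();

// Body is SignalsSymbolQueryResponse JSON: symbols, totalCount, hasMore, computedAt.
Console.WriteLine(await response.Content.ReadAsStringAsync());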
View File

@@ -291,18 +291,6 @@ Global
{A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x64.Build.0 = Release|Any CPU
{A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x86.ActiveCfg = Release|Any CPU
{A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x86.Build.0 = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x64.ActiveCfg = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x64.Build.0 = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x86.ActiveCfg = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x86.Build.0 = Debug|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|Any CPU.Build.0 = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x64.ActiveCfg = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x64.Build.0 = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x86.ActiveCfg = Release|Any CPU
{C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x86.Build.0 = Release|Any CPU
{2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|x64.ActiveCfg = Debug|Any CPU
@@ -1227,18 +1215,6 @@ Global
{7B995CBB-3D20-4509-9300-EC012C18C4B4}.Release|x64.Build.0 = Release|Any CPU
{7B995CBB-3D20-4509-9300-EC012C18C4B4}.Release|x86.ActiveCfg = Release|Any CPU
{7B995CBB-3D20-4509-9300-EC012C18C4B4}.Release|x86.Build.0 = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|x64.ActiveCfg = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|x64.Build.0 = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|x86.ActiveCfg = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Debug|x86.Build.0 = Debug|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|Any CPU.Build.0 = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|x64.ActiveCfg = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|x64.Build.0 = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|x86.ActiveCfg = Release|Any CPU
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}.Release|x86.Build.0 = Release|Any CPU
{664A2577-6DA1-42DA-A213-3253017FA4BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{664A2577-6DA1-42DA-A213-3253017FA4BF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{664A2577-6DA1-42DA-A213-3253017FA4BF}.Debug|x64.ActiveCfg = Debug|Any CPU
@@ -1284,7 +1260,6 @@ Global
{841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{EEC52FA0-8E78-4FCB-9454-D697F58B2118} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{628700D6-97A5-4506-BC78-22E2A76C68E3} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{C926373D-5ACB-4E62-96D5-264EF4C61BE5} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{2D68125A-0ACD-4015-A8FA-B54284B8A3CB} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{7760219F-6C19-4B61-9015-73BB02005C0B} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
{F87DFC58-EE3E-4E2F-9E17-E6A6924F2998} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
@@ -1356,7 +1331,6 @@ Global
{2EB876DE-E940-4A7E-8E3D-804E2E6314DA} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
{C4C2037E-B301-4449-96D6-C6B165752E1A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
{7B995CBB-3D20-4509-9300-EC012C18C4B4} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
{664A2577-6DA1-42DA-A213-3253017FA4BF} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
{39C1D44C-389F-4502-ADCF-E4AC359E8F8F} = {176B5A8A-7857-3ECD-1128-3C721BC7F5C6}
{85D215EC-DCFE-4F7F-BB07-540DCF66BE8C} = {41F15E67-7190-CF23-3BC4-77E87134CADD}

View File

@@ -1,90 +1,56 @@
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;
namespace StellaOps.Concelier.Connector.Common.Fetch;
/// <summary>
/// Handles persistence of raw upstream documents in GridFS buckets for later parsing.
/// </summary>
public sealed class RawDocumentStorage
{
private const string BucketName = "documents";
private readonly IMongoDatabase _database;
public RawDocumentStorage(IMongoDatabase database)
{
_database = database ?? throw new ArgumentNullException(nameof(database));
}
private GridFSBucket CreateBucket() => new(_database, new GridFSBucketOptions
{
BucketName = BucketName,
WriteConcern = _database.Settings.WriteConcern,
ReadConcern = _database.Settings.ReadConcern,
});
public Task<ObjectId> UploadAsync(
string sourceName,
string uri,
byte[] content,
string? contentType,
CancellationToken cancellationToken)
=> UploadAsync(sourceName, uri, content, contentType, expiresAt: null, cancellationToken);
public async Task<ObjectId> UploadAsync(
string sourceName,
string uri,
byte[] content,
string? contentType,
DateTimeOffset? expiresAt,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrEmpty(sourceName);
ArgumentException.ThrowIfNullOrEmpty(uri);
ArgumentNullException.ThrowIfNull(content);
var bucket = CreateBucket();
var filename = $"{sourceName}/{Guid.NewGuid():N}";
var metadata = new BsonDocument
{
["sourceName"] = sourceName,
["uri"] = uri,
};
if (!string.IsNullOrWhiteSpace(contentType))
{
metadata["contentType"] = contentType;
}
if (expiresAt.HasValue)
{
metadata["expiresAt"] = expiresAt.Value.UtcDateTime;
}
return await bucket.UploadFromBytesAsync(filename, content, new GridFSUploadOptions
{
Metadata = metadata,
}, cancellationToken).ConfigureAwait(false);
}
public Task<byte[]> DownloadAsync(ObjectId id, CancellationToken cancellationToken)
{
var bucket = CreateBucket();
return bucket.DownloadAsBytesAsync(id, cancellationToken: cancellationToken);
}
public async Task DeleteAsync(ObjectId id, CancellationToken cancellationToken)
{
var bucket = CreateBucket();
try
{
await bucket.DeleteAsync(id, cancellationToken).ConfigureAwait(false);
}
catch (GridFSFileNotFoundException)
{
// Already removed; ignore.
}
}
}
using System.Collections.Concurrent;
using MongoDB.Bson;
namespace StellaOps.Concelier.Connector.Common.Fetch;
/// <summary>
/// Handles persistence of raw upstream documents for later parsing (Postgres/in-memory implementation).
/// </summary>
public sealed class RawDocumentStorage
{
private readonly ConcurrentDictionary<ObjectId, byte[]> _blobs = new();
public Task<ObjectId> UploadAsync(
string sourceName,
string uri,
byte[] content,
string? contentType,
CancellationToken cancellationToken)
=> UploadAsync(sourceName, uri, content, contentType, expiresAt: null, cancellationToken);
public async Task<ObjectId> UploadAsync(
string sourceName,
string uri,
byte[] content,
string? contentType,
DateTimeOffset? expiresAt,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrEmpty(sourceName);
ArgumentException.ThrowIfNullOrEmpty(uri);
ArgumentNullException.ThrowIfNull(content);
var id = ObjectId.GenerateNewId();
var copy = new byte[content.Length];
Buffer.BlockCopy(content, 0, copy, 0, content.Length);
_blobs[id] = copy;
await Task.CompletedTask.ConfigureAwait(false);
return id;
}
public Task<byte[]> DownloadAsync(ObjectId id, CancellationToken cancellationToken)
{
if (_blobs.TryGetValue(id, out var bytes))
{
return Task.FromResult(bytes);
}
throw new MongoDB.Driver.GridFSFileNotFoundException($"Blob {id} not found.");
}
public async Task DeleteAsync(ObjectId id, CancellationToken cancellationToken)
{
_blobs.TryRemove(id, out _);
await Task.CompletedTask.ConfigureAwait(false);
}
}
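
A roundtrip sketch against the in-memory replacement, mirroring the call pattern connectors already use:

using System.Text;

var storage = new RawDocumentStorage();
var payload = Encoding.UTF8.GetBytes("{\"id\":\"CVE-2025-0001\"}");

// Upload copies the bytes, so later mutation of `payload` does not change the stored blob.
var id = await storage.UploadAsync(
    sourceName: "nvd",
    uri: "https://services.nvd.nist.gov/rest/json/cves/2.0?cveId=CVE-2025-0001",
    content: payload,
    contentType: "application/json",
    CancellationToken.None);

var roundTripped = await storage.DownloadAsync(id, CancellationToken.None);
Console.WriteLine($"stored {roundTripped.Length} bytes under {id}");

await storage.DeleteAsync(id, CancellationToken.None); // deleting an unknown id is a no-op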

View File

@@ -1,12 +1,13 @@
using System.Net;
using System.Net.Http;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Xml;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.Core.Linksets;
using System.Net;
using System.Net.Http;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Xml;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.Storage.Mongo;
namespace StellaOps.Concelier.Connector.Common.Http;
@@ -161,18 +162,19 @@ public static class ServiceCollectionExtensions
{
ArgumentNullException.ThrowIfNull(services);
services.AddSingleton<Json.JsonSchemaValidator>();
services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
services.AddSingleton<XmlSchemaValidator>();
services.AddSingleton<IXmlSchemaValidator>(sp => sp.GetRequiredService<XmlSchemaValidator>());
services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
services.AddConcelierAocGuards();
services.AddConcelierLinksetMappers();
services.AddSingleton<Fetch.RawDocumentStorage>();
services.AddSingleton<Fetch.SourceFetchService>();
return services;
}
services.AddSingleton<Json.JsonSchemaValidator>();
services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
services.AddSingleton<XmlSchemaValidator>();
services.AddSingleton<IXmlSchemaValidator>(sp => sp.GetRequiredService<XmlSchemaValidator>());
services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
services.AddConcelierAocGuards();
services.AddConcelierLinksetMappers();
services.AddSingleton<IDocumentStore, InMemoryDocumentStore>();
services.AddSingleton<Fetch.RawDocumentStorage>();
services.AddSingleton<Fetch.SourceFetchService>();
return services;
}
private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
{

View File

@@ -8,7 +8,6 @@
<ItemGroup>
<PackageReference Include="JsonSchema.Net" Version="5.3.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="10.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="AngleSharp" Version="1.1.1" />
<PackageReference Include="UglyToad.PdfPig" Version="1.7.0-custom-5" />
<PackageReference Include="NuGet.Versioning" Version="6.9.1" />
@@ -18,5 +17,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Postgres\StellaOps.Concelier.Storage.Postgres.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -0,0 +1,275 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Service for executing orchestrator-driven backfills.
/// Per CONCELIER-ORCH-34-001: Execute orchestrator-driven backfills reusing
/// artifact hashes/signatures, logging provenance, and pushing run metadata to ledger.
/// </summary>
public interface IBackfillExecutor
{
/// <summary>
/// Executes a backfill operation.
/// </summary>
/// <param name="context">Execution context.</param>
/// <param name="executeStep">Function to execute each step of the backfill.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The generated run manifest.</returns>
Task<OrchestratorRunManifest> ExecuteBackfillAsync(
ConnectorExecutionContext context,
Func<string?, string?, CancellationToken, Task<BackfillStepResult>> executeStep,
CancellationToken cancellationToken);
/// <summary>
/// Gets an existing manifest for a run.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The manifest if found, null otherwise.</returns>
Task<OrchestratorRunManifest?> GetManifestAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken);
}
/// <summary>
/// Result of a backfill step execution.
/// </summary>
public sealed record BackfillStepResult
{
/// <summary>
/// Whether the step completed successfully.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// The cursor position after this step (for the next step's fromCursor).
/// </summary>
public string? NextCursor { get; init; }
/// <summary>
/// Hashes of artifacts produced in this step.
/// </summary>
public IReadOnlyList<string> ArtifactHashes { get; init; } = [];
/// <summary>
/// Whether there are more items to process.
/// </summary>
public bool HasMore { get; init; }
/// <summary>
/// Error message if the step failed.
/// </summary>
public string? ErrorMessage { get; init; }
}
/// <summary>
/// Default implementation of <see cref="IBackfillExecutor"/>.
/// </summary>
public sealed class BackfillExecutor : IBackfillExecutor
{
private readonly IOrchestratorRegistryStore _store;
private readonly TimeProvider _timeProvider;
private readonly ILogger<BackfillExecutor> _logger;
public BackfillExecutor(
IOrchestratorRegistryStore store,
TimeProvider timeProvider,
ILogger<BackfillExecutor> logger)
{
ArgumentNullException.ThrowIfNull(store);
ArgumentNullException.ThrowIfNull(timeProvider);
ArgumentNullException.ThrowIfNull(logger);
_store = store;
_timeProvider = timeProvider;
_logger = logger;
}
/// <inheritdoc />
public async Task<OrchestratorRunManifest> ExecuteBackfillAsync(
ConnectorExecutionContext context,
Func<string?, string?, CancellationToken, Task<BackfillStepResult>> executeStep,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
ArgumentNullException.ThrowIfNull(executeStep);
var fromCursor = context.BackfillRange?.FromCursor;
var toCursor = context.BackfillRange?.ToCursor;
var allArtifactHashes = new List<string>();
var currentCursor = fromCursor;
_logger.LogInformation(
"Starting backfill for {ConnectorId} run {RunId}: cursor range [{FromCursor}, {ToCursor}]",
context.ConnectorId,
context.RunId,
fromCursor ?? "(start)",
toCursor ?? "(end)");
int stepCount = 0;
bool hasMore = true;
while (hasMore && !cancellationToken.IsCancellationRequested)
{
// Check if we should continue (pause/throttle handling)
if (!await context.Worker.CheckContinueAsync(cancellationToken).ConfigureAwait(false))
{
_logger.LogWarning(
"Backfill for {ConnectorId} run {RunId} interrupted at cursor {Cursor}",
context.ConnectorId,
context.RunId,
currentCursor);
break;
}
stepCount++;
// Execute the step
var result = await executeStep(currentCursor, toCursor, cancellationToken).ConfigureAwait(false);
if (!result.Success)
{
_logger.LogError(
"Backfill step {Step} failed for {ConnectorId} run {RunId}: {Error}",
stepCount,
context.ConnectorId,
context.RunId,
result.ErrorMessage);
await context.Worker.CompleteFailureAsync(
"BACKFILL_STEP_FAILED",
60, // Retry after 1 minute
cancellationToken).ConfigureAwait(false);
throw new InvalidOperationException($"Backfill step failed: {result.ErrorMessage}");
}
// Record artifacts
foreach (var hash in result.ArtifactHashes)
{
context.Worker.RecordArtifact(hash);
allArtifactHashes.Add(hash);
}
// Report progress
if (!string.IsNullOrEmpty(result.NextCursor))
{
var lastHash = result.ArtifactHashes.LastOrDefault();
await context.Worker.ReportProgressAsync(
CalculateProgress(currentCursor, result.NextCursor, toCursor),
lastHash,
"linkset",
cancellationToken).ConfigureAwait(false);
}
currentCursor = result.NextCursor;
hasMore = result.HasMore;
_logger.LogDebug(
"Backfill step {Step} completed for {ConnectorId} run {RunId}: {ArtifactCount} artifacts, hasMore={HasMore}",
stepCount,
context.ConnectorId,
context.RunId,
result.ArtifactHashes.Count,
hasMore);
}
// Create manifest
var manifest = new OrchestratorRunManifest(
context.RunId,
context.ConnectorId,
context.Tenant,
new OrchestratorBackfillRange(fromCursor, currentCursor ?? toCursor),
allArtifactHashes.AsReadOnly(),
ComputeDsseEnvelopeHash(context.RunId, allArtifactHashes),
_timeProvider.GetUtcNow());
// Store manifest
await _store.StoreManifestAsync(manifest, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Backfill completed for {ConnectorId} run {RunId}: {StepCount} steps, {ArtifactCount} artifacts, DSSE hash {DsseHash}",
context.ConnectorId,
context.RunId,
stepCount,
allArtifactHashes.Count,
manifest.DsseEnvelopeHash);
return manifest;
}
/// <inheritdoc />
public Task<OrchestratorRunManifest?> GetManifestAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
return _store.GetManifestAsync(tenant, connectorId, runId, cancellationToken);
}
private static int CalculateProgress(string? currentCursor, string? nextCursor, string? toCursor)
{
// Simple progress estimation
// In a real implementation, this would be based on cursor comparison
if (string.IsNullOrEmpty(toCursor))
{
return 50; // Unknown end
}
if (nextCursor == toCursor)
{
return 100;
}
// Default to partial progress
return 50;
}
private static string? ComputeDsseEnvelopeHash(Guid runId, IReadOnlyList<string> artifactHashes)
{
if (artifactHashes.Count == 0)
{
return null;
}
// Create a deterministic DSSE-style envelope hash
// Format: sha256(runId + sorted artifact hashes)
var content = $"{runId}|{string.Join("|", artifactHashes.OrderBy(h => h))}";
return ConnectorExecutionContext.ComputeHash(content);
}
}
/// <summary>
/// Options for backfill execution.
/// </summary>
public sealed record BackfillOptions
{
/// <summary>
/// Maximum number of items per step.
/// </summary>
public int BatchSize { get; init; } = 100;
/// <summary>
/// Delay between steps (for rate limiting).
/// </summary>
public TimeSpan StepDelay { get; init; } = TimeSpan.FromMilliseconds(100);
/// <summary>
/// Maximum number of retry attempts per step.
/// </summary>
public int MaxRetries { get; init; } = 3;
/// <summary>
/// Initial retry delay (doubles with each retry).
/// </summary>
public TimeSpan InitialRetryDelay { get; init; } = TimeSpan.FromSeconds(1);
}
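
A sketch of driving a single-step backfill through the executor; `executor` and `context` are assumed to exist already (see the worker and context sketches further down), and the step body is a stand-in for real connector paging.

var manifest = await executor.ExecuteBackfillAsync(
    context,
    (fromCursor, toCursor, ct) =>
    {
        // Stand-in for fetching and persisting one page of the backfill range.
        var pageHash = ConnectorExecutionContext.ComputeHash($"page:{fromCursor ?? "start"}");
        return Task.FromResult(new BackfillStepResult
        {
            Success = true,
            NextCursor = toCursor,   // jump to the end of the range in this sketch
            ArtifactHashes = [pageHash],
            HasMore = false,         // single step, so the loop exits
        });
    },
    CancellationToken.None);

Console.WriteLine($"backfill manifest stored, DSSE hash: {manifest.DsseEnvelopeHash}");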

View File

@@ -0,0 +1,116 @@
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Metadata describing a connector's orchestrator registration requirements.
/// Per CONCELIER-ORCH-32-001: Register every advisory connector with orchestrator
/// (metadata, auth scopes, rate policies) for transparent, reproducible scheduling.
/// </summary>
public sealed record ConnectorMetadata
{
/// <summary>
/// Unique connector identifier (lowercase slug).
/// </summary>
public required string ConnectorId { get; init; }
/// <summary>
/// Advisory provider source (nvd, ghsa, osv, icscisa, kisa, vendor:slug).
/// </summary>
public required string Source { get; init; }
/// <summary>
/// Human-readable display name.
/// </summary>
public string? DisplayName { get; init; }
/// <summary>
/// Connector description.
/// </summary>
public string? Description { get; init; }
/// <summary>
/// Capability flags: observations, linksets, timeline, attestations.
/// </summary>
public IReadOnlyList<string> Capabilities { get; init; } = ["observations", "linksets"];
/// <summary>
/// Types of artifacts this connector produces.
/// </summary>
public IReadOnlyList<string> ArtifactKinds { get; init; } = ["raw-advisory", "normalized", "linkset"];
/// <summary>
/// Default schedule (cron expression).
/// </summary>
public string DefaultCron { get; init; } = "0 */6 * * *"; // Every 6 hours
/// <summary>
/// Default time zone for scheduling.
/// </summary>
public string DefaultTimeZone { get; init; } = "UTC";
/// <summary>
/// Maximum parallel runs allowed.
/// </summary>
public int MaxParallelRuns { get; init; } = 1;
/// <summary>
/// Maximum lag in minutes before alert/retry triggers.
/// </summary>
public int MaxLagMinutes { get; init; } = 360; // 6 hours
/// <summary>
/// Default requests per minute limit.
/// </summary>
public int DefaultRpm { get; init; } = 60;
/// <summary>
/// Default burst capacity.
/// </summary>
public int DefaultBurst { get; init; } = 10;
/// <summary>
/// Default cooldown period after burst exhaustion.
/// </summary>
public int DefaultCooldownSeconds { get; init; } = 30;
/// <summary>
/// Allowed egress hosts (for airgap mode).
/// </summary>
public IReadOnlyList<string> EgressAllowlist { get; init; } = [];
/// <summary>
/// Reference to secrets store key (never inlined).
/// </summary>
public string? AuthRef { get; init; }
}
/// <summary>
/// Interface for connectors to provide their orchestrator metadata.
/// </summary>
public interface IConnectorMetadataProvider
{
/// <summary>
/// Gets the connector's orchestrator registration metadata.
/// </summary>
ConnectorMetadata GetMetadata();
}
/// <summary>
/// Default metadata provider that derives metadata from connector name.
/// </summary>
public sealed class DefaultConnectorMetadataProvider : IConnectorMetadataProvider
{
private readonly string _sourceName;
public DefaultConnectorMetadataProvider(string sourceName)
{
ArgumentException.ThrowIfNullOrWhiteSpace(sourceName);
_sourceName = sourceName.ToLowerInvariant();
}
public ConnectorMetadata GetMetadata() => new()
{
ConnectorId = _sourceName,
Source = _sourceName,
DisplayName = _sourceName.ToUpperInvariant()
};
}
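
A sketch of a connector overriding the name-derived default by providing richer metadata; all values below are illustrative for a hypothetical vendor feed.

public sealed class AcmeMetadataProvider : IConnectorMetadataProvider
{
    public ConnectorMetadata GetMetadata() => new()
    {
        ConnectorId = "vendor-acme",
        Source = "vendor:acme",
        DisplayName = "Acme PSIRT",
        Description = "Acme product security advisories",
        Capabilities = ["observations", "linksets"],
        DefaultCron = "0 */8 * * *",                       // every 8 hours
        DefaultRpm = 20,
        EgressAllowlist = ["psirt.acme.example"],
        AuthRef = "secret:concelier/vendor-acme/api-key",  // reference only, never inlined
    };
}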

View File

@@ -0,0 +1,266 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Service for registering connectors with the orchestrator.
/// Per CONCELIER-ORCH-32-001: Register every advisory connector with orchestrator
/// (metadata, auth scopes, rate policies) for transparent, reproducible scheduling.
/// </summary>
public interface IConnectorRegistrationService
{
/// <summary>
/// Registers a connector with the orchestrator.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="metadata">Connector metadata.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The created or updated registry record.</returns>
Task<OrchestratorRegistryRecord> RegisterAsync(
string tenant,
ConnectorMetadata metadata,
CancellationToken cancellationToken);
/// <summary>
/// Registers multiple connectors with the orchestrator.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="metadataList">List of connector metadata.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The created or updated registry records.</returns>
Task<IReadOnlyList<OrchestratorRegistryRecord>> RegisterBatchAsync(
string tenant,
IEnumerable<ConnectorMetadata> metadataList,
CancellationToken cancellationToken);
/// <summary>
/// Gets the registry record for a connector.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The registry record, or null if not found.</returns>
Task<OrchestratorRegistryRecord?> GetRegistrationAsync(
string tenant,
string connectorId,
CancellationToken cancellationToken);
/// <summary>
/// Lists all registered connectors for a tenant.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>All registry records for the tenant.</returns>
Task<IReadOnlyList<OrchestratorRegistryRecord>> ListRegistrationsAsync(
string tenant,
CancellationToken cancellationToken);
}
/// <summary>
/// Default implementation of <see cref="IConnectorRegistrationService"/>.
/// </summary>
public sealed class ConnectorRegistrationService : IConnectorRegistrationService
{
private readonly IOrchestratorRegistryStore _store;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ConnectorRegistrationService> _logger;
public ConnectorRegistrationService(
IOrchestratorRegistryStore store,
TimeProvider timeProvider,
ILogger<ConnectorRegistrationService> logger)
{
ArgumentNullException.ThrowIfNull(store);
ArgumentNullException.ThrowIfNull(timeProvider);
ArgumentNullException.ThrowIfNull(logger);
_store = store;
_timeProvider = timeProvider;
_logger = logger;
}
/// <inheritdoc />
public async Task<OrchestratorRegistryRecord> RegisterAsync(
string tenant,
ConnectorMetadata metadata,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentNullException.ThrowIfNull(metadata);
var now = _timeProvider.GetUtcNow();
var lockKey = $"concelier:{tenant}:{metadata.ConnectorId}";
var record = new OrchestratorRegistryRecord(
tenant,
metadata.ConnectorId,
metadata.Source,
metadata.Capabilities.ToList(),
metadata.AuthRef ?? $"secret:concelier/{metadata.ConnectorId}/api-key",
new OrchestratorSchedule(
metadata.DefaultCron,
metadata.DefaultTimeZone,
metadata.MaxParallelRuns,
metadata.MaxLagMinutes),
new OrchestratorRatePolicy(
metadata.DefaultRpm,
metadata.DefaultBurst,
metadata.DefaultCooldownSeconds),
metadata.ArtifactKinds.ToList(),
lockKey,
new OrchestratorEgressGuard(
metadata.EgressAllowlist.ToList(),
metadata.EgressAllowlist.Count > 0), // airgapMode true if allowlist specified
now,
now);
await _store.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Registered connector {ConnectorId} for tenant {Tenant} with source {Source}",
metadata.ConnectorId,
tenant,
metadata.Source);
return record;
}
/// <inheritdoc />
public async Task<IReadOnlyList<OrchestratorRegistryRecord>> RegisterBatchAsync(
string tenant,
IEnumerable<ConnectorMetadata> metadataList,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentNullException.ThrowIfNull(metadataList);
var results = new List<OrchestratorRegistryRecord>();
foreach (var metadata in metadataList)
{
var record = await RegisterAsync(tenant, metadata, cancellationToken).ConfigureAwait(false);
results.Add(record);
}
_logger.LogInformation(
"Batch registered {Count} connectors for tenant {Tenant}",
results.Count,
tenant);
return results.AsReadOnly();
}
/// <inheritdoc />
public Task<OrchestratorRegistryRecord?> GetRegistrationAsync(
string tenant,
string connectorId,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
return _store.GetAsync(tenant, connectorId, cancellationToken);
}
/// <inheritdoc />
public Task<IReadOnlyList<OrchestratorRegistryRecord>> ListRegistrationsAsync(
string tenant,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
return _store.ListAsync(tenant, cancellationToken);
}
}
/// <summary>
/// Metadata for well-known advisory connectors.
/// Provides default metadata configurations for standard StellaOps connectors.
/// </summary>
public static class WellKnownConnectors
{
/// <summary>
/// NVD (National Vulnerability Database) connector metadata.
/// </summary>
public static ConnectorMetadata Nvd => new()
{
ConnectorId = "nvd",
Source = "nvd",
DisplayName = "NVD",
Description = "NIST National Vulnerability Database",
Capabilities = ["observations", "linksets", "timeline"],
ArtifactKinds = ["raw-advisory", "normalized", "linkset"],
DefaultCron = "0 */4 * * *", // Every 4 hours
DefaultRpm = 30, // NVD rate limits
EgressAllowlist = ["services.nvd.nist.gov", "nvd.nist.gov"]
};
/// <summary>
/// GHSA (GitHub Security Advisories) connector metadata.
/// </summary>
public static ConnectorMetadata Ghsa => new()
{
ConnectorId = "ghsa",
Source = "ghsa",
DisplayName = "GHSA",
Description = "GitHub Security Advisories",
Capabilities = ["observations", "linksets"],
ArtifactKinds = ["raw-advisory", "normalized", "linkset"],
DefaultCron = "0 */2 * * *", // Every 2 hours
DefaultRpm = 5000, // GitHub GraphQL limits
EgressAllowlist = ["api.github.com"]
};
/// <summary>
/// OSV (Open Source Vulnerabilities) connector metadata.
/// </summary>
public static ConnectorMetadata Osv => new()
{
ConnectorId = "osv",
Source = "osv",
DisplayName = "OSV",
Description = "Google Open Source Vulnerabilities",
Capabilities = ["observations", "linksets"],
ArtifactKinds = ["raw-advisory", "normalized", "linkset"],
DefaultCron = "0 */1 * * *", // Every hour
DefaultRpm = 100,
EgressAllowlist = ["osv.dev", "api.osv.dev"]
};
/// <summary>
/// KEV (Known Exploited Vulnerabilities) connector metadata.
/// </summary>
public static ConnectorMetadata Kev => new()
{
ConnectorId = "kev",
Source = "kev",
DisplayName = "KEV",
Description = "CISA Known Exploited Vulnerabilities",
Capabilities = ["observations"],
ArtifactKinds = ["raw-advisory", "normalized"],
DefaultCron = "0 */6 * * *", // Every 6 hours
DefaultRpm = 60,
EgressAllowlist = ["www.cisa.gov"]
};
/// <summary>
/// ICS-CISA connector metadata.
/// </summary>
public static ConnectorMetadata IcsCisa => new()
{
ConnectorId = "icscisa",
Source = "icscisa",
DisplayName = "ICS-CISA",
Description = "CISA Industrial Control Systems Advisories",
Capabilities = ["observations", "linksets", "timeline"],
ArtifactKinds = ["raw-advisory", "normalized", "linkset"],
DefaultCron = "0 */12 * * *", // Every 12 hours
DefaultRpm = 30,
EgressAllowlist = ["www.cisa.gov", "us-cert.cisa.gov"]
};
/// <summary>
/// Gets metadata for all well-known connectors.
/// </summary>
public static IReadOnlyList<ConnectorMetadata> All => [Nvd, Ghsa, Osv, Kev, IcsCisa];
}
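
Registering the stock connectors for a tenant then reduces to one call against the service, sketched here with `registration` resolved from DI.

// `registration` is an IConnectorRegistrationService (assumed to be wired by AddConcelierOrchestrationServices()).
var records = await registration.RegisterBatchAsync(
    "tenant-a",
    WellKnownConnectors.All,
    CancellationToken.None);

Console.WriteLine($"registered {records.Count} connectors for tenant-a");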

View File

@@ -0,0 +1,346 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Default implementation of <see cref="IConnectorWorker"/>.
/// Per CONCELIER-ORCH-32-002: Adopt orchestrator worker SDK in ingestion loops;
/// emit heartbeats/progress/artifact hashes for deterministic replays.
/// </summary>
public sealed class ConnectorWorker : IConnectorWorker
{
private readonly string _tenant;
private readonly string _connectorId;
private readonly IOrchestratorRegistryStore _store;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ConnectorWorker> _logger;
private readonly List<string> _artifactHashes = [];
private readonly object _lock = new();
private Guid _runId;
private long _sequence;
private OrchestratorHeartbeatStatus _status = OrchestratorHeartbeatStatus.Starting;
private OrchestratorThrottleOverride? _activeThrottle;
private long _lastAckedCommandSequence;
private bool _isPaused;
/// <inheritdoc />
public Guid RunId => _runId;
/// <inheritdoc />
public string ConnectorId => _connectorId;
/// <inheritdoc />
public OrchestratorHeartbeatStatus Status => _status;
public ConnectorWorker(
string tenant,
string connectorId,
IOrchestratorRegistryStore store,
TimeProvider timeProvider,
ILogger<ConnectorWorker> logger)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
ArgumentException.ThrowIfNullOrWhiteSpace(connectorId);
ArgumentNullException.ThrowIfNull(store);
ArgumentNullException.ThrowIfNull(timeProvider);
ArgumentNullException.ThrowIfNull(logger);
_tenant = tenant;
_connectorId = connectorId;
_store = store;
_timeProvider = timeProvider;
_logger = logger;
}
/// <inheritdoc />
public async Task StartRunAsync(CancellationToken cancellationToken)
{
_runId = Guid.NewGuid();
_sequence = 0;
_status = OrchestratorHeartbeatStatus.Starting;
_lastAckedCommandSequence = 0;
_isPaused = false;
lock (_lock)
{
_artifactHashes.Clear();
}
_logger.LogInformation(
"Starting connector run {RunId} for {ConnectorId} on tenant {Tenant}",
_runId, _connectorId, _tenant);
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
_status = OrchestratorHeartbeatStatus.Running;
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task ReportProgressAsync(
int progress,
string? artifactHash = null,
string? artifactKind = null,
CancellationToken cancellationToken = default)
{
if (progress < 0) progress = 0;
if (progress > 100) progress = 100;
if (!string.IsNullOrWhiteSpace(artifactHash))
{
RecordArtifact(artifactHash);
}
var heartbeat = new OrchestratorHeartbeatRecord(
_tenant,
_connectorId,
_runId,
Interlocked.Increment(ref _sequence),
_status,
progress,
null, // queueDepth
artifactHash,
artifactKind,
null, // errorCode
null, // retryAfterSeconds
_timeProvider.GetUtcNow());
await _store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task CompleteSuccessAsync(CancellationToken cancellationToken)
{
_status = OrchestratorHeartbeatStatus.Succeeded;
_logger.LogInformation(
"Connector run {RunId} for {ConnectorId} completed successfully with {ArtifactCount} artifacts",
_runId, _connectorId, _artifactHashes.Count);
await EmitHeartbeatAsync(100, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task CompleteFailureAsync(
string errorCode,
int? retryAfterSeconds = null,
CancellationToken cancellationToken = default)
{
_status = OrchestratorHeartbeatStatus.Failed;
_logger.LogWarning(
"Connector run {RunId} for {ConnectorId} failed with error {ErrorCode}",
_runId, _connectorId, errorCode);
var heartbeat = new OrchestratorHeartbeatRecord(
_tenant,
_connectorId,
_runId,
Interlocked.Increment(ref _sequence),
_status,
null, // progress
null, // queueDepth
null, // lastArtifactHash
null, // lastArtifactKind
errorCode,
retryAfterSeconds,
_timeProvider.GetUtcNow());
await _store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<bool> CheckContinueAsync(CancellationToken cancellationToken)
{
// Check for cancellation first
if (cancellationToken.IsCancellationRequested)
{
return false;
}
// Poll for pending commands
var commands = await _store.GetPendingCommandsAsync(
_tenant,
_connectorId,
_runId,
_lastAckedCommandSequence,
cancellationToken).ConfigureAwait(false);
foreach (var command in commands)
{
await ProcessCommandAsync(command, cancellationToken).ConfigureAwait(false);
_lastAckedCommandSequence = command.Sequence;
}
// If paused, wait for resume or cancellation
if (_isPaused)
{
_logger.LogInformation(
"Connector run {RunId} for {ConnectorId} is paused",
_runId, _connectorId);
// Keep checking for resume command
while (_isPaused && !cancellationToken.IsCancellationRequested)
{
await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken).ConfigureAwait(false);
commands = await _store.GetPendingCommandsAsync(
_tenant,
_connectorId,
_runId,
_lastAckedCommandSequence,
cancellationToken).ConfigureAwait(false);
foreach (var cmd in commands)
{
await ProcessCommandAsync(cmd, cancellationToken).ConfigureAwait(false);
_lastAckedCommandSequence = cmd.Sequence;
}
}
}
return !cancellationToken.IsCancellationRequested && !_isPaused;
}
/// <inheritdoc />
public OrchestratorThrottleOverride? GetActiveThrottle()
{
if (_activeThrottle is null)
{
return null;
}
// Check if throttle has expired
if (_activeThrottle.ExpiresAt.HasValue && _activeThrottle.ExpiresAt.Value <= _timeProvider.GetUtcNow())
{
_activeThrottle = null;
return null;
}
return _activeThrottle;
}
/// <inheritdoc />
public void RecordArtifact(string artifactHash)
{
ArgumentException.ThrowIfNullOrWhiteSpace(artifactHash);
lock (_lock)
{
_artifactHashes.Add(artifactHash);
}
}
/// <inheritdoc />
public IReadOnlyList<string> GetArtifactHashes()
{
lock (_lock)
{
return _artifactHashes.ToList().AsReadOnly();
}
}
private async Task ProcessCommandAsync(OrchestratorCommandRecord command, CancellationToken cancellationToken)
{
_logger.LogInformation(
"Processing command {Command} (seq {Sequence}) for run {RunId}",
command.Command, command.Sequence, _runId);
switch (command.Command)
{
case OrchestratorCommandKind.Pause:
_isPaused = true;
_status = OrchestratorHeartbeatStatus.Paused;
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
break;
case OrchestratorCommandKind.Resume:
_isPaused = false;
_status = OrchestratorHeartbeatStatus.Running;
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
break;
case OrchestratorCommandKind.Throttle:
_activeThrottle = command.Throttle;
_status = OrchestratorHeartbeatStatus.Throttled;
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Throttle applied for run {RunId}: RPM={Rpm}, Burst={Burst}, Cooldown={Cooldown}s, ExpiresAt={ExpiresAt}",
_runId,
_activeThrottle?.Rpm,
_activeThrottle?.Burst,
_activeThrottle?.CooldownSeconds,
_activeThrottle?.ExpiresAt);
break;
case OrchestratorCommandKind.Backfill:
_status = OrchestratorHeartbeatStatus.Backfill;
await EmitHeartbeatAsync(cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Backfill command received for run {RunId}: FromCursor={FromCursor}, ToCursor={ToCursor}",
_runId,
command.Backfill?.FromCursor,
command.Backfill?.ToCursor);
break;
}
}
private Task EmitHeartbeatAsync(CancellationToken cancellationToken) =>
EmitHeartbeatAsync(null, cancellationToken);
private async Task EmitHeartbeatAsync(int? progress, CancellationToken cancellationToken)
{
var heartbeat = new OrchestratorHeartbeatRecord(
_tenant,
_connectorId,
_runId,
Interlocked.Increment(ref _sequence),
_status,
progress,
null, // queueDepth
null, // lastArtifactHash
null, // lastArtifactKind
null, // errorCode
null, // retryAfterSeconds
_timeProvider.GetUtcNow());
await _store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Factory implementation for creating connector workers.
/// </summary>
public sealed class ConnectorWorkerFactory : IConnectorWorkerFactory
{
private readonly IOrchestratorRegistryStore _store;
private readonly TimeProvider _timeProvider;
private readonly ILoggerFactory _loggerFactory;
public ConnectorWorkerFactory(
IOrchestratorRegistryStore store,
TimeProvider timeProvider,
ILoggerFactory loggerFactory)
{
ArgumentNullException.ThrowIfNull(store);
ArgumentNullException.ThrowIfNull(timeProvider);
ArgumentNullException.ThrowIfNull(loggerFactory);
_store = store;
_timeProvider = timeProvider;
_loggerFactory = loggerFactory;
}
/// <inheritdoc />
public IConnectorWorker CreateWorker(string tenant, string connectorId)
{
return new ConnectorWorker(
tenant,
connectorId,
_store,
_timeProvider,
_loggerFactory.CreateLogger<ConnectorWorker>());
}
}
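
The ingestion-loop shape this SDK expects, sketched with a factory-created worker; the three-page loop body and the error code are stand-ins for real fetch-and-persist work.

// `workerFactory` is an IConnectorWorkerFactory resolved from DI.
var worker = workerFactory.CreateWorker("tenant-a", "osv");
await worker.StartRunAsync(CancellationToken.None);

try
{
    for (var page = 1; page <= 3; page++)
    {
        // Honor orchestrator pause/throttle/stop commands between pages.
        if (!await worker.CheckContinueAsync(CancellationToken.None))
        {
            break;
        }

        // Stand-in for fetching and persisting one page of advisories.
        var artifactHash = ConnectorExecutionContext.ComputeHash($"osv-page-{page}");
        await worker.ReportProgressAsync(page * 33, artifactHash, "raw-advisory");
    }

    await worker.CompleteSuccessAsync(CancellationToken.None);
}
catch (Exception)
{
    await worker.CompleteFailureAsync("INGEST_FAILED", retryAfterSeconds: 300);
    throw;
}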

View File

@@ -0,0 +1,147 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Worker interface for orchestrator-managed connector execution.
/// Per CONCELIER-ORCH-32-002: Adopt orchestrator worker SDK in ingestion loops;
/// emit heartbeats/progress/artifact hashes for deterministic replays.
/// </summary>
public interface IConnectorWorker
{
/// <summary>
/// Gets the current run ID.
/// </summary>
Guid RunId { get; }
/// <summary>
/// Gets the connector ID.
/// </summary>
string ConnectorId { get; }
/// <summary>
/// Gets the current status.
/// </summary>
OrchestratorHeartbeatStatus Status { get; }
/// <summary>
/// Starts a new connector run.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
Task StartRunAsync(CancellationToken cancellationToken);
/// <summary>
/// Reports progress during execution.
/// </summary>
/// <param name="progress">Progress percentage (0-100).</param>
/// <param name="artifactHash">Hash of the last produced artifact.</param>
/// <param name="artifactKind">Kind of the last produced artifact.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task ReportProgressAsync(int progress, string? artifactHash = null, string? artifactKind = null, CancellationToken cancellationToken = default);
/// <summary>
/// Reports a successful completion.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
Task CompleteSuccessAsync(CancellationToken cancellationToken);
/// <summary>
/// Reports a failure.
/// </summary>
/// <param name="errorCode">Error code.</param>
/// <param name="retryAfterSeconds">Suggested retry delay.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task CompleteFailureAsync(string errorCode, int? retryAfterSeconds = null, CancellationToken cancellationToken = default);
/// <summary>
/// Checks if the worker should pause or stop based on orchestrator commands.
/// Per CONCELIER-ORCH-33-001: Honor orchestrator pause/throttle/retry controls.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if execution should continue, false if paused or stopped.</returns>
Task<bool> CheckContinueAsync(CancellationToken cancellationToken);
/// <summary>
/// Gets any pending throttle override.
/// </summary>
OrchestratorThrottleOverride? GetActiveThrottle();
/// <summary>
/// Records an artifact hash for the current run.
/// </summary>
/// <param name="artifactHash">The artifact hash.</param>
void RecordArtifact(string artifactHash);
/// <summary>
/// Gets all recorded artifact hashes for the current run.
/// </summary>
IReadOnlyList<string> GetArtifactHashes();
}
/// <summary>
/// Factory for creating connector workers.
/// </summary>
public interface IConnectorWorkerFactory
{
/// <summary>
/// Creates a worker for the specified connector and tenant.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <returns>A new connector worker instance.</returns>
IConnectorWorker CreateWorker(string tenant, string connectorId);
}
/// <summary>
/// Context for connector execution with orchestrator integration.
/// </summary>
public sealed class ConnectorExecutionContext
{
/// <summary>
/// Gets the worker managing this execution.
/// </summary>
public required IConnectorWorker Worker { get; init; }
/// <summary>
/// Gets the tenant identifier.
/// </summary>
public required string Tenant { get; init; }
/// <summary>
/// Gets the run identifier.
/// </summary>
public Guid RunId => Worker.RunId;
/// <summary>
/// Gets the connector identifier.
/// </summary>
public string ConnectorId => Worker.ConnectorId;
/// <summary>
/// Optional backfill range (for CONCELIER-ORCH-34-001).
/// </summary>
public OrchestratorBackfillRange? BackfillRange { get; init; }
/// <summary>
/// Computes a deterministic SHA-256 hash of the given content.
/// </summary>
/// <param name="content">Content to hash.</param>
/// <returns>Hex-encoded SHA-256 hash.</returns>
public static string ComputeHash(string content)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(content));
return Convert.ToHexString(bytes).ToLowerInvariant();
}
/// <summary>
/// Computes a deterministic SHA-256 hash of the given bytes.
/// </summary>
/// <param name="bytes">Bytes to hash.</param>
/// <returns>Hex-encoded SHA-256 hash.</returns>
public static string ComputeHash(byte[] bytes)
{
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
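// Usage sketch (illustrative only, not part of this commit): one way an ingestion loop
// could drive the worker SDK per CONCELIER-ORCH-32-002/33-001. fetchPageAsync and the
// "raw-advisory" artifact kind are placeholder assumptions, not fixed contracts.
internal static class ConnectorWorkerLoopSketch
{
    public static async Task RunAsync(
        IConnectorWorkerFactory factory,
        Func<int, CancellationToken, Task<string?>> fetchPageAsync,
        string tenant,
        string connectorId,
        CancellationToken cancellationToken)
    {
        var worker = factory.CreateWorker(tenant, connectorId);
        await worker.StartRunAsync(cancellationToken);
        try
        {
            var page = 0;
            while (await worker.CheckContinueAsync(cancellationToken))
            {
                var payload = await fetchPageAsync(page, cancellationToken);
                if (payload is null)
                {
                    await worker.CompleteSuccessAsync(cancellationToken);
                    return;
                }

                // Hash every artifact so the run can be replayed deterministically.
                var hash = ConnectorExecutionContext.ComputeHash(payload);
                worker.RecordArtifact(hash);
                await worker.ReportProgressAsync(
                    progress: Math.Min(99, ++page),
                    artifactHash: hash,
                    artifactKind: "raw-advisory",
                    cancellationToken: cancellationToken);
            }
            // CheckContinueAsync returned false: the orchestrator paused or stopped the run.
        }
        catch (Exception)
        {
            await worker.CompleteFailureAsync("ingest_failed", retryAfterSeconds: 300, cancellationToken);
            throw;
        }
    }
}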


@@ -0,0 +1,102 @@
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Storage interface for orchestrator registry, heartbeat, and command records.
/// Per CONCELIER-ORCH-32-001: Register every advisory connector with orchestrator
/// (metadata, auth scopes, rate policies) for transparent, reproducible scheduling.
/// </summary>
public interface IOrchestratorRegistryStore
{
/// <summary>
/// Upserts a connector registry record.
/// Creates a new record if none exists; updates the existing record when the connectorId+tenant pair matches.
/// </summary>
/// <param name="record">The registry record to upsert.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task UpsertAsync(OrchestratorRegistryRecord record, CancellationToken cancellationToken);
/// <summary>
/// Gets a connector registry record by tenant and connectorId.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The registry record, or null if not found.</returns>
Task<OrchestratorRegistryRecord?> GetAsync(string tenant, string connectorId, CancellationToken cancellationToken);
/// <summary>
/// Lists all connector registry records for a tenant.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>All registry records for the tenant.</returns>
Task<IReadOnlyList<OrchestratorRegistryRecord>> ListAsync(string tenant, CancellationToken cancellationToken);
/// <summary>
/// Appends a heartbeat record from a running connector.
/// Heartbeats are append-only; stale sequences should be ignored by consumers.
/// </summary>
/// <param name="heartbeat">The heartbeat record to append.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task AppendHeartbeatAsync(OrchestratorHeartbeatRecord heartbeat, CancellationToken cancellationToken);
/// <summary>
/// Gets the latest heartbeat for a connector run.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The latest heartbeat, or null if no heartbeats exist.</returns>
Task<OrchestratorHeartbeatRecord?> GetLatestHeartbeatAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken);
/// <summary>
/// Enqueues a command for a connector run.
/// </summary>
/// <param name="command">The command record to enqueue.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task EnqueueCommandAsync(OrchestratorCommandRecord command, CancellationToken cancellationToken);
/// <summary>
/// Gets pending commands for a connector run.
/// Commands with sequence greater than afterSequence are returned.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="afterSequence">Return commands with sequence greater than this value (null for all).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Pending commands ordered by sequence.</returns>
Task<IReadOnlyList<OrchestratorCommandRecord>> GetPendingCommandsAsync(
string tenant,
string connectorId,
Guid runId,
long? afterSequence,
CancellationToken cancellationToken);
/// <summary>
/// Stores a run manifest for backfill/replay evidence.
/// Per prep doc: Manifests are written to Evidence Locker ledger for replay.
/// </summary>
/// <param name="manifest">The run manifest to store.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task StoreManifestAsync(OrchestratorRunManifest manifest, CancellationToken cancellationToken);
/// <summary>
/// Gets a run manifest by run identifier.
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="connectorId">Connector identifier.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The run manifest, or null if not found.</returns>
Task<OrchestratorRunManifest?> GetManifestAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken);
}
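// Usage sketch (illustrative, not part of this commit): enqueueing a throttle command and
// draining it with a sequence cursor. Tenant/connector values and limits are assumptions.
internal static class OrchestratorCommandPollingSketch
{
    public static async Task EnqueueAndDrainAsync(
        IOrchestratorRegistryStore store,
        Guid runId,
        CancellationToken cancellationToken)
    {
        var now = DateTimeOffset.UtcNow;
        var command = new OrchestratorCommandRecord(
            Tenant: "tenant-a",
            ConnectorId: "osv",
            RunId: runId,
            Sequence: 1,
            Command: OrchestratorCommandKind.Throttle,
            Throttle: new OrchestratorThrottleOverride(Rpm: 10, Burst: 2, CooldownSeconds: 60, ExpiresAt: now.AddMinutes(15)),
            Backfill: null,
            CreatedAt: now,
            ExpiresAt: now.AddMinutes(15));
        await store.EnqueueCommandAsync(command, cancellationToken);

        // Pass the last applied sequence so only newer, unexpired commands come back.
        long? lastApplied = null;
        var pending = await store.GetPendingCommandsAsync("tenant-a", "osv", runId, lastApplied, cancellationToken);
        foreach (var pendingCommand in pending)
        {
            lastApplied = pendingCommand.Sequence;
            // Apply pendingCommand.Command / Throttle / Backfill to the running worker here.
        }
    }
}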


@@ -0,0 +1,143 @@
using System.Collections.Concurrent;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// In-memory implementation of orchestrator registry store for testing and development.
/// Production deployments should use a persistent store (MongoDB, etc.).
/// </summary>
public sealed class InMemoryOrchestratorRegistryStore : IOrchestratorRegistryStore
{
private readonly ConcurrentDictionary<(string Tenant, string ConnectorId), OrchestratorRegistryRecord> _registry = new();
private readonly ConcurrentDictionary<(string Tenant, string ConnectorId, Guid RunId), List<OrchestratorHeartbeatRecord>> _heartbeats = new();
private readonly ConcurrentDictionary<(string Tenant, string ConnectorId, Guid RunId), List<OrchestratorCommandRecord>> _commands = new();
private readonly ConcurrentDictionary<(string Tenant, string ConnectorId, Guid RunId), OrchestratorRunManifest> _manifests = new();
/// <inheritdoc />
public Task UpsertAsync(OrchestratorRegistryRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
_registry[(record.Tenant, record.ConnectorId)] = record;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<OrchestratorRegistryRecord?> GetAsync(string tenant, string connectorId, CancellationToken cancellationToken)
{
_registry.TryGetValue((tenant, connectorId), out var record);
return Task.FromResult(record);
}
/// <inheritdoc />
public Task<IReadOnlyList<OrchestratorRegistryRecord>> ListAsync(string tenant, CancellationToken cancellationToken)
{
var records = _registry.Values
.Where(r => r.Tenant == tenant)
.OrderBy(r => r.ConnectorId)
.ToList()
.AsReadOnly();
return Task.FromResult<IReadOnlyList<OrchestratorRegistryRecord>>(records);
}
/// <inheritdoc />
public Task AppendHeartbeatAsync(OrchestratorHeartbeatRecord heartbeat, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(heartbeat);
var key = (heartbeat.Tenant, heartbeat.ConnectorId, heartbeat.RunId);
var heartbeats = _heartbeats.GetOrAdd(key, _ => new List<OrchestratorHeartbeatRecord>());
lock (heartbeats)
{
heartbeats.Add(heartbeat);
}
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<OrchestratorHeartbeatRecord?> GetLatestHeartbeatAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken)
{
if (!_heartbeats.TryGetValue((tenant, connectorId, runId), out var heartbeats))
{
return Task.FromResult<OrchestratorHeartbeatRecord?>(null);
}
lock (heartbeats)
{
var latest = heartbeats.OrderByDescending(h => h.Sequence).FirstOrDefault();
return Task.FromResult<OrchestratorHeartbeatRecord?>(latest);
}
}
/// <inheritdoc />
public Task EnqueueCommandAsync(OrchestratorCommandRecord command, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(command);
var key = (command.Tenant, command.ConnectorId, command.RunId);
var commands = _commands.GetOrAdd(key, _ => new List<OrchestratorCommandRecord>());
lock (commands)
{
commands.Add(command);
}
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<IReadOnlyList<OrchestratorCommandRecord>> GetPendingCommandsAsync(
string tenant,
string connectorId,
Guid runId,
long? afterSequence,
CancellationToken cancellationToken)
{
if (!_commands.TryGetValue((tenant, connectorId, runId), out var commands))
{
return Task.FromResult<IReadOnlyList<OrchestratorCommandRecord>>(Array.Empty<OrchestratorCommandRecord>());
}
lock (commands)
{
var now = DateTimeOffset.UtcNow;
var pending = commands
.Where(c => (afterSequence is null || c.Sequence > afterSequence)
&& (c.ExpiresAt is null || c.ExpiresAt > now))
.OrderBy(c => c.Sequence)
.ToList()
.AsReadOnly();
return Task.FromResult<IReadOnlyList<OrchestratorCommandRecord>>(pending);
}
}
/// <inheritdoc />
public Task StoreManifestAsync(OrchestratorRunManifest manifest, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(manifest);
var key = (manifest.Tenant, manifest.ConnectorId, manifest.RunId);
_manifests[key] = manifest;
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<OrchestratorRunManifest?> GetManifestAsync(
string tenant,
string connectorId,
Guid runId,
CancellationToken cancellationToken)
{
_manifests.TryGetValue((tenant, connectorId, runId), out var manifest);
return Task.FromResult(manifest);
}
/// <summary>
/// Clears all stored data. Useful for test isolation.
/// </summary>
public void Clear()
{
_registry.Clear();
_heartbeats.Clear();
_commands.Clear();
_manifests.Clear();
}
}
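// Test-style sketch (illustrative, not part of this commit): heartbeats are append-only and
// the highest sequence wins when reading the latest one. Values below are assumptions.
internal static class InMemoryRegistryStoreHeartbeatSketch
{
    public static async Task<OrchestratorHeartbeatRecord?> LatestWinsAsync(CancellationToken cancellationToken)
    {
        var store = new InMemoryOrchestratorRegistryStore();
        var runId = Guid.NewGuid();

        await store.AppendHeartbeatAsync(
            new OrchestratorHeartbeatRecord(
                Tenant: "tenant-a", ConnectorId: "osv", RunId: runId, Sequence: 1,
                Status: OrchestratorHeartbeatStatus.Running, Progress: 10, QueueDepth: 5,
                LastArtifactHash: null, LastArtifactKind: null, ErrorCode: null,
                RetryAfterSeconds: null, TimestampUtc: DateTimeOffset.UtcNow),
            cancellationToken);

        await store.AppendHeartbeatAsync(
            new OrchestratorHeartbeatRecord(
                Tenant: "tenant-a", ConnectorId: "osv", RunId: runId, Sequence: 2,
                Status: OrchestratorHeartbeatStatus.Succeeded, Progress: 100, QueueDepth: 0,
                LastArtifactHash: "deadbeef", LastArtifactKind: "raw-advisory", ErrorCode: null,
                RetryAfterSeconds: null, TimestampUtc: DateTimeOffset.UtcNow),
            cancellationToken);

        // Returns the Sequence == 2 record; Clear() keeps subsequent tests isolated.
        var latest = await store.GetLatestHeartbeatAsync("tenant-a", "osv", runId, cancellationToken);
        store.Clear();
        return latest;
    }
}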


@@ -0,0 +1,47 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Service collection extensions for orchestration-related services.
/// </summary>
public static class OrchestrationServiceCollectionExtensions
{
/// <summary>
/// Adds orchestrator registry services to the service collection.
/// Per CONCELIER-ORCH-32-001: Register every advisory connector with orchestrator
/// (metadata, auth scopes, rate policies) for transparent, reproducible scheduling.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierOrchestrationServices(this IServiceCollection services)
{
// Register in-memory store by default; replace with persistent store in production
services.TryAddSingleton<IOrchestratorRegistryStore, InMemoryOrchestratorRegistryStore>();
// CONCELIER-ORCH-32-001: Connector registration service
services.TryAddSingleton<IConnectorRegistrationService, ConnectorRegistrationService>();
// CONCELIER-ORCH-32-002: Worker SDK for heartbeats/progress
services.TryAddSingleton<IConnectorWorkerFactory, ConnectorWorkerFactory>();
// CONCELIER-ORCH-34-001: Backfill executor
services.TryAddSingleton<IBackfillExecutor, BackfillExecutor>();
return services;
}
/// <summary>
/// Adds a custom implementation of <see cref="IOrchestratorRegistryStore"/>.
/// </summary>
/// <typeparam name="TStore">The store implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddOrchestratorRegistryStore<TStore>(this IServiceCollection services)
where TStore : class, IOrchestratorRegistryStore
{
services.AddSingleton<IOrchestratorRegistryStore, TStore>();
return services;
}
}
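// Wiring sketch (illustrative, not part of this commit): how a host might swap in a
// persistent store. TStore stands for any production implementation; none is assumed here.
internal static class OrchestrationWiringSketch
{
    public static IServiceCollection AddConcelierOrchestrationWithStore<TStore>(this IServiceCollection services)
        where TStore : class, IOrchestratorRegistryStore
    {
        // Register the persistent store first so the TryAddSingleton inside
        // AddConcelierOrchestrationServices leaves it in place instead of the in-memory default.
        services.AddOrchestratorRegistryStore<TStore>();
        return services.AddConcelierOrchestrationServices();
    }
}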


@@ -0,0 +1,222 @@
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Status of a connector heartbeat per orchestrator control contract.
/// Per CONCELIER-ORCH-32-001 prep doc at docs/modules/concelier/prep/2025-11-20-orchestrator-registry-prep.md.
/// </summary>
public enum OrchestratorHeartbeatStatus
{
Starting,
Running,
Paused,
Throttled,
Backfill,
Failed,
Succeeded
}
/// <summary>
/// Command kinds for orchestrator control messages.
/// </summary>
public enum OrchestratorCommandKind
{
Pause,
Resume,
Throttle,
Backfill
}
/// <summary>
/// Advisory source types for connector registration.
/// </summary>
public enum OrchestratorSourceKind
{
Nvd,
Ghsa,
Osv,
IcsCisa,
Kisa,
Vendor
}
/// <summary>
/// Connector capability flags.
/// </summary>
public enum OrchestratorCapability
{
Observations,
Linksets,
Timeline,
Attestations
}
/// <summary>
/// Artifact kinds produced by connectors.
/// </summary>
public enum OrchestratorArtifactKind
{
RawAdvisory,
Normalized,
Linkset,
Timeline,
Attestation
}
/// <summary>
/// Schedule configuration for a connector.
/// </summary>
/// <param name="Cron">Cron expression for scheduling.</param>
/// <param name="TimeZone">IANA time zone identifier (default: UTC).</param>
/// <param name="MaxParallelRuns">Maximum concurrent runs allowed.</param>
/// <param name="MaxLagMinutes">Maximum lag before alert/retry triggers.</param>
public sealed record OrchestratorSchedule(
string Cron,
string TimeZone,
int MaxParallelRuns,
int MaxLagMinutes);
/// <summary>
/// Rate policy for connector execution.
/// </summary>
/// <param name="Rpm">Requests per minute limit.</param>
/// <param name="Burst">Burst capacity above steady-state RPM.</param>
/// <param name="CooldownSeconds">Cooldown period after burst exhaustion.</param>
public sealed record OrchestratorRatePolicy(
int Rpm,
int Burst,
int CooldownSeconds);
/// <summary>
/// Egress guard configuration for airgap/sealed-mode enforcement.
/// </summary>
/// <param name="Allowlist">Allowed destination hosts.</param>
/// <param name="AirgapMode">When true, block all hosts not in allowlist.</param>
public sealed record OrchestratorEgressGuard(
IReadOnlyList<string> Allowlist,
bool AirgapMode);
/// <summary>
/// Throttle override for runtime rate limiting adjustments.
/// </summary>
/// <param name="Rpm">Overridden RPM limit.</param>
/// <param name="Burst">Overridden burst capacity.</param>
/// <param name="CooldownSeconds">Overridden cooldown period.</param>
/// <param name="ExpiresAt">When the override expires.</param>
public sealed record OrchestratorThrottleOverride(
int? Rpm,
int? Burst,
int? CooldownSeconds,
DateTimeOffset? ExpiresAt);
/// <summary>
/// Backfill range for cursor-based replay.
/// </summary>
/// <param name="FromCursor">Start of backfill range (inclusive).</param>
/// <param name="ToCursor">End of backfill range (inclusive).</param>
public sealed record OrchestratorBackfillRange(
string? FromCursor,
string? ToCursor);
/// <summary>
/// Registry record for a connector.
/// Per prep doc: documents live under the orchestrator collection keyed by connectorId (stable slug).
/// </summary>
/// <param name="Tenant">Tenant identifier; required.</param>
/// <param name="ConnectorId">Unique identifier per tenant + source; immutable, lowercase slug.</param>
/// <param name="Source">Advisory provider source (nvd, ghsa, osv, icscisa, kisa, vendor:slug).</param>
/// <param name="Capabilities">Capability flags: observations, linksets, timeline, attestations.</param>
/// <param name="AuthRef">Reference to secrets store key; never inlined.</param>
/// <param name="Schedule">Scheduling configuration.</param>
/// <param name="RatePolicy">Rate limiting configuration.</param>
/// <param name="ArtifactKinds">Types of artifacts this connector produces.</param>
/// <param name="LockKey">Deterministic lock namespace (concelier:{tenant}:{connectorId}) for single-flight.</param>
/// <param name="EgressGuard">Egress/airgap configuration.</param>
/// <param name="CreatedAt">Record creation timestamp (UTC).</param>
/// <param name="UpdatedAt">Last update timestamp (UTC).</param>
public sealed record OrchestratorRegistryRecord(
string Tenant,
string ConnectorId,
string Source,
IReadOnlyList<string> Capabilities,
string AuthRef,
OrchestratorSchedule Schedule,
OrchestratorRatePolicy RatePolicy,
IReadOnlyList<string> ArtifactKinds,
string LockKey,
OrchestratorEgressGuard EgressGuard,
DateTimeOffset CreatedAt,
DateTimeOffset UpdatedAt);
/// <summary>
/// Heartbeat record from a running connector.
/// Per prep doc: Heartbeat endpoint POST /internal/orch/heartbeat (auth: internal orchestrator role, tenant-scoped).
/// </summary>
/// <param name="Tenant">Tenant identifier.</param>
/// <param name="ConnectorId">Connector identifier.</param>
/// <param name="RunId">Unique run identifier (GUID).</param>
/// <param name="Sequence">Monotonic sequence number for ordering.</param>
/// <param name="Status">Current run status.</param>
/// <param name="Progress">Progress percentage (0-100).</param>
/// <param name="QueueDepth">Current queue depth.</param>
/// <param name="LastArtifactHash">Hash of last produced artifact.</param>
/// <param name="LastArtifactKind">Kind of last produced artifact.</param>
/// <param name="ErrorCode">Error code if status is Failed.</param>
/// <param name="RetryAfterSeconds">Suggested retry delay on failure.</param>
/// <param name="TimestampUtc">Heartbeat timestamp (UTC).</param>
public sealed record OrchestratorHeartbeatRecord(
string Tenant,
string ConnectorId,
Guid RunId,
long Sequence,
OrchestratorHeartbeatStatus Status,
int? Progress,
int? QueueDepth,
string? LastArtifactHash,
string? LastArtifactKind,
string? ErrorCode,
int? RetryAfterSeconds,
DateTimeOffset TimestampUtc);
/// <summary>
/// Command record for orchestrator control messages.
/// Per prep doc: Commands: pause, resume, throttle (rpm/burst override until expiresAt), backfill (range: fromCursor/toCursor).
/// </summary>
/// <param name="Tenant">Tenant identifier.</param>
/// <param name="ConnectorId">Connector identifier.</param>
/// <param name="RunId">Target run identifier.</param>
/// <param name="Sequence">Command sequence for ordering.</param>
/// <param name="Command">Command kind.</param>
/// <param name="Throttle">Throttle override parameters (for Throttle command).</param>
/// <param name="Backfill">Backfill range parameters (for Backfill command).</param>
/// <param name="CreatedAt">Command creation timestamp (UTC).</param>
/// <param name="ExpiresAt">When the command expires.</param>
public sealed record OrchestratorCommandRecord(
string Tenant,
string ConnectorId,
Guid RunId,
long Sequence,
OrchestratorCommandKind Command,
OrchestratorThrottleOverride? Throttle,
OrchestratorBackfillRange? Backfill,
DateTimeOffset CreatedAt,
DateTimeOffset? ExpiresAt);
/// <summary>
/// Run manifest for backfill/replay evidence.
/// Per prep doc: Worker must emit a runManifest per backfill containing: runId, connectorId, tenant, cursorRange, artifactHashes[], dsseEnvelopeHash, completedAt.
/// </summary>
/// <param name="RunId">Unique run identifier.</param>
/// <param name="ConnectorId">Connector identifier.</param>
/// <param name="Tenant">Tenant identifier.</param>
/// <param name="CursorRange">Cursor range covered by this run.</param>
/// <param name="ArtifactHashes">Hashes of all artifacts produced.</param>
/// <param name="DsseEnvelopeHash">DSSE envelope hash if attested.</param>
/// <param name="CompletedAt">Run completion timestamp (UTC).</param>
public sealed record OrchestratorRunManifest(
Guid RunId,
string ConnectorId,
string Tenant,
OrchestratorBackfillRange CursorRange,
IReadOnlyList<string> ArtifactHashes,
string? DsseEnvelopeHash,
DateTimeOffset CompletedAt);
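// Shape sketch (illustrative, not part of this commit): a registry record and a run manifest
// populated the way the prep doc describes. Concrete values (cron, hosts, cursors) are assumptions.
internal static class OrchestratorModelShapeSketch
{
    public static OrchestratorRegistryRecord SampleRegistration(DateTimeOffset now) =>
        new(
            Tenant: "tenant-a",
            ConnectorId: "osv",
            Source: "osv",
            Capabilities: new[] { "observations", "linksets" },
            AuthRef: "secret://concelier/osv/api-token",   // reference into the secrets store, never the secret itself
            Schedule: new OrchestratorSchedule(Cron: "0 */4 * * *", TimeZone: "UTC", MaxParallelRuns: 1, MaxLagMinutes: 240),
            RatePolicy: new OrchestratorRatePolicy(Rpm: 60, Burst: 10, CooldownSeconds: 30),
            ArtifactKinds: new[] { "raw-advisory", "linkset" },
            LockKey: "concelier:tenant-a:osv",
            EgressGuard: new OrchestratorEgressGuard(Allowlist: new[] { "api.osv.dev" }, AirgapMode: false),
            CreatedAt: now,
            UpdatedAt: now);

    public static OrchestratorRunManifest SampleManifest(Guid runId, DateTimeOffset completedAt) =>
        new(
            RunId: runId,
            ConnectorId: "osv",
            Tenant: "tenant-a",
            CursorRange: new OrchestratorBackfillRange(FromCursor: "2025-01-01T00:00:00Z", ToCursor: "2025-02-01T00:00:00Z"),
            ArtifactHashes: new[] { ConnectorExecutionContext.ComputeHash("example-artifact") },
            DsseEnvelopeHash: null,
            CompletedAt: completedAt);
}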


@@ -0,0 +1,268 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Core.Orchestration;
/// <summary>
/// Telemetry for orchestrator operations.
/// Per prep doc: Meter name prefix: StellaOps.Concelier.Orch.
/// </summary>
public sealed class OrchestratorTelemetry : IDisposable
{
public const string MeterName = "StellaOps.Concelier.Orch";
public const string ActivitySourceName = "StellaOps.Concelier.Orch";
private readonly Meter _meter;
private readonly Counter<long> _heartbeatCounter;
private readonly Counter<long> _commandAppliedCounter;
private readonly Histogram<double> _lagHistogram;
private readonly Counter<long> _registrationCounter;
private readonly Counter<long> _backfillStepCounter;
private readonly Histogram<double> _backfillDurationHistogram;
public static readonly ActivitySource ActivitySource = new(ActivitySourceName, "1.0.0");
public OrchestratorTelemetry(IMeterFactory meterFactory)
{
ArgumentNullException.ThrowIfNull(meterFactory);
_meter = meterFactory.Create(MeterName);
// Per prep doc: concelier.orch.heartbeat tags: tenant, connectorId, status
_heartbeatCounter = _meter.CreateCounter<long>(
"concelier.orch.heartbeat",
unit: "{heartbeat}",
description: "Number of heartbeats received from connectors");
// Per prep doc: concelier.orch.command.applied tags: tenant, connectorId, command
_commandAppliedCounter = _meter.CreateCounter<long>(
"concelier.orch.command.applied",
unit: "{command}",
description: "Number of commands applied to connectors");
// Per prep doc: concelier.orch.lag.minutes (now - cursor upper bound) tags: tenant, connectorId
_lagHistogram = _meter.CreateHistogram<double>(
"concelier.orch.lag.minutes",
unit: "min",
description: "Lag in minutes between current time and cursor upper bound");
_registrationCounter = _meter.CreateCounter<long>(
"concelier.orch.registration",
unit: "{registration}",
description: "Number of connector registrations");
_backfillStepCounter = _meter.CreateCounter<long>(
"concelier.orch.backfill.step",
unit: "{step}",
description: "Number of backfill steps executed");
_backfillDurationHistogram = _meter.CreateHistogram<double>(
"concelier.orch.backfill.duration",
unit: "s",
description: "Duration of backfill operations in seconds");
}
/// <summary>
/// Records a heartbeat.
/// </summary>
public void RecordHeartbeat(string tenant, string connectorId, OrchestratorHeartbeatStatus status)
{
_heartbeatCounter.Add(1,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId),
new KeyValuePair<string, object?>("status", status.ToString().ToLowerInvariant()));
}
/// <summary>
/// Records a command application.
/// </summary>
public void RecordCommandApplied(string tenant, string connectorId, OrchestratorCommandKind command)
{
_commandAppliedCounter.Add(1,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId),
new KeyValuePair<string, object?>("command", command.ToString().ToLowerInvariant()));
}
/// <summary>
/// Records connector lag.
/// </summary>
public void RecordLag(string tenant, string connectorId, double lagMinutes)
{
_lagHistogram.Record(lagMinutes,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId));
}
/// <summary>
/// Records a connector registration.
/// </summary>
public void RecordRegistration(string tenant, string connectorId)
{
_registrationCounter.Add(1,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId));
}
/// <summary>
/// Records a backfill step.
/// </summary>
public void RecordBackfillStep(string tenant, string connectorId, bool success)
{
_backfillStepCounter.Add(1,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId),
new KeyValuePair<string, object?>("success", success));
}
/// <summary>
/// Records backfill duration.
/// </summary>
public void RecordBackfillDuration(string tenant, string connectorId, double durationSeconds)
{
_backfillDurationHistogram.Record(durationSeconds,
new KeyValuePair<string, object?>("tenant", tenant),
new KeyValuePair<string, object?>("connectorId", connectorId));
}
// Activity helpers
/// <summary>
/// Starts a connector run activity.
/// </summary>
public static Activity? StartConnectorRun(string tenant, string connectorId, Guid runId)
{
var activity = ActivitySource.StartActivity("concelier.orch.connector.run", ActivityKind.Internal);
activity?.SetTag("tenant", tenant);
activity?.SetTag("connectorId", connectorId);
activity?.SetTag("runId", runId.ToString());
return activity;
}
/// <summary>
/// Starts a heartbeat activity.
/// </summary>
public static Activity? StartHeartbeat(string tenant, string connectorId, Guid runId)
{
var activity = ActivitySource.StartActivity("concelier.orch.heartbeat", ActivityKind.Internal);
activity?.SetTag("tenant", tenant);
activity?.SetTag("connectorId", connectorId);
activity?.SetTag("runId", runId.ToString());
return activity;
}
/// <summary>
/// Starts a command processing activity.
/// </summary>
public static Activity? StartCommandProcessing(string tenant, string connectorId, OrchestratorCommandKind command)
{
var activity = ActivitySource.StartActivity("concelier.orch.command.process", ActivityKind.Internal);
activity?.SetTag("tenant", tenant);
activity?.SetTag("connectorId", connectorId);
activity?.SetTag("command", command.ToString().ToLowerInvariant());
return activity;
}
/// <summary>
/// Starts a backfill activity.
/// </summary>
public static Activity? StartBackfill(string tenant, string connectorId, Guid runId)
{
var activity = ActivitySource.StartActivity("concelier.orch.backfill", ActivityKind.Internal);
activity?.SetTag("tenant", tenant);
activity?.SetTag("connectorId", connectorId);
activity?.SetTag("runId", runId.ToString());
return activity;
}
/// <summary>
/// Starts a registration activity.
/// </summary>
public static Activity? StartRegistration(string tenant, string connectorId)
{
var activity = ActivitySource.StartActivity("concelier.orch.registration", ActivityKind.Internal);
activity?.SetTag("tenant", tenant);
activity?.SetTag("connectorId", connectorId);
return activity;
}
public void Dispose()
{
_meter.Dispose();
}
}
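/// <summary>
/// Usage sketch (illustrative, not part of this commit): recording metrics and wrapping a
/// run in the activity helpers. IMeterFactory comes from DI; in a real host the telemetry
/// instance would typically be a long-lived singleton rather than created per call.
/// </summary>
internal static class OrchestratorTelemetrySketch
{
    public static void RecordRun(IMeterFactory meterFactory, string tenant, string connectorId, Guid runId)
    {
        using var telemetry = new OrchestratorTelemetry(meterFactory);
        using var activity = OrchestratorTelemetry.StartConnectorRun(tenant, connectorId, runId);

        telemetry.RecordHeartbeat(tenant, connectorId, OrchestratorHeartbeatStatus.Running);
        telemetry.RecordLag(tenant, connectorId, lagMinutes: 12.5);
        telemetry.RecordCommandApplied(tenant, connectorId, OrchestratorCommandKind.Throttle);
    }
}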
/// <summary>
/// Log event IDs for orchestrator operations.
/// </summary>
public static class OrchestratorLogEvents
{
// Registration (2000-2099)
public static readonly EventId RegistrationStarted = new(2000, "RegistrationStarted");
public static readonly EventId RegistrationCompleted = new(2001, "RegistrationCompleted");
public static readonly EventId RegistrationFailed = new(2002, "RegistrationFailed");
// Run lifecycle (2100-2199)
public static readonly EventId RunStarted = new(2100, "RunStarted");
public static readonly EventId RunCompleted = new(2101, "RunCompleted");
public static readonly EventId RunFailed = new(2102, "RunFailed");
public static readonly EventId RunPaused = new(2103, "RunPaused");
public static readonly EventId RunResumed = new(2104, "RunResumed");
public static readonly EventId RunThrottled = new(2105, "RunThrottled");
// Heartbeats (2200-2299)
public static readonly EventId HeartbeatReceived = new(2200, "HeartbeatReceived");
public static readonly EventId HeartbeatMissed = new(2201, "HeartbeatMissed");
public static readonly EventId HeartbeatStale = new(2202, "HeartbeatStale");
// Commands (2300-2399)
public static readonly EventId CommandEnqueued = new(2300, "CommandEnqueued");
public static readonly EventId CommandApplied = new(2301, "CommandApplied");
public static readonly EventId CommandExpired = new(2302, "CommandExpired");
public static readonly EventId CommandFailed = new(2303, "CommandFailed");
// Backfill (2400-2499)
public static readonly EventId BackfillStarted = new(2400, "BackfillStarted");
public static readonly EventId BackfillStepCompleted = new(2401, "BackfillStepCompleted");
public static readonly EventId BackfillCompleted = new(2402, "BackfillCompleted");
public static readonly EventId BackfillFailed = new(2403, "BackfillFailed");
public static readonly EventId ManifestCreated = new(2410, "ManifestCreated");
}
/// <summary>
/// Log message templates for orchestrator operations.
/// </summary>
public static class OrchestratorLogMessages
{
// Registration
public const string RegistrationStarted = "Starting connector registration for {ConnectorId} on tenant {Tenant}";
public const string RegistrationCompleted = "Connector {ConnectorId} registered successfully for tenant {Tenant}";
public const string RegistrationFailed = "Failed to register connector {ConnectorId} for tenant {Tenant}: {Error}";
// Run lifecycle
public const string RunStarted = "Connector run {RunId} started for {ConnectorId} on tenant {Tenant}";
public const string RunCompleted = "Connector run {RunId} completed for {ConnectorId}: {ArtifactCount} artifacts";
public const string RunFailed = "Connector run {RunId} failed for {ConnectorId}: {ErrorCode}";
public const string RunPaused = "Connector run {RunId} paused for {ConnectorId}";
public const string RunResumed = "Connector run {RunId} resumed for {ConnectorId}";
public const string RunThrottled = "Connector run {RunId} throttled for {ConnectorId}: RPM={Rpm}";
// Heartbeats
public const string HeartbeatReceived = "Heartbeat received for run {RunId}: status={Status}, progress={Progress}%";
public const string HeartbeatMissed = "Heartbeat missed for run {RunId} on {ConnectorId}";
public const string HeartbeatStale = "Stale heartbeat ignored for run {RunId}: sequence {Sequence} < {LastSequence}";
// Commands
public const string CommandEnqueued = "Command {Command} enqueued for run {RunId} with sequence {Sequence}";
public const string CommandApplied = "Command {Command} applied to run {RunId}";
public const string CommandExpired = "Command {Command} expired for run {RunId}";
public const string CommandFailed = "Failed to apply command {Command} to run {RunId}: {Error}";
// Backfill
public const string BackfillStarted = "Backfill started for {ConnectorId} run {RunId}: [{FromCursor}, {ToCursor}]";
public const string BackfillStepCompleted = "Backfill step {StepNumber} completed: {ArtifactCount} artifacts";
public const string BackfillCompleted = "Backfill completed for {ConnectorId} run {RunId}: {TotalSteps} steps, {TotalArtifacts} artifacts";
public const string BackfillFailed = "Backfill failed for {ConnectorId} run {RunId} at step {StepNumber}: {Error}";
public const string ManifestCreated = "Manifest created for run {RunId}: DSSE hash {DsseHash}";
}
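/// <summary>
/// Logging sketch (illustrative, not part of this commit): the EventId constants pair with the
/// message templates above so run-lifecycle logs stay consistent across call sites.
/// </summary>
internal static class OrchestratorLoggingSketch
{
    public static void LogRunLifecycle(ILogger logger, string tenant, string connectorId, Guid runId)
    {
        logger.LogInformation(OrchestratorLogEvents.RunStarted, OrchestratorLogMessages.RunStarted, runId, connectorId, tenant);
        logger.LogInformation(OrchestratorLogEvents.RunCompleted, OrchestratorLogMessages.RunCompleted, runId, connectorId, 42);
    }
}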


@@ -0,0 +1,398 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Concelier.Core.Signals;
/// <summary>
/// Upstream-provided affected symbol/function for an advisory.
/// Per CONCELIER-SIG-26-001, exposes symbols for reachability scoring
/// while maintaining provenance and avoiding exploitability inference.
/// </summary>
/// <remarks>
/// This model is fact-only: symbols/functions are surfaced exactly as
/// published by the upstream source with full provenance anchors.
/// </remarks>
public sealed record AffectedSymbol(
/// <summary>Tenant identifier.</summary>
string TenantId,
/// <summary>Advisory identifier (e.g., CVE-2024-1234).</summary>
string AdvisoryId,
/// <summary>Source observation identifier.</summary>
string ObservationId,
/// <summary>Fully qualified symbol name (e.g., "lodash.template").</summary>
string Symbol,
/// <summary>Type of symbol.</summary>
AffectedSymbolType SymbolType,
/// <summary>Package URL if available.</summary>
string? Purl,
/// <summary>Module/namespace containing the symbol.</summary>
string? Module,
/// <summary>Class/type containing the symbol (for methods).</summary>
string? ClassName,
/// <summary>File path relative to package root.</summary>
string? FilePath,
/// <summary>Line number in source file.</summary>
int? LineNumber,
/// <summary>Affected version range expression.</summary>
string? VersionRange,
/// <summary>Provenance anchor for traceability.</summary>
AffectedSymbolProvenance Provenance,
/// <summary>Additional attributes from upstream.</summary>
ImmutableDictionary<string, string>? Attributes,
/// <summary>When this symbol was extracted.</summary>
DateTimeOffset ExtractedAt)
{
/// <summary>
/// Creates a function symbol.
/// </summary>
public static AffectedSymbol Function(
string tenantId,
string advisoryId,
string observationId,
string symbol,
AffectedSymbolProvenance provenance,
DateTimeOffset extractedAt,
string? purl = null,
string? module = null,
string? filePath = null,
int? lineNumber = null,
string? versionRange = null)
{
return new AffectedSymbol(
TenantId: tenantId,
AdvisoryId: advisoryId,
ObservationId: observationId,
Symbol: symbol,
SymbolType: AffectedSymbolType.Function,
Purl: purl,
Module: module,
ClassName: null,
FilePath: filePath,
LineNumber: lineNumber,
VersionRange: versionRange,
Provenance: provenance,
Attributes: null,
ExtractedAt: extractedAt);
}
/// <summary>
/// Creates a method symbol.
/// </summary>
public static AffectedSymbol Method(
string tenantId,
string advisoryId,
string observationId,
string symbol,
string className,
AffectedSymbolProvenance provenance,
DateTimeOffset extractedAt,
string? purl = null,
string? module = null,
string? filePath = null,
int? lineNumber = null,
string? versionRange = null)
{
return new AffectedSymbol(
TenantId: tenantId,
AdvisoryId: advisoryId,
ObservationId: observationId,
Symbol: symbol,
SymbolType: AffectedSymbolType.Method,
Purl: purl,
Module: module,
ClassName: className,
FilePath: filePath,
LineNumber: lineNumber,
VersionRange: versionRange,
Provenance: provenance,
Attributes: null,
ExtractedAt: extractedAt);
}
/// <summary>
/// Generates a canonical identifier for this symbol.
/// </summary>
public string CanonicalId => SymbolType switch
{
AffectedSymbolType.Method when ClassName is not null =>
$"{Module ?? "global"}::{ClassName}.{Symbol}",
AffectedSymbolType.Function =>
$"{Module ?? "global"}::{Symbol}",
AffectedSymbolType.Class =>
$"{Module ?? "global"}::{Symbol}",
AffectedSymbolType.Module =>
Symbol,
_ => Symbol
};
/// <summary>
/// Indicates if this symbol has source location information.
/// </summary>
public bool HasSourceLocation => FilePath is not null || LineNumber is not null;
}
/// <summary>
/// Type of affected symbol.
/// </summary>
public enum AffectedSymbolType
{
/// <summary>Unknown symbol type.</summary>
Unknown,
/// <summary>Standalone function.</summary>
Function,
/// <summary>Class method.</summary>
Method,
/// <summary>Affected class/type.</summary>
Class,
/// <summary>Affected module/namespace.</summary>
Module,
/// <summary>Affected package (entire package vulnerable).</summary>
Package,
/// <summary>Affected API endpoint.</summary>
Endpoint
}
/// <summary>
/// Provenance anchor for affected symbol data.
/// </summary>
public sealed record AffectedSymbolProvenance(
/// <summary>Upstream source identifier (e.g., "osv", "nvd", "ghsa").</summary>
string Source,
/// <summary>Vendor/organization that published the data.</summary>
string Vendor,
/// <summary>Hash of the source observation.</summary>
string ObservationHash,
/// <summary>When the data was fetched from upstream.</summary>
DateTimeOffset FetchedAt,
/// <summary>Ingest job identifier if available.</summary>
string? IngestJobId,
/// <summary>Upstream identifier for cross-reference.</summary>
string? UpstreamId,
/// <summary>URL to the upstream advisory.</summary>
string? UpstreamUrl)
{
/// <summary>
/// Creates provenance from OSV data.
/// </summary>
public static AffectedSymbolProvenance FromOsv(
string observationHash,
DateTimeOffset fetchedAt,
string? ingestJobId = null,
string? osvId = null)
{
return new AffectedSymbolProvenance(
Source: "osv",
Vendor: "open-source-vulnerabilities",
ObservationHash: observationHash,
FetchedAt: fetchedAt,
IngestJobId: ingestJobId,
UpstreamId: osvId,
UpstreamUrl: osvId is not null ? $"https://osv.dev/vulnerability/{osvId}" : null);
}
/// <summary>
/// Creates provenance from NVD data.
/// </summary>
public static AffectedSymbolProvenance FromNvd(
string observationHash,
DateTimeOffset fetchedAt,
string? ingestJobId = null,
string? cveId = null)
{
return new AffectedSymbolProvenance(
Source: "nvd",
Vendor: "national-vulnerability-database",
ObservationHash: observationHash,
FetchedAt: fetchedAt,
IngestJobId: ingestJobId,
UpstreamId: cveId,
UpstreamUrl: cveId is not null ? $"https://nvd.nist.gov/vuln/detail/{cveId}" : null);
}
/// <summary>
/// Creates provenance from GitHub Security Advisory.
/// </summary>
public static AffectedSymbolProvenance FromGhsa(
string observationHash,
DateTimeOffset fetchedAt,
string? ingestJobId = null,
string? ghsaId = null)
{
return new AffectedSymbolProvenance(
Source: "ghsa",
Vendor: "github-security-advisories",
ObservationHash: observationHash,
FetchedAt: fetchedAt,
IngestJobId: ingestJobId,
UpstreamId: ghsaId,
UpstreamUrl: ghsaId is not null ? $"https://github.com/advisories/{ghsaId}" : null);
}
}
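// Construction sketch (illustrative, not part of this commit): an OSV-sourced function symbol
// with provenance. The identifiers, hashes, and job/observation ids below are assumptions.
internal static class AffectedSymbolConstructionSketch
{
    public static AffectedSymbol SampleLodashTemplate(DateTimeOffset now)
    {
        var provenance = AffectedSymbolProvenance.FromOsv(
            observationHash: "sha256:0000000000000000000000000000000000000000000000000000000000000000",
            fetchedAt: now,
            ingestJobId: "job-123",
            osvId: "GHSA-35jh-r3h4-6jhm");

        // CanonicalId for this symbol resolves to "lodash::template".
        return AffectedSymbol.Function(
            tenantId: "tenant-a",
            advisoryId: "CVE-2021-23337",
            observationId: "obs-456",
            symbol: "template",
            provenance: provenance,
            extractedAt: now,
            purl: "pkg:npm/lodash",
            module: "lodash",
            filePath: "lodash.js",
            versionRange: "<4.17.21");
    }
}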
/// <summary>
/// Aggregated affected symbols for an advisory.
/// </summary>
public sealed record AffectedSymbolSet(
/// <summary>Tenant identifier.</summary>
string TenantId,
/// <summary>Advisory identifier.</summary>
string AdvisoryId,
/// <summary>All affected symbols from all sources.</summary>
ImmutableArray<AffectedSymbol> Symbols,
/// <summary>Summary of sources contributing symbols.</summary>
ImmutableArray<AffectedSymbolSourceSummary> SourceSummaries,
/// <summary>When this set was computed.</summary>
DateTimeOffset ComputedAt)
{
/// <summary>
/// Creates an empty symbol set.
/// </summary>
public static AffectedSymbolSet Empty(string tenantId, string advisoryId, DateTimeOffset computedAt)
{
return new AffectedSymbolSet(
TenantId: tenantId,
AdvisoryId: advisoryId,
Symbols: ImmutableArray<AffectedSymbol>.Empty,
SourceSummaries: ImmutableArray<AffectedSymbolSourceSummary>.Empty,
ComputedAt: computedAt);
}
/// <summary>
/// Total number of unique symbols.
/// </summary>
public int UniqueSymbolCount => Symbols
.Select(s => s.CanonicalId)
.Distinct()
.Count();
/// <summary>
/// Indicates if any symbols have source location information.
/// </summary>
public bool HasSourceLocations => Symbols.Any(s => s.HasSourceLocation);
/// <summary>
/// Gets symbols by type.
/// </summary>
public ImmutableArray<AffectedSymbol> GetByType(AffectedSymbolType type) =>
Symbols.Where(s => s.SymbolType == type).ToImmutableArray();
/// <summary>
/// Gets symbols from a specific source.
/// </summary>
public ImmutableArray<AffectedSymbol> GetBySource(string source) =>
Symbols.Where(s => s.Provenance.Source.Equals(source, StringComparison.OrdinalIgnoreCase))
.ToImmutableArray();
}
/// <summary>
/// Summary of symbols from a single source.
/// </summary>
public sealed record AffectedSymbolSourceSummary(
/// <summary>Source identifier.</summary>
string Source,
/// <summary>Total symbols from this source.</summary>
int SymbolCount,
/// <summary>Symbols with source location info.</summary>
int WithLocationCount,
/// <summary>Count by symbol type.</summary>
ImmutableDictionary<AffectedSymbolType, int> CountByType,
/// <summary>Latest fetch timestamp from this source.</summary>
DateTimeOffset LatestFetchAt);
/// <summary>
/// Query options for affected symbols.
/// </summary>
public sealed record AffectedSymbolQueryOptions(
/// <summary>Tenant identifier (required).</summary>
string TenantId,
/// <summary>Advisory identifier to filter by.</summary>
string? AdvisoryId = null,
/// <summary>Package URL to filter by.</summary>
string? Purl = null,
/// <summary>Symbol types to include.</summary>
ImmutableArray<AffectedSymbolType>? SymbolTypes = null,
/// <summary>Sources to include.</summary>
ImmutableArray<string>? Sources = null,
/// <summary>Only include symbols with source locations.</summary>
bool? WithLocationOnly = null,
/// <summary>Maximum results to return.</summary>
int? Limit = null,
/// <summary>Offset for pagination.</summary>
int? Offset = null)
{
/// <summary>
/// Default query options for a tenant.
/// </summary>
public static AffectedSymbolQueryOptions ForTenant(string tenantId) => new(TenantId: tenantId);
/// <summary>
/// Query options for a specific advisory.
/// </summary>
public static AffectedSymbolQueryOptions ForAdvisory(string tenantId, string advisoryId) =>
new(TenantId: tenantId, AdvisoryId: advisoryId);
/// <summary>
/// Query options for a specific package.
/// </summary>
public static AffectedSymbolQueryOptions ForPackage(string tenantId, string purl) =>
new(TenantId: tenantId, Purl: purl);
}
/// <summary>
/// Result of an affected symbol query.
/// </summary>
public sealed record AffectedSymbolQueryResult(
/// <summary>Query options used.</summary>
AffectedSymbolQueryOptions Query,
/// <summary>Matching symbols.</summary>
ImmutableArray<AffectedSymbol> Symbols,
/// <summary>Total count (before pagination).</summary>
int TotalCount,
/// <summary>Whether more results are available.</summary>
bool HasMore,
/// <summary>When this result was computed.</summary>
DateTimeOffset ComputedAt);
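// Query sketch (illustrative, not part of this commit): the factory helpers build the common
// filters and `with` narrows them further; the provider in the accompanying file executes them.
internal static class AffectedSymbolQuerySketch
{
    public static AffectedSymbolQueryOptions FunctionsWithLocations(string tenantId, string advisoryId) =>
        AffectedSymbolQueryOptions.ForAdvisory(tenantId, advisoryId) with
        {
            SymbolTypes = ImmutableArray.Create(AffectedSymbolType.Function, AffectedSymbolType.Method),
            WithLocationOnly = true,
            Limit = 50,
        };

    public static int CountOsvSymbols(AffectedSymbolSet set) =>
        set.GetBySource("osv").Length;
}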


@@ -0,0 +1,703 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Concelier.Core.Signals;
/// <summary>
/// Provider interface for upstream-provided affected symbol/function lists.
/// Per CONCELIER-SIG-26-001, exposes symbols for reachability scoring
/// while maintaining provenance; no exploitability inference.
/// </summary>
public interface IAffectedSymbolProvider
{
/// <summary>
/// Gets affected symbols for an advisory.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="advisoryId">Advisory identifier (e.g., CVE-2024-1234).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Aggregated symbol set from all sources.</returns>
Task<AffectedSymbolSet> GetByAdvisoryAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken);
/// <summary>
/// Gets affected symbols for a package.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="purl">Package URL.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Aggregated symbol set from all sources.</returns>
Task<AffectedSymbolSet> GetByPackageAsync(
string tenantId,
string purl,
CancellationToken cancellationToken);
/// <summary>
/// Queries affected symbols with filtering and pagination.
/// </summary>
/// <param name="options">Query options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Query result with matching symbols.</returns>
Task<AffectedSymbolQueryResult> QueryAsync(
AffectedSymbolQueryOptions options,
CancellationToken cancellationToken);
/// <summary>
/// Gets symbols for multiple advisories in batch.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="advisoryIds">Advisory identifiers.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Dictionary of advisory ID to symbol set.</returns>
Task<ImmutableDictionary<string, AffectedSymbolSet>> GetByAdvisoriesBatchAsync(
string tenantId,
IReadOnlyList<string> advisoryIds,
CancellationToken cancellationToken);
/// <summary>
/// Checks if any symbols exist for an advisory.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="advisoryId">Advisory identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if symbols exist.</returns>
Task<bool> HasSymbolsAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken);
}
/// <summary>
/// Storage interface for affected symbols.
/// </summary>
public interface IAffectedSymbolStore
{
/// <summary>
/// Stores affected symbols.
/// </summary>
Task StoreAsync(
IReadOnlyList<AffectedSymbol> symbols,
CancellationToken cancellationToken);
/// <summary>
/// Gets symbols by advisory.
/// </summary>
Task<ImmutableArray<AffectedSymbol>> GetByAdvisoryAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken);
/// <summary>
/// Gets symbols by package.
/// </summary>
Task<ImmutableArray<AffectedSymbol>> GetByPackageAsync(
string tenantId,
string purl,
CancellationToken cancellationToken);
/// <summary>
/// Queries symbols with options.
/// </summary>
Task<(ImmutableArray<AffectedSymbol> Symbols, int TotalCount)> QueryAsync(
AffectedSymbolQueryOptions options,
CancellationToken cancellationToken);
/// <summary>
/// Checks if symbols exist for an advisory.
/// </summary>
Task<bool> ExistsAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken);
}
/// <summary>
/// Extractor interface for extracting symbols from advisory observations.
/// </summary>
public interface IAffectedSymbolExtractor
{
/// <summary>
/// Extracts affected symbols from a raw advisory observation.
/// </summary>
/// <param name="tenantId">Tenant identifier.</param>
/// <param name="advisoryId">Advisory identifier.</param>
/// <param name="observationId">Observation identifier.</param>
/// <param name="observationJson">Raw observation JSON.</param>
/// <param name="provenance">Provenance information.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Extracted symbols.</returns>
Task<ImmutableArray<AffectedSymbol>> ExtractAsync(
string tenantId,
string advisoryId,
string observationId,
string observationJson,
AffectedSymbolProvenance provenance,
CancellationToken cancellationToken);
}
/// <summary>
/// Default implementation of <see cref="IAffectedSymbolProvider"/>.
/// </summary>
public sealed class AffectedSymbolProvider : IAffectedSymbolProvider
{
private readonly IAffectedSymbolStore _store;
private readonly TimeProvider _timeProvider;
private readonly ILogger<AffectedSymbolProvider> _logger;
public AffectedSymbolProvider(
IAffectedSymbolStore store,
TimeProvider timeProvider,
ILogger<AffectedSymbolProvider> logger)
{
_store = store ?? throw new ArgumentNullException(nameof(store));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<AffectedSymbolSet> GetByAdvisoryAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryId);
_logger.LogDebug(
"Getting affected symbols for advisory {AdvisoryId} in tenant {TenantId}",
advisoryId, tenantId);
var symbols = await _store.GetByAdvisoryAsync(tenantId, advisoryId, cancellationToken);
var now = _timeProvider.GetUtcNow();
if (symbols.IsDefaultOrEmpty)
{
return AffectedSymbolSet.Empty(tenantId, advisoryId, now);
}
var sourceSummaries = ComputeSourceSummaries(symbols);
return new AffectedSymbolSet(
TenantId: tenantId,
AdvisoryId: advisoryId,
Symbols: symbols,
SourceSummaries: sourceSummaries,
ComputedAt: now);
}
/// <inheritdoc />
public async Task<AffectedSymbolSet> GetByPackageAsync(
string tenantId,
string purl,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(purl);
_logger.LogDebug(
"Getting affected symbols for package {Purl} in tenant {TenantId}",
purl, tenantId);
var symbols = await _store.GetByPackageAsync(tenantId, purl, cancellationToken);
var now = _timeProvider.GetUtcNow();
if (symbols.IsDefaultOrEmpty)
{
// The purl already carries the "pkg:" scheme; reuse it as the placeholder advisory id for the empty set.
return AffectedSymbolSet.Empty(tenantId, advisoryId: purl, now);
}
// The package may map to several advisories; use the first distinct id (sorted) as the set identifier.
var advisoryId = symbols
.Select(s => s.AdvisoryId)
.Distinct()
.OrderBy(id => id)
.First();
var sourceSummaries = ComputeSourceSummaries(symbols);
return new AffectedSymbolSet(
TenantId: tenantId,
AdvisoryId: advisoryId,
Symbols: symbols,
SourceSummaries: sourceSummaries,
ComputedAt: now);
}
/// <inheritdoc />
public async Task<AffectedSymbolQueryResult> QueryAsync(
AffectedSymbolQueryOptions options,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(options);
ArgumentException.ThrowIfNullOrWhiteSpace(options.TenantId);
_logger.LogDebug(
"Querying affected symbols in tenant {TenantId} with options {@Options}",
options.TenantId, options);
var (symbols, totalCount) = await _store.QueryAsync(options, cancellationToken);
var now = _timeProvider.GetUtcNow();
var offset = options.Offset ?? 0;
var hasMore = offset + symbols.Length < totalCount;
return new AffectedSymbolQueryResult(
Query: options,
Symbols: symbols,
TotalCount: totalCount,
HasMore: hasMore,
ComputedAt: now);
}
/// <inheritdoc />
public async Task<ImmutableDictionary<string, AffectedSymbolSet>> GetByAdvisoriesBatchAsync(
string tenantId,
IReadOnlyList<string> advisoryIds,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentNullException.ThrowIfNull(advisoryIds);
_logger.LogDebug(
"Getting affected symbols for {Count} advisories in tenant {TenantId}",
advisoryIds.Count, tenantId);
var results = ImmutableDictionary.CreateBuilder<string, AffectedSymbolSet>();
// Process in parallel for better performance
var tasks = advisoryIds.Select(async advisoryId =>
{
var symbolSet = await GetByAdvisoryAsync(tenantId, advisoryId, cancellationToken);
return (advisoryId, symbolSet);
});
var completed = await Task.WhenAll(tasks);
foreach (var (advisoryId, symbolSet) in completed)
{
results[advisoryId] = symbolSet;
}
return results.ToImmutable();
}
/// <inheritdoc />
public async Task<bool> HasSymbolsAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryId);
return await _store.ExistsAsync(tenantId, advisoryId, cancellationToken);
}
private static ImmutableArray<AffectedSymbolSourceSummary> ComputeSourceSummaries(
ImmutableArray<AffectedSymbol> symbols)
{
return symbols
.GroupBy(s => s.Provenance.Source, StringComparer.OrdinalIgnoreCase)
.Select(g =>
{
var sourceSymbols = g.ToList();
var countByType = sourceSymbols
.GroupBy(s => s.SymbolType)
.ToImmutableDictionary(
tg => tg.Key,
tg => tg.Count());
return new AffectedSymbolSourceSummary(
Source: g.Key,
SymbolCount: sourceSymbols.Count,
WithLocationCount: sourceSymbols.Count(s => s.HasSourceLocation),
CountByType: countByType,
LatestFetchAt: sourceSymbols.Max(s => s.Provenance.FetchedAt));
})
.OrderByDescending(s => s.SymbolCount)
.ToImmutableArray();
}
}
/// <summary>
/// In-memory implementation of <see cref="IAffectedSymbolStore"/> for testing.
/// </summary>
public sealed class InMemoryAffectedSymbolStore : IAffectedSymbolStore
{
private readonly ConcurrentDictionary<string, List<AffectedSymbol>> _symbolsByTenantAdvisory = new();
private readonly object _lock = new();
/// <inheritdoc />
public Task StoreAsync(
IReadOnlyList<AffectedSymbol> symbols,
CancellationToken cancellationToken)
{
lock (_lock)
{
foreach (var symbol in symbols)
{
var key = $"{symbol.TenantId}:{symbol.AdvisoryId}";
var list = _symbolsByTenantAdvisory.GetOrAdd(key, _ => new List<AffectedSymbol>());
list.Add(symbol);
}
}
return Task.CompletedTask;
}
/// <inheritdoc />
public Task<ImmutableArray<AffectedSymbol>> GetByAdvisoryAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken)
{
var key = $"{tenantId}:{advisoryId}";
if (_symbolsByTenantAdvisory.TryGetValue(key, out var symbols))
{
return Task.FromResult(symbols.ToImmutableArray());
}
return Task.FromResult(ImmutableArray<AffectedSymbol>.Empty);
}
/// <inheritdoc />
public Task<ImmutableArray<AffectedSymbol>> GetByPackageAsync(
string tenantId,
string purl,
CancellationToken cancellationToken)
{
var results = new List<AffectedSymbol>();
foreach (var kvp in _symbolsByTenantAdvisory)
{
foreach (var symbol in kvp.Value)
{
if (symbol.TenantId == tenantId &&
symbol.Purl != null &&
symbol.Purl.Equals(purl, StringComparison.OrdinalIgnoreCase))
{
results.Add(symbol);
}
}
}
return Task.FromResult(results.ToImmutableArray());
}
/// <inheritdoc />
public Task<(ImmutableArray<AffectedSymbol> Symbols, int TotalCount)> QueryAsync(
AffectedSymbolQueryOptions options,
CancellationToken cancellationToken)
{
var query = _symbolsByTenantAdvisory.Values
.SelectMany(list => list)
.Where(s => s.TenantId == options.TenantId);
if (options.AdvisoryId is not null)
{
query = query.Where(s => s.AdvisoryId.Equals(options.AdvisoryId, StringComparison.OrdinalIgnoreCase));
}
if (options.Purl is not null)
{
query = query.Where(s => s.Purl?.Equals(options.Purl, StringComparison.OrdinalIgnoreCase) == true);
}
if (options.SymbolTypes is { IsDefaultOrEmpty: false })
{
query = query.Where(s => options.SymbolTypes.Value.Contains(s.SymbolType));
}
if (options.Sources is { IsDefaultOrEmpty: false })
{
query = query.Where(s => options.Sources.Value.Any(
src => src.Equals(s.Provenance.Source, StringComparison.OrdinalIgnoreCase)));
}
if (options.WithLocationOnly == true)
{
query = query.Where(s => s.HasSourceLocation);
}
var allSymbols = query.ToList();
var totalCount = allSymbols.Count;
var offset = options.Offset ?? 0;
var limit = options.Limit ?? 100;
var paginated = allSymbols
.Skip(offset)
.Take(limit)
.ToImmutableArray();
return Task.FromResult((paginated, totalCount));
}
/// <inheritdoc />
public Task<bool> ExistsAsync(
string tenantId,
string advisoryId,
CancellationToken cancellationToken)
{
var key = $"{tenantId}:{advisoryId}";
return Task.FromResult(
_symbolsByTenantAdvisory.TryGetValue(key, out var symbols) && symbols.Count > 0);
}
/// <summary>
/// Gets the total count of stored symbols.
/// </summary>
public int Count => _symbolsByTenantAdvisory.Values.Sum(list => list.Count);
/// <summary>
/// Clears all stored symbols.
/// </summary>
public void Clear() => _symbolsByTenantAdvisory.Clear();
}
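// Wiring sketch (illustrative, not part of this commit): the in-memory store backs the provider
// for tests; production would register a persistent IAffectedSymbolStore instead.
internal static class AffectedSymbolProviderSketch
{
    public static async Task<bool> StoreAndCheckAsync(
        IReadOnlyList<AffectedSymbol> symbols,
        ILogger<AffectedSymbolProvider> logger,
        CancellationToken cancellationToken)
    {
        var store = new InMemoryAffectedSymbolStore();
        await store.StoreAsync(symbols, cancellationToken);

        var provider = new AffectedSymbolProvider(store, TimeProvider.System, logger);
        if (symbols.Count == 0)
        {
            return false;
        }

        // Fact-only read: the set carries per-source summaries but no exploitability inference.
        var set = await provider.GetByAdvisoryAsync(symbols[0].TenantId, symbols[0].AdvisoryId, cancellationToken);
        return set.UniqueSymbolCount > 0;
    }
}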
/// <summary>
/// Default extractor for affected symbols from OSV-format advisories.
/// </summary>
public sealed class OsvAffectedSymbolExtractor : IAffectedSymbolExtractor
{
private readonly TimeProvider _timeProvider;
private readonly ILogger<OsvAffectedSymbolExtractor> _logger;
public OsvAffectedSymbolExtractor(
TimeProvider timeProvider,
ILogger<OsvAffectedSymbolExtractor> logger)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public Task<ImmutableArray<AffectedSymbol>> ExtractAsync(
string tenantId,
string advisoryId,
string observationId,
string observationJson,
AffectedSymbolProvenance provenance,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryId);
ArgumentException.ThrowIfNullOrWhiteSpace(observationId);
var symbols = ImmutableArray.CreateBuilder<AffectedSymbol>();
var now = _timeProvider.GetUtcNow();
try
{
using var doc = System.Text.Json.JsonDocument.Parse(observationJson);
var root = doc.RootElement;
// Look for OSV "affected" array with ranges and ecosystem_specific symbols
if (root.TryGetProperty("affected", out var affected) &&
affected.ValueKind == System.Text.Json.JsonValueKind.Array)
{
foreach (var affectedEntry in affected.EnumerateArray())
{
var purl = ExtractPurl(affectedEntry);
var versionRange = ExtractVersionRange(affectedEntry);
// Extract symbols from ecosystem_specific or database_specific
ExtractSymbolsFromEcosystemSpecific(
affectedEntry, symbols, tenantId, advisoryId, observationId,
purl, versionRange, provenance, now);
}
}
}
catch (System.Text.Json.JsonException ex)
{
_logger.LogWarning(ex,
"Failed to parse observation JSON for advisory {AdvisoryId}",
advisoryId);
}
return Task.FromResult(symbols.ToImmutable());
}
private static string? ExtractPurl(System.Text.Json.JsonElement affectedEntry)
{
if (affectedEntry.TryGetProperty("package", out var package))
{
if (package.TryGetProperty("purl", out var purlProp))
{
return purlProp.GetString();
}
// Best-effort purl from ecosystem + name (ecosystem is lowercased, not mapped to canonical purl types)
if (package.TryGetProperty("ecosystem", out var ecosystem) &&
package.TryGetProperty("name", out var name))
{
var eco = ecosystem.GetString()?.ToLowerInvariant() ?? "unknown";
var pkgName = name.GetString() ?? "unknown";
return $"pkg:{eco}/{pkgName}";
}
}
return null;
}
private static string? ExtractVersionRange(System.Text.Json.JsonElement affectedEntry)
{
if (affectedEntry.TryGetProperty("ranges", out var ranges) &&
ranges.ValueKind == System.Text.Json.JsonValueKind.Array)
{
foreach (var range in ranges.EnumerateArray())
{
if (range.TryGetProperty("events", out var events) &&
events.ValueKind == System.Text.Json.JsonValueKind.Array)
{
var parts = new List<string>();
foreach (var evt in events.EnumerateArray())
{
if (evt.TryGetProperty("introduced", out var intro))
{
parts.Add($">={intro.GetString()}");
}
if (evt.TryGetProperty("fixed", out var fix))
{
parts.Add($"<{fix.GetString()}");
}
}
if (parts.Count > 0)
{
return string.Join(", ", parts);
}
}
}
}
return null;
}
private void ExtractSymbolsFromEcosystemSpecific(
System.Text.Json.JsonElement affectedEntry,
ImmutableArray<AffectedSymbol>.Builder symbols,
string tenantId,
string advisoryId,
string observationId,
string? purl,
string? versionRange,
AffectedSymbolProvenance provenance,
DateTimeOffset now)
{
// Check ecosystem_specific for symbols
if (affectedEntry.TryGetProperty("ecosystem_specific", out var ecosystemSpecific))
{
ExtractSymbolsFromJson(ecosystemSpecific, symbols, tenantId, advisoryId, observationId,
purl, versionRange, provenance, now);
}
// Check database_specific for symbols
if (affectedEntry.TryGetProperty("database_specific", out var databaseSpecific))
{
ExtractSymbolsFromJson(databaseSpecific, symbols, tenantId, advisoryId, observationId,
purl, versionRange, provenance, now);
}
}
private void ExtractSymbolsFromJson(
System.Text.Json.JsonElement element,
ImmutableArray<AffectedSymbol>.Builder symbols,
string tenantId,
string advisoryId,
string observationId,
string? purl,
string? versionRange,
AffectedSymbolProvenance provenance,
DateTimeOffset now)
{
// Look for common symbol field names
var symbolFields = new[] { "symbols", "functions", "vulnerable_functions", "affected_functions", "methods" };
foreach (var fieldName in symbolFields)
{
if (element.TryGetProperty(fieldName, out var symbolsArray) &&
symbolsArray.ValueKind == System.Text.Json.JsonValueKind.Array)
{
foreach (var symbolEntry in symbolsArray.EnumerateArray())
{
if (symbolEntry.ValueKind == System.Text.Json.JsonValueKind.String)
{
var symbolName = symbolEntry.GetString();
if (!string.IsNullOrWhiteSpace(symbolName))
{
symbols.Add(AffectedSymbol.Function(
tenantId: tenantId,
advisoryId: advisoryId,
observationId: observationId,
symbol: symbolName,
provenance: provenance,
extractedAt: now,
purl: purl,
versionRange: versionRange));
}
}
else if (symbolEntry.ValueKind == System.Text.Json.JsonValueKind.Object)
{
ExtractStructuredSymbol(symbolEntry, symbols, tenantId, advisoryId, observationId,
purl, versionRange, provenance, now);
}
}
}
}
}
private void ExtractStructuredSymbol(
System.Text.Json.JsonElement symbolEntry,
ImmutableArray<AffectedSymbol>.Builder symbols,
string tenantId,
string advisoryId,
string observationId,
string? purl,
string? versionRange,
AffectedSymbolProvenance provenance,
DateTimeOffset now)
{
var name = symbolEntry.TryGetProperty("name", out var nameProp)
? nameProp.GetString()
: symbolEntry.TryGetProperty("symbol", out var symProp)
? symProp.GetString()
: null;
if (string.IsNullOrWhiteSpace(name)) return;
var module = symbolEntry.TryGetProperty("module", out var modProp)
? modProp.GetString()
: null;
var className = symbolEntry.TryGetProperty("class", out var classProp)
? classProp.GetString()
: null;
var filePath = symbolEntry.TryGetProperty("file", out var fileProp)
? fileProp.GetString()
: null;
var lineNumber = symbolEntry.TryGetProperty("line", out var lineProp) && lineProp.TryGetInt32(out var line)
? (int?)line
: null;
var symbolType = className is not null ? AffectedSymbolType.Method : AffectedSymbolType.Function;
symbols.Add(new AffectedSymbol(
TenantId: tenantId,
AdvisoryId: advisoryId,
ObservationId: observationId,
Symbol: name,
SymbolType: symbolType,
Purl: purl,
Module: module,
ClassName: className,
FilePath: filePath,
LineNumber: lineNumber,
VersionRange: versionRange,
Provenance: provenance,
Attributes: null,
ExtractedAt: now));
}
}

View File

@@ -0,0 +1,73 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Concelier.Core.Signals;
/// <summary>
/// Service collection extensions for signals-related services.
/// </summary>
public static class SignalsServiceCollectionExtensions
{
/// <summary>
/// Adds affected symbol services to the service collection.
/// Per CONCELIER-SIG-26-001, exposes upstream-provided affected symbol/function
/// lists for reachability scoring while maintaining provenance.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddConcelierSignalsServices(this IServiceCollection services)
{
// Register affected symbol store (in-memory by default; replace with MongoDB in production)
services.TryAddSingleton<IAffectedSymbolStore, InMemoryAffectedSymbolStore>();
// Register affected symbol provider
services.TryAddSingleton<IAffectedSymbolProvider, AffectedSymbolProvider>();
// Register OSV symbol extractor
services.TryAddSingleton<IAffectedSymbolExtractor, OsvAffectedSymbolExtractor>();
// TimeProvider is typically registered elsewhere, but ensure it exists
services.TryAddSingleton(TimeProvider.System);
return services;
}
/// <summary>
/// Adds a custom implementation of <see cref="IAffectedSymbolStore"/>.
/// </summary>
/// <typeparam name="TStore">The store implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddAffectedSymbolStore<TStore>(this IServiceCollection services)
where TStore : class, IAffectedSymbolStore
{
services.AddSingleton<IAffectedSymbolStore, TStore>();
return services;
}
/// <summary>
/// Adds a custom implementation of <see cref="IAffectedSymbolProvider"/>.
/// </summary>
/// <typeparam name="TProvider">The provider implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddAffectedSymbolProvider<TProvider>(this IServiceCollection services)
where TProvider : class, IAffectedSymbolProvider
{
services.AddSingleton<IAffectedSymbolProvider, TProvider>();
return services;
}
/// <summary>
/// Adds a custom implementation of <see cref="IAffectedSymbolExtractor"/>.
/// </summary>
/// <typeparam name="TExtractor">The extractor implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddAffectedSymbolExtractor<TExtractor>(this IServiceCollection services)
where TExtractor : class, IAffectedSymbolExtractor
{
services.AddSingleton<IAffectedSymbolExtractor, TExtractor>();
return services;
}
}
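// Wiring sketch (MongoAffectedSymbolStore is a hypothetical replacement type):
//
//   services.AddConcelierSignalsServices();                      // registers in-memory defaults via TryAdd*
//   services.AddAffectedSymbolStore<MongoAffectedSymbolStore>(); // last IAffectedSymbolStore registration wins on resolve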

View File

@@ -8,7 +8,6 @@
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />

View File

@@ -0,0 +1,190 @@
using System.Collections;
using System.Text.Json;
namespace MongoDB.Bson
{
public readonly struct ObjectId : IEquatable<ObjectId>
{
public Guid Value { get; }
public ObjectId(Guid value) => Value = value;
public ObjectId(string value) => Value = Guid.TryParse(value, out var g) ? g : Guid.Empty;
public static ObjectId GenerateNewId() => new(Guid.NewGuid());
public static ObjectId Empty => new(Guid.Empty);
public bool Equals(ObjectId other) => Value.Equals(other.Value);
public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
public override int GetHashCode() => Value.GetHashCode();
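// The stub wraps a Guid, so ToString() yields 32 hex chars rather than Mongo's 24-char ObjectId form.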
public override string ToString() => Value.ToString("N");
public static bool operator ==(ObjectId left, ObjectId right) => left.Equals(right);
public static bool operator !=(ObjectId left, ObjectId right) => !left.Equals(right);
}
public enum BsonType { Document, Array, String, Boolean, Int32, Int64, Double, DateTime, Guid, Null }
public class BsonValue
{
protected internal readonly object? _value; // internal so sibling wrappers (e.g. BsonDocument.Unwrap) can read the raw value
public BsonValue(object? value) => _value = value;
public virtual BsonType BsonType => _value switch
{
null => BsonType.Null,
BsonDocument => BsonType.Document,
BsonArray => BsonType.Array,
string => BsonType.String,
bool => BsonType.Boolean,
int => BsonType.Int32,
long => BsonType.Int64,
double => BsonType.Double,
DateTime => BsonType.DateTime,
Guid => BsonType.Guid,
_ => BsonType.Null
};
public bool IsString => _value is string;
public bool IsBsonDocument => _value is BsonDocument;
public bool IsBsonArray => _value is BsonArray;
public string AsString => _value?.ToString() ?? string.Empty;
public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException();
public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException();
public Guid AsGuid => _value is Guid g ? g : Guid.Empty;
public DateTime AsDateTime => _value is DateTime dt ? dt : DateTime.MinValue;
public int AsInt32 => _value is int i ? i : 0;
public long AsInt64 => _value is long l ? l : 0;
public double AsDouble => _value is double d ? d : 0d;
public bool AsBoolean => _value is bool b && b;
public override string ToString() => _value?.ToString() ?? string.Empty;
}
public class BsonString : BsonValue { public BsonString(string value) : base(value) { } }
public class BsonBoolean : BsonValue { public BsonBoolean(bool value) : base(value) { } }
public class BsonInt32 : BsonValue { public BsonInt32(int value) : base(value) { } }
public class BsonInt64 : BsonValue { public BsonInt64(long value) : base(value) { } }
public class BsonDouble : BsonValue { public BsonDouble(double value) : base(value) { } }
public class BsonDateTime : BsonValue { public BsonDateTime(DateTime value) : base(value) { } }
public class BsonArray : BsonValue, IEnumerable<BsonValue>
{
private readonly List<BsonValue> _items = new();
public BsonArray() : base(null) { }
public BsonArray(IEnumerable<BsonValue> values) : this() => _items.AddRange(values);
public void Add(BsonValue value) => _items.Add(value);
public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
public BsonValue this[int index] { get => _items[index]; set => _items[index] = value; }
public int Count => _items.Count;
}
public class BsonDocument : BsonValue, IEnumerable<KeyValuePair<string, BsonValue>>
{
private readonly Dictionary<string, BsonValue> _values = new(StringComparer.Ordinal);
public BsonDocument() : base(null) { }
public BsonDocument(string key, object? value) : this() => _values[key] = Wrap(value);
public BsonDocument(IEnumerable<KeyValuePair<string, object?>> pairs) : this()
{
foreach (var kvp in pairs)
{
_values[kvp.Key] = Wrap(kvp.Value);
}
}
private static BsonValue Wrap(object? value) => value switch
{
BsonValue v => v,
IEnumerable<BsonValue> enumerable => new BsonArray(enumerable),
IEnumerable<object?> objEnum => new BsonArray(objEnum.Select(Wrap)),
_ => new BsonValue(value)
};
public BsonValue this[string key]
{
get => _values[key];
set => _values[key] = value;
}
public int ElementCount => _values.Count;
public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);
public void Add(string key, BsonValue value) => _values[key] = value;
public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
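// Clone copies entries by reference; nested documents/arrays stay shared, which is sufficient for these in-memory test doubles.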
public BsonDocument DeepClone()
{
var clone = new BsonDocument();
foreach (var kvp in _values)
{
clone[kvp.Key] = kvp.Value;
}
return clone;
}
public static BsonDocument Parse(string json)
{
using var doc = JsonDocument.Parse(json);
return FromElement(doc.RootElement);
}
private static BsonDocument FromElement(JsonElement element)
{
var doc = new BsonDocument();
foreach (var prop in element.EnumerateObject())
{
doc[prop.Name] = FromJsonValue(prop.Value);
}
return doc;
}
private static BsonValue FromJsonValue(JsonElement element) => element.ValueKind switch
{
JsonValueKind.Object => FromElement(element),
JsonValueKind.Array => new BsonArray(element.EnumerateArray().Select(FromJsonValue)),
JsonValueKind.String => new BsonString(element.GetString() ?? string.Empty),
JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonInt64(l) : new BsonDouble(element.GetDouble()),
JsonValueKind.True => new BsonBoolean(true),
JsonValueKind.False => new BsonBoolean(false),
JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
_ => new BsonValue(null)
};
public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings = null)
{
var dict = _values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value));
return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web));
}
private static object? Unwrap(BsonValue value) => value switch
{
BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)),
BsonArray array => array.Select(Unwrap).ToArray(),
_ => value._value
};
}
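// Round-trip sketch for the stub (web-default JSON; JsonWriterSettings are ignored):
//   var doc = BsonDocument.Parse("{\"advisoryKey\":\"GHSA-xxxx-xxxx\",\"score\":7}");
//   var key = doc["advisoryKey"].AsString;  // "GHSA-xxxx-xxxx"
//   var json = doc.ToJson();                // re-serialised via System.Text.Json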
}
namespace MongoDB.Bson.IO
{
public enum JsonOutputMode { Strict, RelaxedExtendedJson }
public class JsonWriterSettings
{
public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict;
}
}
namespace MongoDB.Driver
{
public interface IClientSessionHandle { }
public class MongoCommandException : Exception
{
public string CodeName { get; }
public MongoCommandException(string codeName, string message) : base(message) => CodeName = codeName;
}
public class GridFSFileNotFoundException : Exception
{
public GridFSFileNotFoundException() { }
public GridFSFileNotFoundException(string message) : base(message) { }
}
public class MongoClient
{
public MongoClient(string connectionString) { }
}
}

View File

@@ -0,0 +1,354 @@
using System.Collections.Concurrent;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Storage.Mongo
{
public static class DocumentStatuses
{
public const string PendingParse = "pending_parse";
public const string PendingMap = "pending_map";
public const string Mapped = "mapped";
public const string Failed = "failed";
}
public sealed record MongoStorageOptions
{
public string DefaultTenant { get; init; } = "default";
public TimeSpan RawDocumentRetention { get; init; } = TimeSpan.Zero;
public TimeSpan RawDocumentRetentionTtlGrace { get; init; } = TimeSpan.Zero;
public TimeSpan RawDocumentRetentionSweepInterval { get; init; } = TimeSpan.FromHours(1);
public string ConnectionString { get; init; } = string.Empty;
public string DatabaseName { get; init; } = "concelier";
}
public sealed record DocumentRecord(
Guid Id,
string SourceName,
string Uri,
DateTimeOffset CreatedAt,
string Sha256,
string Status,
string? ContentType = null,
IReadOnlyDictionary<string, string>? Headers = null,
IReadOnlyDictionary<string, string>? Metadata = null,
string? Etag = null,
DateTimeOffset? LastModified = null,
MongoDB.Bson.ObjectId? GridFsId = null,
DateTimeOffset? ExpiresAt = null);
public interface IDocumentStore
{
Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
}
public sealed class InMemoryDocumentStore : IDocumentStore
{
private readonly ConcurrentDictionary<(string Source, string Uri), DocumentRecord> _records = new();
private readonly ConcurrentDictionary<Guid, DocumentRecord> _byId = new();
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
_records.TryGetValue((sourceName, uri), out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
_byId.TryGetValue(id, out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
_records[(record.SourceName, record.Uri)] = record;
_byId[record.Id] = record;
return Task.FromResult(record);
}
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
if (_byId.TryGetValue(id, out var existing))
{
var updated = existing with { Status = status };
_byId[id] = updated;
_records[(existing.SourceName, existing.Uri)] = updated;
}
return Task.CompletedTask;
}
}
public sealed record DtoRecord(
Guid Id,
Guid DocumentId,
string SourceName,
string Format,
MongoDB.Bson.BsonDocument Payload,
DateTimeOffset CreatedAt);
public interface IDtoStore
{
Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
}
public sealed class InMemoryDtoStore : IDtoStore
{
private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();
public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
_records[record.DocumentId] = record;
return Task.FromResult(record);
}
public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
{
_records.TryGetValue(documentId, out var record);
return Task.FromResult<DtoRecord?>(record);
}
}
public sealed class RawDocumentStorage
{
private readonly ConcurrentDictionary<MongoDB.Bson.ObjectId, byte[]> _blobs = new();
public Task<MongoDB.Bson.ObjectId> UploadAsync(string sourceName, string uri, byte[] content, string? contentType, DateTimeOffset? expiresAt, CancellationToken cancellationToken)
{
var id = MongoDB.Bson.ObjectId.GenerateNewId();
_blobs[id] = content.ToArray();
return Task.FromResult(id);
}
public Task<MongoDB.Bson.ObjectId> UploadAsync(string sourceName, string uri, byte[] content, string? contentType, CancellationToken cancellationToken)
=> UploadAsync(sourceName, uri, content, contentType, null, cancellationToken);
public Task<byte[]> DownloadAsync(MongoDB.Bson.ObjectId id, CancellationToken cancellationToken)
{
if (_blobs.TryGetValue(id, out var bytes))
{
return Task.FromResult(bytes);
}
throw new MongoDB.Driver.GridFSFileNotFoundException($"Blob {id} not found.");
}
public Task DeleteAsync(MongoDB.Bson.ObjectId id, CancellationToken cancellationToken)
{
_blobs.TryRemove(id, out _);
return Task.CompletedTask;
}
}
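// Sketch (source names and URIs are illustrative):
//   var id = await storage.UploadAsync("nvd", "https://example.test/cve.json", bytes, "application/json", ct);
//   var payload = await storage.DownloadAsync(id, ct);  // throws GridFSFileNotFoundException for unknown ids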
public sealed record SourceStateRecord(string SourceName, MongoDB.Bson.BsonDocument? Cursor, DateTimeOffset UpdatedAt);
public interface ISourceStateRepository
{
Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken);
Task UpdateCursorAsync(string sourceName, MongoDB.Bson.BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken);
Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken);
}
public sealed class InMemorySourceStateRepository : ISourceStateRepository
{
private readonly ConcurrentDictionary<string, SourceStateRecord> _states = new(StringComparer.OrdinalIgnoreCase);
public Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken)
{
_states.TryGetValue(sourceName, out var record);
return Task.FromResult<SourceStateRecord?>(record);
}
public Task UpdateCursorAsync(string sourceName, MongoDB.Bson.BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken)
{
_states[sourceName] = new SourceStateRecord(sourceName, cursor.DeepClone(), completedAt);
return Task.CompletedTask;
}
public Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken)
{
_states[sourceName] = new SourceStateRecord(sourceName, null, now.Add(backoff));
return Task.CompletedTask;
}
}
}
namespace StellaOps.Concelier.Storage.Mongo.Aliases
{
public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value);
public interface IAliasStore
{
Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken);
Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken);
}
public sealed class InMemoryAliasStore : IAliasStore
{
private readonly ConcurrentDictionary<string, List<AliasRecord>> _byAdvisory = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<(string Scheme, string Value), List<AliasRecord>> _byAlias = new();
public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
{
_byAdvisory.TryGetValue(advisoryKey, out var records);
return Task.FromResult<IReadOnlyList<AliasRecord>>(records ?? Array.Empty<AliasRecord>());
}
public Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken)
{
_byAlias.TryGetValue((scheme, value), out var records);
return Task.FromResult<IReadOnlyList<AliasRecord>>(records ?? Array.Empty<AliasRecord>());
}
}
}
namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory
{
public sealed record ChangeHistoryFieldChange(string Field, string ChangeType, string? PreviousValue, string? CurrentValue);
public sealed record ChangeHistoryRecord(
Guid Id,
string SourceName,
string AdvisoryKey,
Guid DocumentId,
string DocumentHash,
string SnapshotHash,
string PreviousSnapshotHash,
string Snapshot,
string PreviousSnapshot,
IReadOnlyList<ChangeHistoryFieldChange> Changes,
DateTimeOffset CreatedAt);
public interface IChangeHistoryStore
{
Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken);
}
public sealed class InMemoryChangeHistoryStore : IChangeHistoryStore
{
private readonly ConcurrentBag<ChangeHistoryRecord> _records = new();
public Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken)
{
_records.Add(record);
return Task.CompletedTask;
}
}
}
namespace StellaOps.Concelier.Storage.Mongo.Exporting
{
public sealed record ExportFileRecord(string Path, long Length, string Digest);
public sealed record ExportStateRecord(
string Id,
string ExportCursor,
string? LastFullDigest,
string? LastDeltaDigest,
string? BaseExportId,
string? BaseDigest,
string? TargetRepository,
IReadOnlyList<ExportFileRecord> Files,
string ExporterVersion,
DateTimeOffset UpdatedAt);
public interface IExportStateStore
{
Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken);
Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken);
}
public sealed class ExportStateManager
{
private readonly IExportStateStore _store;
private readonly TimeProvider _timeProvider;
public ExportStateManager(IExportStateStore store, TimeProvider? timeProvider = null)
{
_store = store ?? throw new ArgumentNullException(nameof(store));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task<ExportStateRecord?> GetAsync(string id, CancellationToken cancellationToken)
=> _store.FindAsync(id, cancellationToken);
public Task StoreFullExportAsync(
string id,
string exportId,
string digest,
string? cursor,
string? targetRepository,
string exporterVersion,
bool resetBaseline,
IReadOnlyList<ExportFileRecord> manifest,
CancellationToken cancellationToken)
{
var record = new ExportStateRecord(
id,
cursor ?? digest,
digest,
LastDeltaDigest: null,
BaseExportId: resetBaseline ? exportId : null,
BaseDigest: resetBaseline ? digest : null,
targetRepository,
manifest,
exporterVersion,
_timeProvider.GetUtcNow());
return _store.UpsertAsync(record, cancellationToken);
}
public Task StoreDeltaExportAsync(
string id,
string deltaDigest,
string? cursor,
string exporterVersion,
IReadOnlyList<ExportFileRecord> manifest,
CancellationToken cancellationToken)
{
var record = new ExportStateRecord(
id,
cursor ?? deltaDigest,
LastFullDigest: null,
LastDeltaDigest: deltaDigest,
BaseExportId: null,
BaseDigest: null,
TargetRepository: null,
manifest,
exporterVersion,
_timeProvider.GetUtcNow());
return _store.UpsertAsync(record, cancellationToken);
}
}
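// Sketch: recording a full export baseline (identifiers are illustrative):
//   await manager.StoreFullExportAsync(
//       id: "export:json", exportId: "2025-12-07-full", digest: "sha256:aaaa",
//       cursor: null, targetRepository: "registry.local/concelier", exporterVersion: "1.0.0",
//       resetBaseline: true, manifest: Array.Empty<ExportFileRecord>(), cancellationToken: ct);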
public sealed class InMemoryExportStateStore : IExportStateStore
{
private readonly ConcurrentDictionary<string, ExportStateRecord> _records = new(StringComparer.OrdinalIgnoreCase);
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
{
_records.TryGetValue(id, out var record);
return Task.FromResult<ExportStateRecord?>(record);
}
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_records[record.Id] = record;
return Task.FromResult(record);
}
}
}
namespace StellaOps.Concelier.Storage.Mongo.MergeEvents
{
public sealed record MergeEventRecord(string AdvisoryKey, string EventType, DateTimeOffset CreatedAt);
}
namespace StellaOps.Concelier.Storage.Mongo
{
public static class MongoStorageDefaults
{
public static class Collections
{
public const string AdvisoryStatements = "advisory_statements";
public const string AdvisoryRaw = "advisory_raw";
}
}
}

View File

@@ -6,7 +6,4 @@
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Bson" Version="3.5.0" />
</ItemGroup>
</Project>

View File

@@ -1,21 +1,12 @@
using System.Text.Json;
using MongoDB.Bson;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Conversion;
/// <summary>
/// Converts MongoDB advisory documents to PostgreSQL entity structures.
/// This converter handles the transformation from MongoDB's document-based storage
/// to PostgreSQL's relational structure with normalized child tables.
/// Converts domain advisories to PostgreSQL entity structures.
/// </summary>
/// <remarks>
/// Task: PG-T5b.1.1 - Build AdvisoryConverter to parse MongoDB documents
/// Task: PG-T5b.1.2 - Map to relational structure with child tables
/// Task: PG-T5b.1.3 - Preserve provenance JSONB
/// Task: PG-T5b.1.4 - Handle version ranges (keep as JSONB)
/// </remarks>
public sealed class AdvisoryConverter
{
private static readonly JsonSerializerOptions JsonOptions = new()
@@ -25,86 +16,8 @@ public sealed class AdvisoryConverter
};
/// <summary>
/// Converts a MongoDB BsonDocument payload to PostgreSQL entities.
/// Converts an Advisory domain model to PostgreSQL entities.
/// </summary>
/// <param name="payload">The MongoDB advisory payload (BsonDocument).</param>
/// <param name="sourceId">Optional source ID to associate with the advisory.</param>
/// <returns>A conversion result containing the main entity and all child entities.</returns>
public AdvisoryConversionResult Convert(BsonDocument payload, Guid? sourceId = null)
{
ArgumentNullException.ThrowIfNull(payload);
var advisoryKey = payload.GetValue("advisoryKey", defaultValue: null)?.AsString
?? throw new InvalidOperationException("advisoryKey missing from payload.");
var title = payload.GetValue("title", defaultValue: null)?.AsString ?? advisoryKey;
var summary = TryGetString(payload, "summary");
var description = TryGetString(payload, "description");
var severity = TryGetString(payload, "severity");
var published = TryReadDateTime(payload, "published");
var modified = TryReadDateTime(payload, "modified");
// Extract primary vulnerability ID from aliases (first CVE if available)
var aliases = ExtractAliases(payload);
var cveAlias = aliases.FirstOrDefault(a => a.AliasType == "cve");
var firstAlias = aliases.FirstOrDefault();
var primaryVulnId = cveAlias != default ? cveAlias.AliasValue
: (firstAlias != default ? firstAlias.AliasValue : advisoryKey);
// Extract provenance and serialize to JSONB
var provenanceJson = ExtractProvenanceJson(payload);
// Create the main advisory entity
var advisoryId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
var advisory = new AdvisoryEntity
{
Id = advisoryId,
AdvisoryKey = advisoryKey,
PrimaryVulnId = primaryVulnId,
SourceId = sourceId,
Title = title,
Summary = summary,
Description = description,
Severity = severity,
PublishedAt = published,
ModifiedAt = modified,
WithdrawnAt = null,
Provenance = provenanceJson,
RawPayload = payload.ToJson(),
CreatedAt = now,
UpdatedAt = now
};
// Convert all child entities
var aliasEntities = ConvertAliases(advisoryId, aliases, now);
var cvssEntities = ConvertCvss(advisoryId, payload, now);
var affectedEntities = ConvertAffected(advisoryId, payload, now);
var referenceEntities = ConvertReferences(advisoryId, payload, now);
var creditEntities = ConvertCredits(advisoryId, payload, now);
var weaknessEntities = ConvertWeaknesses(advisoryId, payload, now);
var kevFlags = ConvertKevFlags(advisoryId, payload, now);
return new AdvisoryConversionResult
{
Advisory = advisory,
Aliases = aliasEntities,
Cvss = cvssEntities,
Affected = affectedEntities,
References = referenceEntities,
Credits = creditEntities,
Weaknesses = weaknessEntities,
KevFlags = kevFlags
};
}
/// <summary>
/// Converts an Advisory domain model directly to PostgreSQL entities.
/// </summary>
/// <param name="advisory">The Advisory domain model.</param>
/// <param name="sourceId">Optional source ID.</param>
/// <returns>A conversion result containing all entities.</returns>
public AdvisoryConversionResult ConvertFromDomain(Advisory advisory, Guid? sourceId = null)
{
ArgumentNullException.ThrowIfNull(advisory);
@@ -112,13 +25,11 @@ public sealed class AdvisoryConverter
var advisoryId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
// Determine primary vulnerability ID
var primaryVulnId = advisory.Aliases
.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
?? advisory.Aliases.FirstOrDefault()
?? advisory.AdvisoryKey;
// Serialize provenance to JSON
var provenanceJson = JsonSerializer.Serialize(advisory.Provenance, JsonOptions);
var entity = new AdvisoryEntity
@@ -140,7 +51,7 @@ public sealed class AdvisoryConverter
UpdatedAt = now
};
// Convert aliases
// Aliases
var aliasEntities = new List<AdvisoryAliasEntity>();
var isPrimarySet = false;
foreach (var alias in advisory.Aliases)
@@ -160,7 +71,7 @@ public sealed class AdvisoryConverter
});
}
// Convert CVSS metrics
// CVSS
var cvssEntities = new List<AdvisoryCvssEntity>();
var isPrimaryCvss = true;
foreach (var metric in advisory.CvssMetrics)
@@ -182,7 +93,7 @@ public sealed class AdvisoryConverter
isPrimaryCvss = false;
}
// Convert affected packages
// Affected packages
var affectedEntities = new List<AdvisoryAffectedEntity>();
foreach (var pkg in advisory.AffectedPackages)
{
@@ -204,48 +115,60 @@ public sealed class AdvisoryConverter
});
}
// Convert references
var referenceEntities = new List<AdvisoryReferenceEntity>();
foreach (var reference in advisory.References)
// References
var referenceEntities = advisory.References.Select(reference => new AdvisoryReferenceEntity
{
referenceEntities.Add(new AdvisoryReferenceEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
RefType = reference.Kind ?? "web",
Url = reference.Url,
CreatedAt = now
});
}
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
RefType = reference.Kind ?? "web",
Url = reference.Url,
CreatedAt = now
}).ToList();
// Convert credits
var creditEntities = new List<AdvisoryCreditEntity>();
foreach (var credit in advisory.Credits)
// Credits
var creditEntities = advisory.Credits.Select(credit => new AdvisoryCreditEntity
{
creditEntities.Add(new AdvisoryCreditEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Name = credit.DisplayName,
Contact = credit.Contacts.FirstOrDefault(),
CreditType = credit.Role,
CreatedAt = now
});
}
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Name = credit.DisplayName,
Contact = credit.Contacts.FirstOrDefault(),
CreditType = credit.Role,
CreatedAt = now
}).ToList();
// Convert weaknesses
var weaknessEntities = new List<AdvisoryWeaknessEntity>();
foreach (var weakness in advisory.Cwes)
// Weaknesses
var weaknessEntities = advisory.Cwes.Select(weakness => new AdvisoryWeaknessEntity
{
weaknessEntities.Add(new AdvisoryWeaknessEntity
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CweId = weakness.Identifier,
Description = weakness.Name,
Source = weakness.Provenance.FirstOrDefault()?.Source,
CreatedAt = now
}).ToList();
// KEV flags from domain data
var kevFlags = new List<KevFlagEntity>();
if (advisory.ExploitKnown)
{
var cveId = advisory.Aliases.FirstOrDefault(a => a.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(cveId))
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CweId = weakness.Identifier,
Description = weakness.Name,
Source = weakness.Provenance.FirstOrDefault()?.Source,
CreatedAt = now
});
kevFlags.Add(new KevFlagEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CveId = cveId,
VendorProject = null,
Product = null,
VulnerabilityName = advisory.Title,
DateAdded = DateOnly.FromDateTime(now.UtcDateTime),
DueDate = null,
KnownRansomwareUse = false,
Notes = null,
CreatedAt = now
});
}
}
return new AdvisoryConversionResult
@@ -257,32 +180,10 @@ public sealed class AdvisoryConverter
References = referenceEntities,
Credits = creditEntities,
Weaknesses = weaknessEntities,
KevFlags = new List<KevFlagEntity>()
KevFlags = kevFlags
};
}
private static List<(string AliasType, string AliasValue, bool IsPrimary)> ExtractAliases(BsonDocument payload)
{
var result = new List<(string AliasType, string AliasValue, bool IsPrimary)>();
if (!payload.TryGetValue("aliases", out var aliasValue) || aliasValue is not BsonArray aliasArray)
{
return result;
}
var isPrimarySet = false;
foreach (var alias in aliasArray.OfType<BsonValue>().Where(x => x.IsString).Select(x => x.AsString))
{
var aliasType = DetermineAliasType(alias);
var isPrimary = !isPrimarySet && aliasType == "cve";
if (isPrimary) isPrimarySet = true;
result.Add((aliasType, alias, isPrimary));
}
return result;
}
private static string DetermineAliasType(string alias)
{
if (alias.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
@@ -305,288 +206,8 @@ public sealed class AdvisoryConverter
return "other";
}
private static string ExtractProvenanceJson(BsonDocument payload)
{
if (!payload.TryGetValue("provenance", out var provenanceValue) || provenanceValue is not BsonArray provenanceArray)
{
return "[]";
}
return provenanceArray.ToJson();
}
private static List<AdvisoryAliasEntity> ConvertAliases(
Guid advisoryId,
List<(string AliasType, string AliasValue, bool IsPrimary)> aliases,
DateTimeOffset now)
{
return aliases.Select(a => new AdvisoryAliasEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
AliasType = a.AliasType,
AliasValue = a.AliasValue,
IsPrimary = a.IsPrimary,
CreatedAt = now
}).ToList();
}
private static List<AdvisoryCvssEntity> ConvertCvss(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
var result = new List<AdvisoryCvssEntity>();
if (!payload.TryGetValue("cvssMetrics", out var cvssValue) || cvssValue is not BsonArray cvssArray)
{
return result;
}
var isPrimary = true;
foreach (var doc in cvssArray.OfType<BsonDocument>())
{
var version = doc.GetValue("version", defaultValue: null)?.AsString;
var vector = doc.GetValue("vector", defaultValue: null)?.AsString;
var baseScore = doc.TryGetValue("baseScore", out var scoreValue) && scoreValue.IsNumeric
? (decimal)scoreValue.ToDouble()
: 0m;
var baseSeverity = TryGetString(doc, "baseSeverity");
var source = doc.TryGetValue("provenance", out var provValue) && provValue.IsBsonDocument
? TryGetString(provValue.AsBsonDocument, "source")
: null;
if (string.IsNullOrEmpty(version) || string.IsNullOrEmpty(vector))
continue;
result.Add(new AdvisoryCvssEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CvssVersion = version,
VectorString = vector,
BaseScore = baseScore,
BaseSeverity = baseSeverity,
ExploitabilityScore = null,
ImpactScore = null,
Source = source,
IsPrimary = isPrimary,
CreatedAt = now
});
isPrimary = false;
}
return result;
}
private static List<AdvisoryAffectedEntity> ConvertAffected(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
var result = new List<AdvisoryAffectedEntity>();
if (!payload.TryGetValue("affectedPackages", out var affectedValue) || affectedValue is not BsonArray affectedArray)
{
return result;
}
foreach (var doc in affectedArray.OfType<BsonDocument>())
{
var type = doc.GetValue("type", defaultValue: null)?.AsString ?? "semver";
var identifier = doc.GetValue("identifier", defaultValue: null)?.AsString;
if (string.IsNullOrEmpty(identifier))
continue;
var ecosystem = MapTypeToEcosystem(type);
// Version ranges kept as JSONB (PG-T5b.1.4)
var versionRangeJson = "{}";
if (doc.TryGetValue("versionRanges", out var rangesValue) && rangesValue is BsonArray)
{
versionRangeJson = rangesValue.ToJson();
}
string[]? versionsFixed = null;
if (doc.TryGetValue("versionRanges", out var rangesForFixed) && rangesForFixed is BsonArray rangesArr)
{
versionsFixed = rangesArr.OfType<BsonDocument>()
.Select(r => TryGetString(r, "fixedVersion"))
.Where(v => !string.IsNullOrEmpty(v))
.Select(v => v!)
.ToArray();
if (versionsFixed.Length == 0) versionsFixed = null;
}
result.Add(new AdvisoryAffectedEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Ecosystem = ecosystem,
PackageName = identifier,
Purl = BuildPurl(ecosystem, identifier),
VersionRange = versionRangeJson,
VersionsAffected = null,
VersionsFixed = versionsFixed,
DatabaseSpecific = null,
CreatedAt = now
});
}
return result;
}
private static List<AdvisoryReferenceEntity> ConvertReferences(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
var result = new List<AdvisoryReferenceEntity>();
if (!payload.TryGetValue("references", out var referencesValue) || referencesValue is not BsonArray referencesArray)
{
return result;
}
foreach (var doc in referencesArray.OfType<BsonDocument>())
{
var url = doc.GetValue("url", defaultValue: null)?.AsString;
if (string.IsNullOrEmpty(url))
continue;
var kind = TryGetString(doc, "kind") ?? "web";
result.Add(new AdvisoryReferenceEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
RefType = kind,
Url = url,
CreatedAt = now
});
}
return result;
}
private static List<AdvisoryCreditEntity> ConvertCredits(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
var result = new List<AdvisoryCreditEntity>();
if (!payload.TryGetValue("credits", out var creditsValue) || creditsValue is not BsonArray creditsArray)
{
return result;
}
foreach (var doc in creditsArray.OfType<BsonDocument>())
{
var displayName = doc.GetValue("displayName", defaultValue: null)?.AsString;
if (string.IsNullOrEmpty(displayName))
continue;
var role = TryGetString(doc, "role");
string? contact = null;
if (doc.TryGetValue("contacts", out var contactsValue) && contactsValue is BsonArray contactsArray)
{
contact = contactsArray.OfType<BsonValue>()
.Where(v => v.IsString)
.Select(v => v.AsString)
.FirstOrDefault();
}
result.Add(new AdvisoryCreditEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Name = displayName,
Contact = contact,
CreditType = role,
CreatedAt = now
});
}
return result;
}
private static List<AdvisoryWeaknessEntity> ConvertWeaknesses(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
var result = new List<AdvisoryWeaknessEntity>();
if (!payload.TryGetValue("cwes", out var cwesValue) || cwesValue is not BsonArray cwesArray)
{
return result;
}
foreach (var doc in cwesArray.OfType<BsonDocument>())
{
var identifier = doc.GetValue("identifier", defaultValue: null)?.AsString;
if (string.IsNullOrEmpty(identifier))
continue;
var name = TryGetString(doc, "name");
string? source = null;
if (doc.TryGetValue("provenance", out var provValue) && provValue.IsBsonDocument)
{
source = TryGetString(provValue.AsBsonDocument, "source");
}
result.Add(new AdvisoryWeaknessEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CweId = identifier,
Description = name,
Source = source,
CreatedAt = now
});
}
return result;
}
private static List<KevFlagEntity> ConvertKevFlags(Guid advisoryId, BsonDocument payload, DateTimeOffset now)
{
// KEV flags are typically stored separately; this handles inline KEV data if present
var result = new List<KevFlagEntity>();
// Check for exploitKnown flag
var exploitKnown = payload.TryGetValue("exploitKnown", out var exploitValue)
&& exploitValue.IsBoolean
&& exploitValue.AsBoolean;
if (!exploitKnown)
{
return result;
}
// Extract CVE ID for KEV flag
string? cveId = null;
if (payload.TryGetValue("aliases", out var aliasValue) && aliasValue is BsonArray aliasArray)
{
cveId = aliasArray.OfType<BsonValue>()
.Where(v => v.IsString && v.AsString.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
.Select(v => v.AsString)
.FirstOrDefault();
}
if (string.IsNullOrEmpty(cveId))
{
return result;
}
result.Add(new KevFlagEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CveId = cveId,
VendorProject = null,
Product = null,
VulnerabilityName = TryGetString(payload, "title"),
DateAdded = DateOnly.FromDateTime(now.UtcDateTime),
DueDate = null,
KnownRansomwareUse = false,
Notes = null,
CreatedAt = now
});
return result;
}
private static string MapTypeToEcosystem(string type)
{
return type.ToLowerInvariant() switch
private static string MapTypeToEcosystem(string type) =>
type.ToLowerInvariant() switch
{
"npm" => "npm",
"pypi" => "pypi",
@@ -607,12 +228,9 @@ public sealed class AdvisoryConverter
"ics-vendor" => "ics",
_ => "generic"
};
}
private static string? BuildPurl(string ecosystem, string identifier)
{
// Only build PURL for supported ecosystems
return ecosystem switch
private static string? BuildPurl(string ecosystem, string identifier) =>
ecosystem switch
{
"npm" => $"pkg:npm/{identifier}",
"pypi" => $"pkg:pypi/{identifier}",
@@ -626,7 +244,6 @@ public sealed class AdvisoryConverter
"pub" => $"pkg:pub/{identifier}",
_ => null
};
}
private static string[]? ExtractFixedVersions(IEnumerable<AffectedVersionRange> ranges)
{
@@ -638,22 +255,4 @@ public sealed class AdvisoryConverter
return fixedVersions.Length > 0 ? fixedVersions : null;
}
private static string? TryGetString(BsonDocument doc, string field)
{
return doc.TryGetValue(field, out var value) && value.IsString ? value.AsString : null;
}
private static DateTimeOffset? TryReadDateTime(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value))
return null;
return value switch
{
BsonDateTime dateTime => DateTime.SpecifyKind(dateTime.ToUniversalTime(), DateTimeKind.Utc),
BsonString stringValue when DateTimeOffset.TryParse(stringValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null
};
}
}

View File

@@ -1,40 +0,0 @@
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Converters;
/// <summary>
/// Service to convert Mongo advisory documents and persist them into PostgreSQL.
/// </summary>
public sealed class AdvisoryConversionService
{
private readonly IAdvisoryRepository _advisories;
public AdvisoryConversionService(IAdvisoryRepository advisories)
{
_advisories = advisories;
}
/// <summary>
/// Converts a Mongo advisory document and persists it (upsert) with all child rows.
/// </summary>
public Task<AdvisoryEntity> ConvertAndUpsertAsync(
AdvisoryDocument doc,
string sourceKey,
Guid sourceId,
CancellationToken cancellationToken = default)
{
var result = AdvisoryConverter.Convert(doc, sourceKey, sourceId);
return _advisories.UpsertAsync(
result.Advisory,
result.Aliases,
result.Cvss,
result.Affected,
result.References,
result.Credits,
result.Weaknesses,
result.KevFlags,
cancellationToken);
}
}

View File

@@ -1,297 +0,0 @@
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Models;
namespace StellaOps.Concelier.Storage.Postgres.Converters;
/// <summary>
/// Converts Mongo advisory documents to Postgres advisory entities and child collections.
/// Deterministic: ordering of child collections is preserved (sorted for stable SQL writes).
/// </summary>
public static class AdvisoryConverter
{
public sealed record Result(
AdvisoryEntity Advisory,
IReadOnlyList<AdvisoryAliasEntity> Aliases,
IReadOnlyList<AdvisoryCvssEntity> Cvss,
IReadOnlyList<AdvisoryAffectedEntity> Affected,
IReadOnlyList<AdvisoryReferenceEntity> References,
IReadOnlyList<AdvisoryCreditEntity> Credits,
IReadOnlyList<AdvisoryWeaknessEntity> Weaknesses,
IReadOnlyList<KevFlagEntity> KevFlags);
/// <summary>
/// Maps a Mongo AdvisoryDocument and its raw payload into Postgres entities.
/// </summary>
public static Result Convert(
AdvisoryDocument doc,
string sourceKey,
Guid sourceId,
string? contentHash = null)
{
var now = DateTimeOffset.UtcNow;
// Top-level advisory
var advisoryId = Guid.NewGuid();
var payloadJson = doc.Payload.ToJson();
var provenanceJson = JsonSerializer.Serialize(new { source = sourceKey });
var advisory = new AdvisoryEntity
{
Id = advisoryId,
AdvisoryKey = doc.AdvisoryKey,
PrimaryVulnId = doc.Payload.GetValue("primaryVulnId", doc.AdvisoryKey)?.ToString() ?? doc.AdvisoryKey,
SourceId = sourceId,
Title = doc.Payload.GetValue("title", null)?.ToString(),
Summary = doc.Payload.GetValue("summary", null)?.ToString(),
Description = doc.Payload.GetValue("description", null)?.ToString(),
Severity = doc.Payload.GetValue("severity", null)?.ToString(),
PublishedAt = doc.Published.HasValue ? DateTime.SpecifyKind(doc.Published.Value, DateTimeKind.Utc) : null,
ModifiedAt = DateTime.SpecifyKind(doc.Modified, DateTimeKind.Utc),
WithdrawnAt = doc.Payload.TryGetValue("withdrawnAt", out var withdrawn) && withdrawn.IsValidDateTime
? withdrawn.ToUniversalTime()
: null,
Provenance = provenanceJson,
RawPayload = payloadJson,
CreatedAt = now,
UpdatedAt = now
};
// Aliases
var aliases = doc.Payload.TryGetValue("aliases", out var aliasesBson) && aliasesBson.IsBsonArray
? aliasesBson.AsBsonArray.Select(v => v.ToString() ?? string.Empty)
: Enumerable.Empty<string>();
var aliasEntities = aliases
.Where(a => !string.IsNullOrWhiteSpace(a))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(a => a, StringComparer.OrdinalIgnoreCase)
.Select((alias, idx) => new AdvisoryAliasEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
AliasType = alias.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase) ? "CVE" : "OTHER",
AliasValue = alias,
IsPrimary = idx == 0,
CreatedAt = now
})
.ToArray();
// CVSS
var cvssEntities = BuildCvssEntities(doc, advisoryId, now);
// Affected
var affectedEntities = BuildAffectedEntities(doc, advisoryId, now);
// References
var referencesEntities = BuildReferenceEntities(doc, advisoryId, now);
// Credits
var creditEntities = BuildCreditEntities(doc, advisoryId, now);
// Weaknesses
var weaknessEntities = BuildWeaknessEntities(doc, advisoryId, now);
// KEV flags (from payload.kev if present)
var kevEntities = BuildKevEntities(doc, advisoryId, now);
return new Result(
advisory,
aliasEntities,
cvssEntities,
affectedEntities,
referencesEntities,
creditEntities,
weaknessEntities,
kevEntities);
}
private static IReadOnlyList<AdvisoryCvssEntity> BuildCvssEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("cvss", out var cvssValue) || !cvssValue.IsBsonArray)
{
return Array.Empty<AdvisoryCvssEntity>();
}
return cvssValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(d => new AdvisoryCvssEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CvssVersion = d.GetValue("version", "3.1").ToString() ?? "3.1",
VectorString = d.GetValue("vector", string.Empty).ToString() ?? string.Empty,
BaseScore = d.GetValue("baseScore", 0m).ToDecimal(),
BaseSeverity = d.GetValue("baseSeverity", null)?.ToString(),
ExploitabilityScore = d.GetValue("exploitabilityScore", null)?.ToNullableDecimal(),
ImpactScore = d.GetValue("impactScore", null)?.ToNullableDecimal(),
Source = d.GetValue("source", null)?.ToString(),
IsPrimary = d.GetValue("isPrimary", false).ToBoolean(),
CreatedAt = now
})
.OrderByDescending(c => c.IsPrimary)
.ThenByDescending(c => c.BaseScore)
.ThenBy(c => c.Id)
.ToArray();
}
private static IReadOnlyList<AdvisoryAffectedEntity> BuildAffectedEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("affected", out var affectedValue) || !affectedValue.IsBsonArray)
{
return Array.Empty<AdvisoryAffectedEntity>();
}
return affectedValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(d => new AdvisoryAffectedEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Ecosystem = d.GetValue("ecosystem", string.Empty).ToString() ?? string.Empty,
PackageName = d.GetValue("packageName", string.Empty).ToString() ?? string.Empty,
Purl = d.GetValue("purl", null)?.ToString(),
VersionRange = d.GetValue("range", "{}").ToString() ?? "{}",
VersionsAffected = d.TryGetValue("versionsAffected", out var va) && va.IsBsonArray
? va.AsBsonArray.Select(x => x.ToString() ?? string.Empty).ToArray()
: null,
VersionsFixed = d.TryGetValue("versionsFixed", out var vf) && vf.IsBsonArray
? vf.AsBsonArray.Select(x => x.ToString() ?? string.Empty).ToArray()
: null,
DatabaseSpecific = d.GetValue("databaseSpecific", null)?.ToString(),
CreatedAt = now
})
.OrderBy(a => a.Ecosystem)
.ThenBy(a => a.PackageName)
.ThenBy(a => a.Purl)
.ToArray();
}
private static IReadOnlyList<AdvisoryReferenceEntity> BuildReferenceEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("references", out var referencesValue) || !referencesValue.IsBsonArray)
{
return Array.Empty<AdvisoryReferenceEntity>();
}
return referencesValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(r => new AdvisoryReferenceEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
RefType = r.GetValue("type", "advisory").ToString() ?? "advisory",
Url = r.GetValue("url", string.Empty).ToString() ?? string.Empty,
CreatedAt = now
})
.OrderBy(r => r.Url)
.ToArray();
}
private static IReadOnlyList<AdvisoryCreditEntity> BuildCreditEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("credits", out var creditsValue) || !creditsValue.IsBsonArray)
{
return Array.Empty<AdvisoryCreditEntity>();
}
return creditsValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(c => new AdvisoryCreditEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
Name = c.GetValue("name", string.Empty).ToString() ?? string.Empty,
Contact = c.GetValue("contact", null)?.ToString(),
CreditType = c.GetValue("type", null)?.ToString(),
CreatedAt = now
})
.OrderBy(c => c.Name)
.ThenBy(c => c.Contact)
.ToArray();
}
private static IReadOnlyList<AdvisoryWeaknessEntity> BuildWeaknessEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("weaknesses", out var weaknessesValue) || !weaknessesValue.IsBsonArray)
{
return Array.Empty<AdvisoryWeaknessEntity>();
}
return weaknessesValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(w => new AdvisoryWeaknessEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CweId = w.GetValue("cweId", string.Empty).ToString() ?? string.Empty,
Description = w.GetValue("description", null)?.ToString(),
Source = w.GetValue("source", null)?.ToString(),
CreatedAt = now
})
.OrderBy(w => w.CweId)
.ToArray();
}
private static IReadOnlyList<KevFlagEntity> BuildKevEntities(AdvisoryDocument doc, Guid advisoryId, DateTimeOffset now)
{
if (!doc.Payload.TryGetValue("kev", out var kevValue) || !kevValue.IsBsonArray)
{
return Array.Empty<KevFlagEntity>();
}
var today = DateOnly.FromDateTime(now.UtcDateTime);
return kevValue.AsBsonArray
.Where(v => v.IsBsonDocument)
.Select(v => v.AsBsonDocument)
.Select(k => new KevFlagEntity
{
Id = Guid.NewGuid(),
AdvisoryId = advisoryId,
CveId = k.GetValue("cveId", string.Empty).ToString() ?? string.Empty,
VendorProject = k.GetValue("vendorProject", null)?.ToString(),
Product = k.GetValue("product", null)?.ToString(),
VulnerabilityName = k.GetValue("name", null)?.ToString(),
DateAdded = k.TryGetValue("dateAdded", out var dateAdded) && dateAdded.IsValidDateTime
? DateOnly.FromDateTime(dateAdded.ToUniversalTime().Date)
: today,
DueDate = k.TryGetValue("dueDate", out var dueDate) && dueDate.IsValidDateTime
? DateOnly.FromDateTime(dueDate.ToUniversalTime().Date)
: null,
KnownRansomwareUse = k.GetValue("knownRansomwareUse", false).ToBoolean(),
Notes = k.GetValue("notes", null)?.ToString(),
CreatedAt = now
})
.OrderBy(k => k.CveId)
.ToArray();
}
private static decimal ToDecimal(this object value)
=> value switch
{
decimal d => d,
double d => (decimal)d,
float f => (decimal)f,
IConvertible c => c.ToDecimal(null),
_ => 0m
};
private static decimal? ToNullableDecimal(this object? value)
{
if (value is null) return null;
return value switch
{
decimal d => d,
double d => (decimal)d,
float f => (decimal)f,
IConvertible c => c.ToDecimal(null),
_ => null
};
}
}

View File

@@ -1,66 +0,0 @@
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Converters.Importers;
/// <summary>
/// Imports GHSA/vendor advisories from Mongo into PostgreSQL.
/// </summary>
public sealed class GhsaImporter
{
private readonly IMongoCollection<AdvisoryDocument> _collection;
private readonly AdvisoryConversionService _conversionService;
private readonly IFeedSnapshotRepository _feedSnapshots;
private readonly IAdvisorySnapshotRepository _advisorySnapshots;
public GhsaImporter(
IMongoCollection<AdvisoryDocument> collection,
AdvisoryConversionService conversionService,
IFeedSnapshotRepository feedSnapshots,
IAdvisorySnapshotRepository advisorySnapshots)
{
_collection = collection;
_conversionService = conversionService;
_feedSnapshots = feedSnapshots;
_advisorySnapshots = advisorySnapshots;
}
public async Task ImportSnapshotAsync(
Guid sourceId,
string sourceKey,
string snapshotId,
CancellationToken cancellationToken)
{
var advisories = await _collection
.Find(Builders<AdvisoryDocument>.Filter.Empty)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
var feedSnapshot = await _feedSnapshots.InsertAsync(new FeedSnapshotEntity
{
Id = Guid.NewGuid(),
SourceId = sourceId,
SnapshotId = snapshotId,
AdvisoryCount = advisories.Count,
Metadata = $"{{\"source\":\"{sourceKey}\"}}",
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
foreach (var advisory in advisories)
{
var stored = await _conversionService.ConvertAndUpsertAsync(advisory, sourceKey, sourceId, cancellationToken)
.ConfigureAwait(false);
await _advisorySnapshots.InsertAsync(new AdvisorySnapshotEntity
{
Id = Guid.NewGuid(),
FeedSnapshotId = feedSnapshot.Id,
AdvisoryKey = stored.AdvisoryKey,
ContentHash = advisory.Payload.GetValue("hash", advisory.AdvisoryKey)?.ToString() ?? advisory.AdvisoryKey,
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
}
}
}

View File

@@ -1,68 +0,0 @@
using System.Text.Json;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Converters.Importers;
/// <summary>
/// Imports NVD advisory documents from Mongo into PostgreSQL using the advisory converter.
/// </summary>
public sealed class NvdImporter
{
private readonly IMongoCollection<AdvisoryDocument> _collection;
private readonly AdvisoryConversionService _conversionService;
private readonly IFeedSnapshotRepository _feedSnapshots;
private readonly IAdvisorySnapshotRepository _advisorySnapshots;
public NvdImporter(
IMongoCollection<AdvisoryDocument> collection,
AdvisoryConversionService conversionService,
IFeedSnapshotRepository feedSnapshots,
IAdvisorySnapshotRepository advisorySnapshots)
{
_collection = collection;
_conversionService = conversionService;
_feedSnapshots = feedSnapshots;
_advisorySnapshots = advisorySnapshots;
}
public async Task ImportSnapshotAsync(
Guid sourceId,
string sourceKey,
string snapshotId,
CancellationToken cancellationToken)
{
var advisories = await _collection
.Find(Builders<AdvisoryDocument>.Filter.Empty)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
var feedSnapshot = await _feedSnapshots.InsertAsync(new FeedSnapshotEntity
{
Id = Guid.NewGuid(),
SourceId = sourceId,
SnapshotId = snapshotId,
AdvisoryCount = advisories.Count,
Checksum = null,
Metadata = JsonSerializer.Serialize(new { source = sourceKey, snapshot = snapshotId }),
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
foreach (var advisory in advisories)
{
var stored = await _conversionService.ConvertAndUpsertAsync(advisory, sourceKey, sourceId, cancellationToken)
.ConfigureAwait(false);
await _advisorySnapshots.InsertAsync(new AdvisorySnapshotEntity
{
Id = Guid.NewGuid(),
FeedSnapshotId = feedSnapshot.Id,
AdvisoryKey = stored.AdvisoryKey,
ContentHash = advisory.Payload.GetValue("hash", advisory.AdvisoryKey)?.ToString() ?? advisory.AdvisoryKey,
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
}
}
}

View File

@@ -1,65 +0,0 @@
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Models;
using StellaOps.Concelier.Storage.Postgres.Repositories;
namespace StellaOps.Concelier.Storage.Postgres.Converters.Importers;
/// <summary>
/// Imports OSV advisories from Mongo into PostgreSQL.
/// </summary>
public sealed class OsvImporter
{
private readonly IMongoCollection<AdvisoryDocument> _collection;
private readonly AdvisoryConversionService _conversionService;
private readonly IFeedSnapshotRepository _feedSnapshots;
private readonly IAdvisorySnapshotRepository _advisorySnapshots;
public OsvImporter(
IMongoCollection<AdvisoryDocument> collection,
AdvisoryConversionService conversionService,
IFeedSnapshotRepository feedSnapshots,
IAdvisorySnapshotRepository advisorySnapshots)
{
_collection = collection;
_conversionService = conversionService;
_feedSnapshots = feedSnapshots;
_advisorySnapshots = advisorySnapshots;
}
public async Task ImportSnapshotAsync(
Guid sourceId,
string snapshotId,
CancellationToken cancellationToken)
{
var advisories = await _collection
.Find(Builders<AdvisoryDocument>.Filter.Empty)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
var feedSnapshot = await _feedSnapshots.InsertAsync(new FeedSnapshotEntity
{
Id = Guid.NewGuid(),
SourceId = sourceId,
SnapshotId = snapshotId,
AdvisoryCount = advisories.Count,
Metadata = "{\"source\":\"osv\"}",
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
foreach (var advisory in advisories)
{
var stored = await _conversionService.ConvertAndUpsertAsync(advisory, "osv", sourceId, cancellationToken)
.ConfigureAwait(false);
await _advisorySnapshots.InsertAsync(new AdvisorySnapshotEntity
{
Id = Guid.NewGuid(),
FeedSnapshotId = feedSnapshot.Id,
AdvisoryKey = stored.AdvisoryKey,
ContentHash = advisory.Payload.GetValue("hash", advisory.AdvisoryKey)?.ToString() ?? advisory.AdvisoryKey,
CreatedAt = DateTimeOffset.UtcNow
}, cancellationToken).ConfigureAwait(false);
}
}
}

View File

@@ -15,9 +15,11 @@
</ItemGroup>
<ItemGroup>
<!-- Exclude legacy Mongo-based importers/converters until the Postgres-native pipeline is ready; the domain-based converter remains -->
<Compile Remove="Converters\**\*.cs" />
<Compile Remove="Conversion\**\*.cs" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,240 @@
using StellaOps.Concelier.Core.Orchestration;
namespace StellaOps.Concelier.Core.Tests.Orchestration;
public sealed class OrchestratorRegistryStoreTests
{
[Fact]
public async Task UpsertAsync_CreatesNewRecord()
{
var store = new InMemoryOrchestratorRegistryStore();
var record = CreateRegistryRecord("tenant-1", "connector-1");
await store.UpsertAsync(record, CancellationToken.None);
var retrieved = await store.GetAsync("tenant-1", "connector-1", CancellationToken.None);
Assert.NotNull(retrieved);
Assert.Equal("tenant-1", retrieved.Tenant);
Assert.Equal("connector-1", retrieved.ConnectorId);
}
[Fact]
public async Task UpsertAsync_UpdatesExistingRecord()
{
var store = new InMemoryOrchestratorRegistryStore();
var record1 = CreateRegistryRecord("tenant-1", "connector-1", source: "nvd");
var record2 = CreateRegistryRecord("tenant-1", "connector-1", source: "osv");
await store.UpsertAsync(record1, CancellationToken.None);
await store.UpsertAsync(record2, CancellationToken.None);
var retrieved = await store.GetAsync("tenant-1", "connector-1", CancellationToken.None);
Assert.NotNull(retrieved);
Assert.Equal("osv", retrieved.Source);
}
[Fact]
public async Task GetAsync_ReturnsNullForNonExistentRecord()
{
var store = new InMemoryOrchestratorRegistryStore();
var retrieved = await store.GetAsync("tenant-1", "nonexistent", CancellationToken.None);
Assert.Null(retrieved);
}
[Fact]
public async Task ListAsync_ReturnsRecordsForTenant()
{
var store = new InMemoryOrchestratorRegistryStore();
await store.UpsertAsync(CreateRegistryRecord("tenant-1", "connector-a"), CancellationToken.None);
await store.UpsertAsync(CreateRegistryRecord("tenant-1", "connector-b"), CancellationToken.None);
await store.UpsertAsync(CreateRegistryRecord("tenant-2", "connector-c"), CancellationToken.None);
var records = await store.ListAsync("tenant-1", CancellationToken.None);
Assert.Equal(2, records.Count);
Assert.All(records, r => Assert.Equal("tenant-1", r.Tenant));
}
[Fact]
public async Task ListAsync_ReturnsOrderedByConnectorId()
{
var store = new InMemoryOrchestratorRegistryStore();
await store.UpsertAsync(CreateRegistryRecord("tenant-1", "zzz-connector"), CancellationToken.None);
await store.UpsertAsync(CreateRegistryRecord("tenant-1", "aaa-connector"), CancellationToken.None);
var records = await store.ListAsync("tenant-1", CancellationToken.None);
Assert.Equal("aaa-connector", records[0].ConnectorId);
Assert.Equal("zzz-connector", records[1].ConnectorId);
}
[Fact]
public async Task AppendHeartbeatAsync_StoresHeartbeat()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var heartbeat = new OrchestratorHeartbeatRecord(
"tenant-1", "connector-1", runId, 1,
OrchestratorHeartbeatStatus.Running, 50, 10,
null, null, null, null, DateTimeOffset.UtcNow);
await store.AppendHeartbeatAsync(heartbeat, CancellationToken.None);
var latest = await store.GetLatestHeartbeatAsync("tenant-1", "connector-1", runId, CancellationToken.None);
Assert.NotNull(latest);
Assert.Equal(1, latest.Sequence);
Assert.Equal(OrchestratorHeartbeatStatus.Running, latest.Status);
}
[Fact]
public async Task GetLatestHeartbeatAsync_ReturnsHighestSequence()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
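// Heartbeats are appended out of sequence (1, 3, 2) to prove selection is by sequence number, not insertion order.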
await store.AppendHeartbeatAsync(CreateHeartbeat("tenant-1", "connector-1", runId, 1, OrchestratorHeartbeatStatus.Starting, now), CancellationToken.None);
await store.AppendHeartbeatAsync(CreateHeartbeat("tenant-1", "connector-1", runId, 3, OrchestratorHeartbeatStatus.Succeeded, now.AddMinutes(2)), CancellationToken.None);
await store.AppendHeartbeatAsync(CreateHeartbeat("tenant-1", "connector-1", runId, 2, OrchestratorHeartbeatStatus.Running, now.AddMinutes(1)), CancellationToken.None);
var latest = await store.GetLatestHeartbeatAsync("tenant-1", "connector-1", runId, CancellationToken.None);
Assert.NotNull(latest);
Assert.Equal(3, latest.Sequence);
Assert.Equal(OrchestratorHeartbeatStatus.Succeeded, latest.Status);
}
[Fact]
public async Task EnqueueCommandAsync_StoresCommand()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var command = new OrchestratorCommandRecord(
"tenant-1", "connector-1", runId, 1,
OrchestratorCommandKind.Pause, null, null,
DateTimeOffset.UtcNow, null);
await store.EnqueueCommandAsync(command, CancellationToken.None);
var commands = await store.GetPendingCommandsAsync("tenant-1", "connector-1", runId, null, CancellationToken.None);
Assert.Single(commands);
Assert.Equal(OrchestratorCommandKind.Pause, commands[0].Command);
}
[Fact]
public async Task GetPendingCommandsAsync_FiltersAfterSequence()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
await store.EnqueueCommandAsync(CreateCommand("tenant-1", "connector-1", runId, 1, OrchestratorCommandKind.Pause, now), CancellationToken.None);
await store.EnqueueCommandAsync(CreateCommand("tenant-1", "connector-1", runId, 2, OrchestratorCommandKind.Resume, now), CancellationToken.None);
await store.EnqueueCommandAsync(CreateCommand("tenant-1", "connector-1", runId, 3, OrchestratorCommandKind.Throttle, now), CancellationToken.None);
var commands = await store.GetPendingCommandsAsync("tenant-1", "connector-1", runId, 1, CancellationToken.None);
Assert.Equal(2, commands.Count);
Assert.Equal(2, commands[0].Sequence);
Assert.Equal(3, commands[1].Sequence);
}
[Fact]
public async Task GetPendingCommandsAsync_ExcludesExpiredCommands()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var now = DateTimeOffset.UtcNow;
var expired = now.AddMinutes(-5);
var future = now.AddMinutes(5);
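// Command 1 expired five minutes ago; only command 2 should surface as pending.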
await store.EnqueueCommandAsync(CreateCommand("tenant-1", "connector-1", runId, 1, OrchestratorCommandKind.Pause, now, expired), CancellationToken.None);
await store.EnqueueCommandAsync(CreateCommand("tenant-1", "connector-1", runId, 2, OrchestratorCommandKind.Resume, now, future), CancellationToken.None);
var commands = await store.GetPendingCommandsAsync("tenant-1", "connector-1", runId, null, CancellationToken.None);
Assert.Single(commands);
Assert.Equal(2, commands[0].Sequence);
}
[Fact]
public async Task StoreManifestAsync_StoresManifest()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
var manifest = new OrchestratorRunManifest(
runId, "connector-1", "tenant-1",
new OrchestratorBackfillRange("cursor-a", "cursor-z"),
["hash1", "hash2"],
"dsse-hash",
DateTimeOffset.UtcNow);
await store.StoreManifestAsync(manifest, CancellationToken.None);
var retrieved = await store.GetManifestAsync("tenant-1", "connector-1", runId, CancellationToken.None);
Assert.NotNull(retrieved);
Assert.Equal(runId, retrieved.RunId);
Assert.Equal(2, retrieved.ArtifactHashes.Count);
Assert.Equal("dsse-hash", retrieved.DsseEnvelopeHash);
}
[Fact]
public async Task GetManifestAsync_ReturnsNullForNonExistentManifest()
{
var store = new InMemoryOrchestratorRegistryStore();
var manifest = await store.GetManifestAsync("tenant-1", "connector-1", Guid.NewGuid(), CancellationToken.None);
Assert.Null(manifest);
}
[Fact]
public async Task Clear_RemovesAllData()
{
var store = new InMemoryOrchestratorRegistryStore();
var runId = Guid.NewGuid();
await store.UpsertAsync(CreateRegistryRecord("tenant-1", "connector-1"), CancellationToken.None);
await store.AppendHeartbeatAsync(CreateHeartbeat("tenant-1", "connector-1", runId, 1, OrchestratorHeartbeatStatus.Running, DateTimeOffset.UtcNow), CancellationToken.None);
store.Clear();
Assert.Null(await store.GetAsync("tenant-1", "connector-1", CancellationToken.None));
Assert.Null(await store.GetLatestHeartbeatAsync("tenant-1", "connector-1", runId, CancellationToken.None));
}
private static OrchestratorRegistryRecord CreateRegistryRecord(string tenant, string connectorId, string source = "nvd")
{
return new OrchestratorRegistryRecord(
tenant, connectorId, source,
["observations"],
"secret:ref",
new OrchestratorSchedule("0 * * * *", "UTC", 1, 60),
new OrchestratorRatePolicy(100, 10, 30),
["raw-advisory"],
$"concelier:{tenant}:{connectorId}",
new OrchestratorEgressGuard(["example.com"], false),
DateTimeOffset.UtcNow,
DateTimeOffset.UtcNow);
}
private static OrchestratorHeartbeatRecord CreateHeartbeat(
string tenant, string connectorId, Guid runId, long sequence,
OrchestratorHeartbeatStatus status, DateTimeOffset timestamp)
{
return new OrchestratorHeartbeatRecord(
tenant, connectorId, runId, sequence, status,
null, null, null, null, null, null, timestamp);
}
private static OrchestratorCommandRecord CreateCommand(
string tenant, string connectorId, Guid runId, long sequence,
OrchestratorCommandKind command, DateTimeOffset createdAt, DateTimeOffset? expiresAt = null)
{
return new OrchestratorCommandRecord(
tenant, connectorId, runId, sequence, command,
null, null, createdAt, expiresAt);
}
}
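
The two behaviours these tests pin down most precisely — latest heartbeat chosen by sequence number rather than append order, and pending commands filtered by a sequence cursor plus expiry — could be satisfied by logic along the following lines. This is a sketch of the expected semantics, not the actual InMemoryOrchestratorRegistryStore; the record property names (e.g. Tenant, RunId, ExpiresAt) are inferred from the positional constructors used in the test helpers above.

// Sketch of the selection/filtering semantics the tests assert; not the production store.
// _heartbeats and _commands are assumed to be plain in-memory List<T> fields.
public Task<OrchestratorHeartbeatRecord?> GetLatestHeartbeatAsync(
    string tenant, string connectorId, Guid runId, CancellationToken cancellationToken)
{
    var latest = _heartbeats
        .Where(h => h.Tenant == tenant && h.ConnectorId == connectorId && h.RunId == runId)
        .OrderByDescending(h => h.Sequence)   // highest sequence wins, regardless of append order
        .FirstOrDefault();
    return Task.FromResult(latest);
}

public Task<IReadOnlyList<OrchestratorCommandRecord>> GetPendingCommandsAsync(
    string tenant, string connectorId, Guid runId, long? afterSequence, CancellationToken cancellationToken)
{
    var now = DateTimeOffset.UtcNow;          // the real store may use an injected TimeProvider instead
    IReadOnlyList<OrchestratorCommandRecord> pending = _commands
        .Where(c => c.Tenant == tenant && c.ConnectorId == connectorId && c.RunId == runId)
        .Where(c => afterSequence is null || c.Sequence > afterSequence)   // sequence cursor
        .Where(c => c.ExpiresAt is null || c.ExpiresAt > now)              // drop expired commands
        .OrderBy(c => c.Sequence)
        .ToList();
    return Task.FromResult(pending);
}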

View File

@@ -0,0 +1,369 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Core.Signals;
namespace StellaOps.Concelier.Core.Tests.Signals;
public sealed class AffectedSymbolProviderTests
{
private readonly FakeTimeProvider _timeProvider = new(DateTimeOffset.UtcNow);
[Fact]
public async Task GetByAdvisoryAsync_ReturnsEmptySetForUnknownAdvisory()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var result = await provider.GetByAdvisoryAsync("tenant-1", "CVE-2024-0001", CancellationToken.None);
Assert.Equal("tenant-1", result.TenantId);
Assert.Equal("CVE-2024-0001", result.AdvisoryId);
Assert.Empty(result.Symbols);
Assert.Empty(result.SourceSummaries);
Assert.Equal(0, result.UniqueSymbolCount);
}
[Fact]
public async Task GetByAdvisoryAsync_ReturnsStoredSymbols()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv(
observationHash: "sha256:abc123",
fetchedAt: _timeProvider.GetUtcNow(),
ingestJobId: "job-001",
osvId: "GHSA-1234-5678-9abc");
var symbol = AffectedSymbol.Function(
tenantId: "tenant-1",
advisoryId: "CVE-2024-0001",
observationId: "obs-001",
symbol: "lodash.template",
provenance: provenance,
extractedAt: _timeProvider.GetUtcNow(),
purl: "pkg:npm/lodash@4.17.21",
module: "lodash",
versionRange: "<4.17.21");
await store.StoreAsync([symbol], CancellationToken.None);
var result = await provider.GetByAdvisoryAsync("tenant-1", "CVE-2024-0001", CancellationToken.None);
Assert.Single(result.Symbols);
Assert.Equal("lodash.template", result.Symbols[0].Symbol);
Assert.Equal(AffectedSymbolType.Function, result.Symbols[0].SymbolType);
Assert.Equal("osv", result.Symbols[0].Provenance.Source);
}
[Fact]
public async Task GetByAdvisoryAsync_ComputesSourceSummaries()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var osvProvenance = AffectedSymbolProvenance.FromOsv(
"sha256:abc", _timeProvider.GetUtcNow());
var nvdProvenance = AffectedSymbolProvenance.FromNvd(
"sha256:def", _timeProvider.GetUtcNow(), cveId: "CVE-2024-0001");
var symbols = new List<AffectedSymbol>
{
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-1", "func1", osvProvenance, _timeProvider.GetUtcNow()),
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-2", "func2", osvProvenance, _timeProvider.GetUtcNow()),
AffectedSymbol.Method("tenant-1", "CVE-2024-0001", "obs-3", "method1", "ClassName", nvdProvenance, _timeProvider.GetUtcNow())
};
await store.StoreAsync(symbols, CancellationToken.None);
var result = await provider.GetByAdvisoryAsync("tenant-1", "CVE-2024-0001", CancellationToken.None);
Assert.Equal(3, result.Symbols.Length);
Assert.Equal(2, result.SourceSummaries.Length);
var osvSummary = result.SourceSummaries.First(s => s.Source == "osv");
Assert.Equal(2, osvSummary.SymbolCount);
Assert.Equal(2, osvSummary.CountByType[AffectedSymbolType.Function]);
var nvdSummary = result.SourceSummaries.First(s => s.Source == "nvd");
Assert.Equal(1, nvdSummary.SymbolCount);
Assert.Equal(1, nvdSummary.CountByType[AffectedSymbolType.Method]);
}
[Fact]
public async Task GetByPackageAsync_ReturnsSymbolsForPackage()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromGhsa(
"sha256:ghi", _timeProvider.GetUtcNow(), ghsaId: "GHSA-abcd-efgh-ijkl");
var symbol = AffectedSymbol.Function(
tenantId: "tenant-1",
advisoryId: "CVE-2024-0002",
observationId: "obs-001",
symbol: "express.render",
provenance: provenance,
extractedAt: _timeProvider.GetUtcNow(),
purl: "pkg:npm/express@4.18.0");
await store.StoreAsync([symbol], CancellationToken.None);
var result = await provider.GetByPackageAsync("tenant-1", "pkg:npm/express@4.18.0", CancellationToken.None);
Assert.Single(result.Symbols);
Assert.Equal("express.render", result.Symbols[0].Symbol);
}
[Fact]
public async Task QueryAsync_FiltersByAdvisoryId()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", _timeProvider.GetUtcNow());
var symbols = new List<AffectedSymbol>
{
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, _timeProvider.GetUtcNow()),
AffectedSymbol.Function("tenant-1", "CVE-2024-0002", "obs-2", "func2", provenance, _timeProvider.GetUtcNow())
};
await store.StoreAsync(symbols, CancellationToken.None);
var options = AffectedSymbolQueryOptions.ForAdvisory("tenant-1", "CVE-2024-0001");
var result = await provider.QueryAsync(options, CancellationToken.None);
Assert.Equal(1, result.TotalCount);
Assert.Single(result.Symbols);
Assert.Equal("func1", result.Symbols[0].Symbol);
}
[Fact]
public async Task QueryAsync_FiltersBySymbolType()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", _timeProvider.GetUtcNow());
var symbols = new List<AffectedSymbol>
{
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, _timeProvider.GetUtcNow()),
AffectedSymbol.Method("tenant-1", "CVE-2024-0001", "obs-2", "method1", "Class1", provenance, _timeProvider.GetUtcNow())
};
await store.StoreAsync(symbols, CancellationToken.None);
var options = new AffectedSymbolQueryOptions(
TenantId: "tenant-1",
SymbolTypes: [AffectedSymbolType.Method]);
var result = await provider.QueryAsync(options, CancellationToken.None);
Assert.Equal(1, result.TotalCount);
Assert.Equal(AffectedSymbolType.Method, result.Symbols[0].SymbolType);
}
[Fact]
public async Task QueryAsync_SupportsPagination()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", _timeProvider.GetUtcNow());
var symbols = Enumerable.Range(1, 10)
.Select(i => AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", $"obs-{i}", $"func{i}", provenance, _timeProvider.GetUtcNow()))
.ToList();
await store.StoreAsync(symbols, CancellationToken.None);
var options = new AffectedSymbolQueryOptions(
TenantId: "tenant-1",
Limit: 3,
Offset: 2);
var result = await provider.QueryAsync(options, CancellationToken.None);
Assert.Equal(10, result.TotalCount);
Assert.Equal(3, result.Symbols.Length);
Assert.True(result.HasMore);
}
[Fact]
public async Task GetByAdvisoriesBatchAsync_ReturnsBatchResults()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", _timeProvider.GetUtcNow());
var symbols = new List<AffectedSymbol>
{
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, _timeProvider.GetUtcNow()),
AffectedSymbol.Function("tenant-1", "CVE-2024-0002", "obs-2", "func2", provenance, _timeProvider.GetUtcNow())
};
await store.StoreAsync(symbols, CancellationToken.None);
var result = await provider.GetByAdvisoriesBatchAsync(
"tenant-1",
["CVE-2024-0001", "CVE-2024-0002", "CVE-2024-0003"],
CancellationToken.None);
Assert.Equal(3, result.Count);
Assert.Single(result["CVE-2024-0001"].Symbols);
Assert.Single(result["CVE-2024-0002"].Symbols);
Assert.Empty(result["CVE-2024-0003"].Symbols);
}
[Fact]
public async Task HasSymbolsAsync_ReturnsTrueWhenSymbolsExist()
{
var store = new InMemoryAffectedSymbolStore();
var provider = new AffectedSymbolProvider(
store,
_timeProvider,
NullLogger<AffectedSymbolProvider>.Instance);
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", _timeProvider.GetUtcNow());
var symbol = AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, _timeProvider.GetUtcNow());
await store.StoreAsync([symbol], CancellationToken.None);
var exists = await provider.HasSymbolsAsync("tenant-1", "CVE-2024-0001", CancellationToken.None);
var notExists = await provider.HasSymbolsAsync("tenant-1", "CVE-2024-9999", CancellationToken.None);
Assert.True(exists);
Assert.False(notExists);
}
[Fact]
public void AffectedSymbol_CanonicalId_GeneratesCorrectFormat()
{
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", DateTimeOffset.UtcNow);
var function = AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", "obs-1", "myFunc", provenance, DateTimeOffset.UtcNow,
module: "myModule");
Assert.Equal("myModule::myFunc", function.CanonicalId);
var method = AffectedSymbol.Method(
"tenant-1", "CVE-2024-0001", "obs-1", "myMethod", "MyClass", provenance, DateTimeOffset.UtcNow,
module: "myModule");
Assert.Equal("myModule::MyClass.myMethod", method.CanonicalId);
var globalFunc = AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", "obs-1", "globalFunc", provenance, DateTimeOffset.UtcNow);
Assert.Equal("global::globalFunc", globalFunc.CanonicalId);
}
[Fact]
public void AffectedSymbol_HasSourceLocation_ReturnsCorrectValue()
{
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", DateTimeOffset.UtcNow);
var withLocation = AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, DateTimeOffset.UtcNow,
filePath: "/src/lib.js", lineNumber: 42);
Assert.True(withLocation.HasSourceLocation);
var withoutLocation = AffectedSymbol.Function(
"tenant-1", "CVE-2024-0001", "obs-1", "func2", provenance, DateTimeOffset.UtcNow);
Assert.False(withoutLocation.HasSourceLocation);
}
[Fact]
public void AffectedSymbolSet_UniqueSymbolCount_CountsDistinctCanonicalIds()
{
var provenance = AffectedSymbolProvenance.FromOsv("sha256:test", DateTimeOffset.UtcNow);
var symbols = ImmutableArray.Create(
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-1", "func1", provenance, DateTimeOffset.UtcNow, module: "mod1"),
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-2", "func1", provenance, DateTimeOffset.UtcNow, module: "mod1"), // duplicate
AffectedSymbol.Function("tenant-1", "CVE-2024-0001", "obs-3", "func2", provenance, DateTimeOffset.UtcNow, module: "mod1")
);
var set = new AffectedSymbolSet(
"tenant-1", "CVE-2024-0001", symbols,
ImmutableArray<AffectedSymbolSourceSummary>.Empty, DateTimeOffset.UtcNow);
Assert.Equal(2, set.UniqueSymbolCount);
}
[Fact]
public void AffectedSymbolProvenance_FromOsv_CreatesCorrectProvenance()
{
var now = DateTimeOffset.UtcNow;
var provenance = AffectedSymbolProvenance.FromOsv(
observationHash: "sha256:abc123",
fetchedAt: now,
ingestJobId: "job-001",
osvId: "GHSA-1234-5678-9abc");
Assert.Equal("osv", provenance.Source);
Assert.Equal("open-source-vulnerabilities", provenance.Vendor);
Assert.Equal("sha256:abc123", provenance.ObservationHash);
Assert.Equal(now, provenance.FetchedAt);
Assert.Equal("job-001", provenance.IngestJobId);
Assert.Equal("GHSA-1234-5678-9abc", provenance.UpstreamId);
Assert.Equal("https://osv.dev/vulnerability/GHSA-1234-5678-9abc", provenance.UpstreamUrl);
}
[Fact]
public void AffectedSymbolProvenance_FromNvd_CreatesCorrectProvenance()
{
var now = DateTimeOffset.UtcNow;
var provenance = AffectedSymbolProvenance.FromNvd(
observationHash: "sha256:def456",
fetchedAt: now,
cveId: "CVE-2024-0001");
Assert.Equal("nvd", provenance.Source);
Assert.Equal("national-vulnerability-database", provenance.Vendor);
Assert.Equal("CVE-2024-0001", provenance.UpstreamId);
Assert.Equal("https://nvd.nist.gov/vuln/detail/CVE-2024-0001", provenance.UpstreamUrl);
}
[Fact]
public void AffectedSymbolProvenance_FromGhsa_CreatesCorrectProvenance()
{
var now = DateTimeOffset.UtcNow;
var provenance = AffectedSymbolProvenance.FromGhsa(
observationHash: "sha256:ghi789",
fetchedAt: now,
ghsaId: "GHSA-abcd-efgh-ijkl");
Assert.Equal("ghsa", provenance.Source);
Assert.Equal("github-security-advisories", provenance.Vendor);
Assert.Equal("GHSA-abcd-efgh-ijkl", provenance.UpstreamId);
Assert.Equal("https://github.com/advisories/GHSA-abcd-efgh-ijkl", provenance.UpstreamUrl);
}
}
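
As a usage note, the provenance factories plus CanonicalId give consumers a stable way to deduplicate symbols across sources. A consumer-side sketch, reusing only calls exercised above; the store, provider, timeProvider and cancellationToken variables are assumed to come from the caller's context.

// Sketch: building a symbol from an OSV observation, storing it, and deduplicating by canonical id.
var provenance = AffectedSymbolProvenance.FromOsv(
    observationHash: "sha256:abc123",
    fetchedAt: timeProvider.GetUtcNow(),
    ingestJobId: "job-001",
    osvId: "GHSA-1234-5678-9abc");

var symbol = AffectedSymbol.Function(
    tenantId: "tenant-1",
    advisoryId: "CVE-2024-0001",
    observationId: "obs-001",
    symbol: "lodash.template",
    provenance: provenance,
    extractedAt: timeProvider.GetUtcNow(),
    purl: "pkg:npm/lodash@4.17.21",
    module: "lodash");

await store.StoreAsync([symbol], cancellationToken);

var set = await provider.GetByAdvisoryAsync("tenant-1", "CVE-2024-0001", cancellationToken);
var distinct = set.Symbols
    .GroupBy(s => s.CanonicalId)   // e.g. "lodash::lodash.template"
    .Select(g => g.First())
    .ToList();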

View File

@@ -11,7 +11,7 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Orchestrator;
using StellaOps.Concelier.Core.Orchestration;
using StellaOps.Concelier.WebService;
using StellaOps.Concelier.WebService.Options;
using Xunit;
@@ -53,7 +53,7 @@ public sealed class OrchestratorTestWebAppFactory : WebApplicationFactory<Progra
builder.ConfigureServices(services =>
{
services.RemoveAll<IOrchestratorRegistryStore>();
services.AddSingleton<IOrchestratorRegistryStore, InMemoryOrchestratorStore>();
services.AddSingleton<IOrchestratorRegistryStore, InMemoryOrchestratorRegistryStore>();
// Pre-bind options to keep Program from trying to rebind/validate during tests.
services.RemoveAll<ConcelierOptions>();
@@ -155,42 +155,3 @@ public sealed class OrchestratorEndpointsTests : IClassFixture<OrchestratorTestW
}
}
internal sealed class InMemoryOrchestratorStore : IOrchestratorRegistryStore
{
private readonly Dictionary<(string Tenant, string ConnectorId), OrchestratorRegistryRecord> _registry = new();
private readonly List<OrchestratorHeartbeatRecord> _heartbeats = new();
private readonly List<OrchestratorCommandRecord> _commands = new();
public Task UpsertAsync(OrchestratorRegistryRecord record, CancellationToken cancellationToken)
{
_registry[(record.Tenant, record.ConnectorId)] = record;
return Task.CompletedTask;
}
public Task<OrchestratorRegistryRecord?> GetAsync(string tenant, string connectorId, CancellationToken cancellationToken)
{
_registry.TryGetValue((tenant, connectorId), out var record);
return Task.FromResult(record);
}
public Task EnqueueCommandAsync(OrchestratorCommandRecord command, CancellationToken cancellationToken)
{
_commands.Add(command);
return Task.CompletedTask;
}
public Task<IReadOnlyList<OrchestratorCommandRecord>> GetPendingCommandsAsync(string tenant, string connectorId, Guid runId, long? afterSequence, CancellationToken cancellationToken)
{
var items = _commands
.Where(c => c.Tenant == tenant && c.ConnectorId == connectorId && c.RunId == runId && (afterSequence is null || c.Sequence > afterSequence))
.ToList()
.AsReadOnly();
return Task.FromResult<IReadOnlyList<OrchestratorCommandRecord>>(items);
}
public Task AppendHeartbeatAsync(OrchestratorHeartbeatRecord heartbeat, CancellationToken cancellationToken)
{
_heartbeats.Add(heartbeat);
return Task.CompletedTask;
}
}