up teams work
Some checks failed
Feedser CI / build-and-test (push) Has been cancelled

This commit is contained in:
2025-10-06 00:13:02 +03:00
parent b0c3fa10fb
commit b6ef66e057
518 changed files with 43972 additions and 654 deletions

View File

@@ -0,0 +1,29 @@
name: Feedser CI
on:
  push:
    branches: ["main", "develop"]
  pull_request:
    branches: ["main", "develop"]
jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
      - name: Setup .NET 10 preview
        uses: actions/setup-dotnet@v4
        with:
          # Fully-qualified preview version pins the exact SDK build; preview
          # SDKs install fine when spelled out explicitly.
          # `include-prerelease` was a setup-dotnet v1 input and is not
          # recognized by v4, so it has been removed.
          dotnet-version: 10.0.100-rc.1.25451.107
      - name: Restore dependencies
        run: dotnet restore src/StellaOps.Feedser/StellaOps.Feedser.sln
      - name: Build
        run: dotnet build src/StellaOps.Feedser/StellaOps.Feedser.sln --configuration Release --no-restore -warnaserror
      - name: Test
        run: dotnet test src/StellaOps.Feedser/StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --configuration Release --no-restore --logger "trx;LogFileName=feedser-tests.trx"

View File

@@ -0,0 +1,87 @@
name: Feedser Tests CI
on:
  push:
    paths:
      # NOTE(review): the other workflow references src/StellaOps.Feedser —
      # confirm whether these filters should be prefixed with src/.
      - 'StellaOps.Feedser/**'
      - '.gitea/workflows/feedser-tests.yml'
  pull_request:
    paths:
      - 'StellaOps.Feedser/**'
      - '.gitea/workflows/feedser-tests.yml'
jobs:
  advisory-store-performance:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100-rc.1
      - name: Restore dependencies
        working-directory: StellaOps.Feedser
        run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
      - name: Run advisory store performance test
        working-directory: StellaOps.Feedser
        run: |
          set -euo pipefail
          dotnet test \
            StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj \
            --filter "FullyQualifiedName~AdvisoryStorePerformanceTests" \
            --logger:"console;verbosity=detailed" | tee performance.log
      - name: Upload performance log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: advisory-store-performance-log
          path: StellaOps.Feedser/performance.log
  full-test-suite:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up .NET SDK
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: 10.0.100-rc.1
      - name: Restore dependencies
        working-directory: StellaOps.Feedser
        run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
      # `dotnet test --no-build` below requires compiled binaries; without this
      # step the timed run fails (only restore had run). Building here also
      # keeps compilation time out of the wall-clock baseline measurement.
      - name: Build test project
        working-directory: StellaOps.Feedser
        run: dotnet build StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --no-restore
      - name: Run full test suite with baseline guard
        working-directory: StellaOps.Feedser
        env:
          BASELINE_SECONDS: "19.8"
          TOLERANCE_PERCENT: "25"
        run: |
          set -euo pipefail
          start=$(date +%s)
          dotnet test StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --no-build | tee full-tests.log
          end=$(date +%s)
          duration=$((end-start))
          echo "Full test duration: ${duration}s"
          export DURATION_SECONDS="$duration"
          # Use python3 explicitly: the bare `python` command is not guaranteed
          # to exist on all runner images.
          python3 - <<'PY'
          import os, sys
          duration = float(os.environ["DURATION_SECONDS"])
          baseline = float(os.environ["BASELINE_SECONDS"])
          tolerance = float(os.environ["TOLERANCE_PERCENT"])
          threshold = baseline * (1 + tolerance / 100)
          print(f"Baseline {baseline:.1f}s, threshold {threshold:.1f}s, observed {duration:.1f}s")
          if duration > threshold:
              sys.exit(f"Full test duration {duration:.1f}s exceeded threshold {threshold:.1f}s")
          PY
      - name: Upload full test log
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: full-test-suite-log
          path: StellaOps.Feedser/full-tests.log

46
src/Jobs.cs Normal file
View File

@@ -0,0 +1,46 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Feedser.Core.Jobs;
namespace StellaOps.Feedser.Source.Vndr.Oracle;
/// <summary>
/// Job kind identifiers for the Oracle connector's pipeline stages, following
/// the scheduler's "source:&lt;name&gt;:&lt;verb&gt;" naming convention.
/// </summary>
internal static class OracleJobKinds
{
    /// <summary>Kind for the fetch stage (downloads raw advisory documents).</summary>
    public const string Fetch = "source:vndr-oracle:fetch";
    /// <summary>Kind for the parse stage (turns documents into DTOs).</summary>
    public const string Parse = "source:vndr-oracle:parse";
    /// <summary>Kind for the map stage (turns DTOs into canonical advisories).</summary>
    public const string Map = "source:vndr-oracle:map";
}
/// <summary>
/// Scheduler job that delegates to the Oracle connector's fetch stage.
/// </summary>
internal sealed class OracleFetchJob : IJob
{
    private readonly OracleConnector _oracle;

    public OracleFetchJob(OracleConnector connector)
    {
        _oracle = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    /// <summary>Runs the fetch stage using the services from the job context.</summary>
    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _oracle.FetchAsync(context.Services, cancellationToken);
    }
}
/// <summary>
/// Scheduler job that delegates to the Oracle connector's parse stage.
/// </summary>
internal sealed class OracleParseJob : IJob
{
    private readonly OracleConnector _oracle;

    public OracleParseJob(OracleConnector connector)
    {
        _oracle = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    /// <summary>Runs the parse stage using the services from the job context.</summary>
    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _oracle.ParseAsync(context.Services, cancellationToken);
    }
}
/// <summary>
/// Scheduler job that delegates to the Oracle connector's map stage.
/// </summary>
internal sealed class OracleMapJob : IJob
{
    private readonly OracleConnector _oracle;

    public OracleMapJob(OracleConnector connector)
    {
        _oracle = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    /// <summary>Runs the map stage using the services from the job context.</summary>
    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _oracle.MapAsync(context.Services, cancellationToken);
    }
}

293
src/OracleConnector.cs Normal file
View File

@@ -0,0 +1,293 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Source.Common.Fetch;
using StellaOps.Feedser.Source.Vndr.Oracle.Configuration;
using StellaOps.Feedser.Source.Vndr.Oracle.Internal;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Documents;
using StellaOps.Feedser.Storage.Mongo.Dtos;
using StellaOps.Feedser.Storage.Mongo.PsirtFlags;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Vndr.Oracle;
/// <summary>
/// Feed connector for Oracle security advisories. Implements the three
/// pipeline stages — fetch raw documents, parse them into DTOs, and map the
/// DTOs to canonical advisories plus PSIRT flags — resuming each stage from a
/// cursor persisted in source state.
/// </summary>
public sealed class OracleConnector : IFeedConnector
{
    // camelCase property names and omitted nulls keep the persisted DTO JSON
    // compact and stable across serializer runs.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
    };

    private readonly SourceFetchService _fetchService;
    private readonly RawDocumentStorage _rawDocumentStorage;
    private readonly IDocumentStore _documentStore;
    private readonly IDtoStore _dtoStore;
    private readonly IAdvisoryStore _advisoryStore;
    private readonly IPsirtFlagStore _psirtFlagStore;
    private readonly ISourceStateRepository _stateRepository;
    private readonly OracleOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<OracleConnector> _logger;

    /// <summary>
    /// Creates the connector. Every collaborator is required except
    /// <paramref name="timeProvider"/>, which defaults to
    /// <see cref="TimeProvider.System"/>. Options are validated eagerly so a
    /// misconfigured source fails at construction rather than mid-run.
    /// </summary>
    public OracleConnector(
        SourceFetchService fetchService,
        RawDocumentStorage rawDocumentStorage,
        IDocumentStore documentStore,
        IDtoStore dtoStore,
        IAdvisoryStore advisoryStore,
        IPsirtFlagStore psirtFlagStore,
        ISourceStateRepository stateRepository,
        IOptions<OracleOptions> options,
        TimeProvider? timeProvider,
        ILogger<OracleConnector> logger)
    {
        _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
        _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
        _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
        _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _psirtFlagStore = psirtFlagStore ?? throw new ArgumentNullException(nameof(psirtFlagStore));
        _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
        // Guards both a null IOptions wrapper and a null Value payload.
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
        _options.Validate();
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Stable source identifier shared with documents, state, and jobs.</summary>
    public string SourceName => VndrOracleConnectorPlugin.SourceName;

    /// <summary>
    /// Fetch stage: downloads each configured advisory URI (conditionally,
    /// reusing any previously stored ETag/Last-Modified validators) and queues
    /// newly fetched documents for parsing.
    /// </summary>
    public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        var pendingDocuments = cursor.PendingDocuments.ToList();
        var pendingMappings = cursor.PendingMappings.ToList();
        var now = _timeProvider.GetUtcNow();
        foreach (var uri in _options.AdvisoryUris)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                var advisoryId = DeriveAdvisoryId(uri);
                // NOTE(review): title is a placeholder derived from the URL slug
                // and `published` is the fetch time, not the real publication
                // date — presumably refined during parse; confirm.
                var title = advisoryId.Replace('-', ' ');
                var published = now;
                var metadata = OracleDocumentMetadata.CreateMetadata(advisoryId, title, published);
                var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, uri.ToString(), cancellationToken).ConfigureAwait(false);
                var request = new SourceFetchRequest(OracleOptions.HttpClientName, SourceName, uri)
                {
                    Metadata = metadata,
                    // Conditional GET using validators from the last stored copy.
                    ETag = existing?.Etag,
                    LastModified = existing?.LastModified,
                    AcceptHeaders = new[] { "text/html", "application/xhtml+xml", "text/plain;q=0.5" },
                };
                var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
                if (!result.IsSuccess || result.Document is null)
                {
                    // Not modified or unsuccessful — nothing new to queue.
                    continue;
                }
                if (!pendingDocuments.Contains(result.Document.Id))
                {
                    pendingDocuments.Add(result.Document.Id);
                }
                // Optional politeness delay between requests to the source.
                if (_options.RequestDelay > TimeSpan.Zero)
                {
                    await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
                }
            }
            catch (Exception ex)
            {
                // Record the failure with a 10-minute backoff and abort this
                // run; the cursor below is only persisted on full success.
                _logger.LogError(ex, "Oracle fetch failed for {Uri}", uri);
                await _stateRepository.MarkFailureAsync(SourceName, _timeProvider.GetUtcNow(), TimeSpan.FromMinutes(10), ex.Message, cancellationToken).ConfigureAwait(false);
                throw;
            }
        }
        var updatedCursor = cursor
            .WithPendingDocuments(pendingDocuments)
            .WithPendingMappings(pendingMappings)
            .WithLastProcessed(now);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Parse stage: loads each pending document's raw payload from GridFS,
    /// parses it into an Oracle DTO, persists the DTO, and moves the document
    /// to the pending-map queue. Unreadable or unparseable documents are
    /// marked failed and dropped from both queues.
    /// </summary>
    public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingDocuments.Count == 0)
        {
            return;
        }
        var pendingDocuments = cursor.PendingDocuments.ToList();
        var pendingMappings = cursor.PendingMappings.ToList();
        foreach (var documentId in cursor.PendingDocuments)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
            if (document is null)
            {
                // Document record disappeared; drop it from both queues.
                pendingDocuments.Remove(documentId);
                pendingMappings.Remove(documentId);
                continue;
            }
            if (!document.GridFsId.HasValue)
            {
                _logger.LogWarning("Oracle document {DocumentId} missing GridFS payload", document.Id);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                pendingDocuments.Remove(documentId);
                pendingMappings.Remove(documentId);
                continue;
            }
            OracleDto dto;
            try
            {
                var metadata = OracleDocumentMetadata.FromDocument(document);
                var content = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
                var html = System.Text.Encoding.UTF8.GetString(content);
                dto = OracleParser.Parse(html, metadata);
            }
            catch (Exception ex)
            {
                // Parse failures quarantine the document rather than abort the run.
                _logger.LogError(ex, "Oracle parse failed for document {DocumentId}", document.Id);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                pendingDocuments.Remove(documentId);
                pendingMappings.Remove(documentId);
                continue;
            }
            // Round-trip through JSON so the stored BSON payload matches the
            // DTO's camelCase serialization contract.
            var json = JsonSerializer.Serialize(dto, SerializerOptions);
            var payload = BsonDocument.Parse(json);
            var validatedAt = _timeProvider.GetUtcNow();
            var existingDto = await _dtoStore.FindByDocumentIdAsync(document.Id, cancellationToken).ConfigureAwait(false);
            // Reuse the existing DTO record id when re-parsing the same document.
            var dtoRecord = existingDto is null
                ? new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "oracle.advisory.v1", payload, validatedAt)
                : existingDto with
                {
                    Payload = payload,
                    SchemaVersion = "oracle.advisory.v1",
                    ValidatedAt = validatedAt,
                };
            await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
            // Advance the document from the parse queue to the map queue.
            pendingDocuments.Remove(documentId);
            if (!pendingMappings.Contains(documentId))
            {
                pendingMappings.Add(documentId);
            }
        }
        var updatedCursor = cursor
            .WithPendingDocuments(pendingDocuments)
            .WithPendingMappings(pendingMappings);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Map stage: deserializes each pending DTO and upserts the resulting
    /// canonical advisory and PSIRT flag. Payloads that fail to deserialize
    /// mark the document failed and are dropped from the queue.
    /// </summary>
    public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingMappings.Count == 0)
        {
            return;
        }
        var pendingMappings = cursor.PendingMappings.ToList();
        foreach (var documentId in cursor.PendingMappings)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
            var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
            if (dtoRecord is null || document is null)
            {
                // Either half of the pair is gone; nothing to map.
                pendingMappings.Remove(documentId);
                continue;
            }
            OracleDto? dto;
            try
            {
                var json = dtoRecord.Payload.ToJson();
                dto = JsonSerializer.Deserialize<OracleDto>(json, SerializerOptions);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Oracle DTO deserialization failed for document {DocumentId}", documentId);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                pendingMappings.Remove(documentId);
                continue;
            }
            if (dto is null)
            {
                _logger.LogWarning("Oracle DTO payload deserialized as null for document {DocumentId}", documentId);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                pendingMappings.Remove(documentId);
                continue;
            }
            var mappedAt = _timeProvider.GetUtcNow();
            var (advisory, flag) = OracleMapper.Map(dto, SourceName, mappedAt);
            // Upserts keep the stage idempotent across re-runs.
            await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
            await _psirtFlagStore.UpsertAsync(flag, cancellationToken).ConfigureAwait(false);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
        }
        var updatedCursor = cursor.WithPendingMappings(pendingMappings);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    // Rehydrates the cursor from persisted source state; the record (and its
    // Cursor) may be absent, which FromBson receives as null.
    private async Task<OracleCursor> GetCursorAsync(CancellationToken cancellationToken)
    {
        var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
        return OracleCursor.FromBson(record?.Cursor);
    }

    // Persists the cursor along with a completion timestamp.
    private async Task UpdateCursorAsync(OracleCursor cursor, CancellationToken cancellationToken)
    {
        var completedAt = _timeProvider.GetUtcNow();
        await _stateRepository.UpdateCursorAsync(SourceName, cursor.ToBsonDocument(), completedAt, cancellationToken).ConfigureAwait(false);
    }

    // Derives a slug-like advisory id from the last URL path segment, falling
    // back to the absolute URI when no usable segment exists. Dots are
    // replaced with dashes, so a ".html" suffix becomes "-html".
    private static string DeriveAdvisoryId(Uri uri)
    {
        var segments = uri.Segments;
        if (segments.Length == 0)
        {
            return uri.AbsoluteUri;
        }
        var slug = segments[^1].Trim('/');
        if (string.IsNullOrWhiteSpace(slug))
        {
            return uri.AbsoluteUri;
        }
        return slug.Replace('.', '-');
    }
}

View File

@@ -0,0 +1,21 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Source.Vndr.Oracle;
/// <summary>
/// Plugin entry point that exposes the Oracle vendor connector to the host.
/// </summary>
public sealed class VndrOracleConnectorPlugin : IConnectorPlugin
{
    /// <summary>Canonical source name shared by documents, state, and jobs.</summary>
    public const string SourceName = "vndr-oracle";

    /// <inheritdoc />
    public string Name => SourceName;

    /// <inheritdoc />
    public bool IsAvailable(IServiceProvider services)
    {
        // The plugin is usable only when the connector was registered in DI.
        return services.GetService<OracleConnector>() is not null;
    }

    /// <inheritdoc />
    public IFeedConnector Create(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        return services.GetRequiredService<OracleConnector>();
    }
}

View File

@@ -0,0 +1,54 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Source.Vndr.Oracle.Configuration;
namespace StellaOps.Feedser.Source.Vndr.Oracle;
/// <summary>
/// Registers the Oracle connector, its three pipeline jobs, and their
/// scheduler definitions into the service collection.
/// </summary>
public sealed class OracleDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    private const string ConfigurationSection = "feedser:sources:oracle";

    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        // Bind connector options from configuration and fail fast on invalid settings.
        services.AddOracleConnector(connectorOptions =>
        {
            configuration.GetSection(ConfigurationSection).Bind(connectorOptions);
            connectorOptions.Validate();
        });

        services.AddTransient<OracleFetchJob>();
        services.AddTransient<OracleParseJob>();
        services.AddTransient<OracleMapJob>();

        // Add scheduler definitions after all other configuration has run.
        services.PostConfigure<JobSchedulerOptions>(schedulerOptions =>
        {
            AddDefinitionIfMissing(schedulerOptions, OracleJobKinds.Fetch, typeof(OracleFetchJob));
            AddDefinitionIfMissing(schedulerOptions, OracleJobKinds.Parse, typeof(OracleParseJob));
            AddDefinitionIfMissing(schedulerOptions, OracleJobKinds.Map, typeof(OracleMapJob));
        });

        return services;
    }

    // Registers a job definition only when the kind is absent, so definitions
    // supplied elsewhere keep precedence.
    private static void AddDefinitionIfMissing(JobSchedulerOptions schedulerOptions, string kind, Type jobType)
    {
        if (!schedulerOptions.Definitions.ContainsKey(kind))
        {
            schedulerOptions.Definitions[kind] = new JobDefinition(
                kind,
                jobType,
                schedulerOptions.DefaultTimeout,
                schedulerOptions.DefaultLeaseDuration,
                CronExpression: null,
                Enabled: true);
        }
    }
}

View File

@@ -0,0 +1,128 @@
## Autonomous Agent Instructions
Before you act, read `/src/StellaOps.Feedser/AGENTS.md`, then the `AGENTS.md` and `TASKS.md` files in each working directory you were given.
Boundaries:
- You operate only in the working directories you were given, unless a dependency requires you to work in a shared directory; in that case, ask for confirmation first.
Do:
- Keep endpoints small, deterministic, and cancellation-aware.
- Improve logs/metrics as per tasks.
- Update `TASKS.md` when moving tasks forward.
- When you are done with all tasks, state explicitly that you are done.
- Assume the role described in the working directory's `AGENTS.md`; if no role is available, take the role of the CTO of StellaOps in its early stages.
Output:
- Summary of changes and any cross-module requests.
# StellaOps — Agent Operations Guide (Master)
> Purpose: Orient all human + autonomous agents to the StellaOps platform, its data flows, component boundaries, and rules of engagement so teams can work **in parallel on the same branch** with minimal contention.
---
## 1) What is StellaOps?
**StellaOps** is a sovereign/offline-first container & infrastructure security platform. Its core loop:
1. **Intelligence** — Ingest vulnerability advisories from primary sources.
2. **Normalization & Merge** — Reconcile into a canonical, deduplicated database with deterministic precedence.
3. **Distribution** — Export a Trivy-compatible database (OCI artifact) and optional vuln-list JSON for self-hosted scanners.
4. **Scanning & Policy** — Scanners consume that DB; policy engines gate deployments; artifacts may be signed downstream.
This repository's focus is the **Feedser** service (ingest/merge/export).
---
## 2) Highlevel architecture (technical overview)
```
```
[Primary Sources: NVD, GHSA/OSV, Distros, PSIRTs, CERTs, KEV, ICS]
(Fetch + Validate DTOs)
[Normalizer → Canonical Advisory]
(Alias graph + Precedence)
[FeedMerge Store (MongoDB)]
┌──────────────┴──────────────┐
▼ ▼
[Export: vulnlist JSON] [Packager: Trivy DB (OCI)]
│ │
└────────────► Distribution (ORAS / offline bundle)
```
```
**Key invariants**
- **Deterministic**: same inputs → same canonical JSON → same export digests.
- **Precedence**: **distro OVAL/PSIRT > NVD** for OS packages; **KEV only flags exploitation**; regional CERTs enrich text/refs.
- **Provenance** everywhere: source document, extraction method (`parser|llm`), and timestamps.
You have to read `./ARCHITECTURE.md` for more information.
---
## 4) Main agents (roles, interactions, scope)
- **BE-Base (Platform & Pipeline)**
Owns DI, plugin host, job scheduler/coordinator, configuration binding, minimal API endpoints, and Mongo bootstrapping.
- **BE-Conn-X (Connectors)**
One agent per source family (NVD, Red Hat, Ubuntu, Debian, SUSE, GHSA, OSV, PSIRTs, CERTs, KEV, ICS). Implements fetch/parse/map with incremental watermarks.
- **BE-Merge (Canonical Merge & Dedupe)**
Identity graph, precedence policies, canonical JSON serializer, and deterministic hashing (`merge_event`).
- **BE-Export (JSON & Trivy DB)**
Deterministic export trees, Trivy DB packaging, optional ORAS push, and offline bundle.
- **QA (Validation & Observability)**
Schema tests, fixture goldens, determinism checks, metrics/logs/traces, e2e reproducibility runs.
- **DevEx/Docs**
Maintains this agent framework, templates, and per-directory guides; assists parallelization and reviews.
**Interaction sketch**
- Connectors → **Core** jobs → **Storage.Mongo**
- **Merge** refines canonical records; **Exporters** read canonical data to produce artifacts
- **QA** spans all layers with fixtures/metrics and determinism checks
---
## 5) Key technologies & integrations
- **Runtime**: .NET 10 (`net10.0`) preview SDK; C# latest preview features.
- **Data**: MongoDB (canonical store and job/export state).
- **Packaging**: Trivy DB (BoltDB inside `db.tar.gz`), vuln-list JSON (optional), ORAS for OCI push.
- **Observability**: structured logs, counters, and (optional) OpenTelemetry traces.
- **Ops posture**: offlinefirst, allowlist for remote hosts, strict schema validation, gated LLM fallback (only where explicitly configured).
---
## 6) Data flow (endtoend)
1. **Fetch**: connectors request source windows with retries/backoff, persist raw documents with SHA256/ETag metadata.
2. **Parse**: validate to DTOs (schemachecked), quarantine failures.
3. **Map**: normalize to canonical advisories (aliases, affected ranges with NEVRA/EVR/SemVer, references, provenance).
4. **Merge**: enforce precedence and determinism; track before/after hashes.
5. **Export**: JSON tree and/or Trivy DB; package and (optionally) push; write export state.
---
## 7) Work-in-parallel rules (important)
- **Directory ownership**: Each agent works **only inside its module directory**. Cross-module edits require a brief handshake in issues/PR description.
- **Scoping**: Use each module's `AGENTS.md` and `TASKS.md` to plan; autonomous agents must read `src/AGENTS.md` and the module docs before acting.
- **Determinism**: Sort keys, normalize timestamps to UTC ISO-8601, avoid non-deterministic data in exports and tests.
- **Status tracking**: Update your module's `TASKS.md` as you progress (TODO → DOING → DONE/BLOCKED).
- **Tests**: Add/extend fixtures and unit tests per change; never regress determinism or precedence.
---
## 8) Glossary (quick)
- **OVAL** — Vendor/distro security definition format; authoritative for OS packages.
- **NEVRA / EVR** — RPM and Debian version semantics for OS packages.
- **PURL / SemVer** — Coordinates and version semantics for OSS ecosystems.
- **KEV** — Known Exploited Vulnerabilities (flag only).

View File

@@ -0,0 +1,191 @@
@ -1,191 +0,0 @@
# ARCHITECTURE.md — **StellaOps.Feedser**
> **Goal**: Build a sovereign-ready, self-hostable **feed-merge service** that ingests authoritative vulnerability sources, normalizes and de-duplicates them into **MongoDB**, and exports **JSON** and **Trivy-compatible DB** artifacts.
> **Form factor**: Long-running **Web Service** with **REST APIs** (health, status, control) and an embedded **internal cron scheduler**.
> **No signing inside Feedser** (signing is a separate pipeline step).
> **Runtime SDK baseline**: .NET 10 Preview 7 (SDK 10.0.100-preview.7.25380.108) targeting `net10.0`, aligned with the deployed api.stella-ops.org service.
> **Three explicit stages**:
>
> 1. **Source Download** → raw documents.
> 2. **Merge + Dedupe + Normalization** → MongoDB canonical.
> 3. **Export** → JSON or TrivyDB (full or delta), then (externally) sign/publish.
---
## 1) Naming & Solution Layout
**Solution root**: `StellaOps.Feedser`
**Source connectors** namespace prefix: `StellaOps.Feedser.Source.*`
**Exporters**:
* `StellaOps.Feedser.Exporter.Json`
* `StellaOps.Feedser.Exporter.TrivyDb`
**Projects** (`/src`):
```
StellaOps.Feedser.WebService/ # ASP.NET Core (Minimal API, net10.0 preview) WebService + embedded scheduler
StellaOps.Feedser.Core/ # Domain models, pipelines, merge/dedupe engine, jobs orchestration
StellaOps.Feedser.Models/ # Canonical POCOs, JSON Schemas, enums
StellaOps.Feedser.Storage.Mongo/ # Mongo repositories, GridFS access, indexes, resume "flags"
StellaOps.Feedser.Source.Common/ # HTTP clients, rate-limiters, schema validators, parsers utils
StellaOps.Feedser.Source.Cve/
StellaOps.Feedser.Source.Nvd/
StellaOps.Feedser.Source.Ghsa/
StellaOps.Feedser.Source.Osv/
StellaOps.Feedser.Source.Jvn/
StellaOps.Feedser.Source.CertCc/
StellaOps.Feedser.Source.Kev/
StellaOps.Feedser.Source.Kisa/
StellaOps.Feedser.Source.CertIn/
StellaOps.Feedser.Source.CertFr/
StellaOps.Feedser.Source.CertBund/
StellaOps.Feedser.Source.Acsc/
StellaOps.Feedser.Source.Cccs/
StellaOps.Feedser.Source.Ru.Bdu/ # HTML→schema with LLM fallback (gated)
StellaOps.Feedser.Source.Ru.Nkcki/ # PDF/HTML bulletins → structured
StellaOps.Feedser.Source.Vndr.Msrc/
StellaOps.Feedser.Source.Vndr.Cisco/
StellaOps.Feedser.Source.Vndr.Oracle/
StellaOps.Feedser.Source.Vndr.Adobe/
StellaOps.Feedser.Source.Vndr.Apple/
StellaOps.Feedser.Source.Vndr.Chromium/
StellaOps.Feedser.Source.Vndr.Vmware/
StellaOps.Feedser.Source.Distro.RedHat/
StellaOps.Feedser.Source.Distro.Ubuntu/
StellaOps.Feedser.Source.Distro.Debian/
StellaOps.Feedser.Source.Distro.Suse/
StellaOps.Feedser.Source.Ics.Cisa/
StellaOps.Feedser.Source.Ics.Kaspersky/
StellaOps.Feedser.Normalization/ # Canonical mappers, validators, version-range normalization
StellaOps.Feedser.Merge/ # Identity graph, precedence, deterministic merge
StellaOps.Feedser.Exporter.Json/
StellaOps.Feedser.Exporter.TrivyDb/
StellaOps.Feedser.Tests/ # Unit, component, integration & golden fixtures
```
---
## 2) Runtime Shape
**Process**: single service (`StellaOps.Feedser.WebService`)
* `Program.cs`: top-level entry using **Generic Host**, **DI**, **Options** binding from `appsettings.json` + environment + optional `feedser.yaml`.
* Built-in **scheduler** (cron-like) + **job manager** with **distributed locks** in Mongo to prevent overlaps, enforce timeouts, allow cancel/kill.
* **REST APIs** for health/readiness/progress/trigger/kill/status.
**Key NuGet concepts** (indicative): `MongoDB.Driver`, `Polly` (retry/backoff), `System.Threading.Channels`, `Microsoft.Extensions.Http`, `Microsoft.Extensions.Hosting`, `Serilog`, `OpenTelemetry`.
---
## 3) Data Storage — **MongoDB** (single source of truth)
**Database**: `feedser`
**Write concern**: `majority` for merge/export state, `acknowledged` for raw docs.
**Collections** (with "flags"/resume points):
* `source`
* `_id`, `name`, `type`, `baseUrl`, `auth`, `notes`.
* `source_state`
* Keys: `sourceName` (unique), `enabled`, `cursor`, `lastSuccess`, `failCount`, `backoffUntil`, `paceOverrides`, `paused`.
* Drives incremental fetch/parse/map resume and operator pause/pace controls.
* `document`
* `_id`, `sourceName`, `uri`, `fetchedAt`, `sha256`, `contentType`, `status`, `metadata`, `gridFsId`, `etag`, `lastModified`.
* Index `{sourceName:1, uri:1}` unique; optional TTL for superseded versions.
* `dto`
* `_id`, `sourceName`, `documentId`, `schemaVer`, `payload` (BSON), `validatedAt`.
* Index `{sourceName:1, documentId:1}`.
* `advisory`
* `_id`, `advisoryKey`, `title`, `summary`, `lang`, `published`, `modified`, `severity`, `exploitKnown`.
* Unique `{advisoryKey:1}` plus indexes on `modified` and `published`.
* `alias`
* `advisoryId`, `scheme`, `value` with index `{scheme:1, value:1}`.
* `affected`
* `advisoryId`, `platform`, `name`, `versionRange`, `cpe`, `purl`, `fixedBy`, `introducedVersion`.
* Index `{platform:1, name:1}`, `{advisoryId:1}`.
* `reference`
* `advisoryId`, `url`, `kind`, `sourceTag` (e.g., advisory/patch/kb).
* Flags collections: `kev_flag`, `ru_flags`, `jp_flags`, `psirt_flags` keyed by `advisoryId`.
* `merge_event`
* `_id`, `advisoryKey`, `beforeHash`, `afterHash`, `mergedAt`, `inputs` (document ids).
* `export_state`
* `_id` (`json`/`trivydb`), `baseExportId`, `baseDigest`, `lastFullDigest`, `lastDeltaDigest`, `exportCursor`, `targetRepo`, `exporterVersion`.
* `locks`
* `_id` (`jobKey`), `holder`, `acquiredAt`, `heartbeatAt`, `leaseMs`, `ttlAt` (TTL index cleans dead locks).
* `jobs`
* `_id`, `type`, `args`, `state`, `startedAt`, `endedAt`, `error`, `owner`, `heartbeatAt`, `timeoutMs`.
**GridFS buckets**: `fs.documents` for raw large payloads; referenced by `document.gridFsId`.
---
## 4) Job & Scheduler Model
* Scheduler stores cron expressions per source/exporter in config; persists next-run pointers in Mongo.
* Jobs acquire locks (`locks` collection) to ensure singleton execution per source/exporter.
* Supports manual triggers via API endpoints (`POST /jobs/{type}`) and pause/resume toggles per source.
---
## 5) Connector Contracts
Connectors implement:
```csharp
public interface IFeedConnector {
string SourceName { get; }
Task FetchAsync(IServiceProvider sp, CancellationToken ct);
Task ParseAsync(IServiceProvider sp, CancellationToken ct);
Task MapAsync(IServiceProvider sp, CancellationToken ct);
}
```
* Fetch populates `document` rows respecting rate limits, conditional GET, and `source_state.cursor`.
* Parse validates schema (JSON Schema, XSD) and writes sanitized DTO payloads.
* Map produces canonical advisory rows + provenance entries; must be idempotent.
* Base helpers in `StellaOps.Feedser.Source.Common` provide HTTP clients, retry policies, and watermark utilities.
---
## 6) Merge & Normalization
* Canonical model stored in `StellaOps.Feedser.Models` with serialization contracts used by storage/export layers.
* `StellaOps.Feedser.Normalization` handles NEVRA/EVR/PURL range parsing, CVSS normalization, localization.
* `StellaOps.Feedser.Merge` builds alias graphs keyed by CVE first, then falls back to vendor/regional IDs.
* Precedence rules: PSIRT/OVAL overrides generic ranges; KEV only toggles exploitation; regional feeds enrich severity but don't override vendor truth.
* Determinism enforced via canonical JSON hashing logged in `merge_event`.
---
## 7) Exporters
* JSON exporter mirrors `aquasecurity/vuln-list` layout with deterministic ordering and reproducible timestamps.
* Trivy DB exporter initially shells out to `trivy-db` builder; later will emit BoltDB directly.
* `StellaOps.Feedser.Storage.Mongo` provides cursors for delta exports based on `export_state.exportCursor`.
* Export jobs produce OCI tarballs (layer media type `application/vnd.aquasec.trivy.db.layer.v1.tar+gzip`) and optionally push via ORAS.
---
## 8) Observability
* Serilog structured logging with enrichment fields (`source`, `uri`, `stage`, `durationMs`).
* OpenTelemetry traces around fetch/parse/map/export; metrics for rate limit hits, schema failures, dedupe ratios, package size.
* Prometheus scraping endpoint served by WebService.
---
## 9) Security Considerations
* Offline-first: connectors only reach allowlisted hosts.
* BDU LLM fallback gated by config flag; logs audit trail with confidence score.
* No secrets written to logs; secrets loaded via environment or mounted files.
* Signing handled outside Feedser pipeline.
---
## 10) Deployment Notes
* Default storage MongoDB; for air-gapped, bundle Mongo image + seeded data backup.
* Horizontal scale achieved via multiple web service instances sharing Mongo locks.
* Provide `feedser.yaml` template describing sources, rate limits, and export settings.

View File

@@ -0,0 +1,27 @@
# AGENTS
## Role
Job orchestration and lifecycle. Registers job definitions, schedules execution, triggers runs, reports status for connectors and exporters.
## Scope
- Contracts: IJob (execute with CancellationToken), JobRunStatus, JobTriggerOutcome/Result.
- Registration: JobSchedulerBuilder.AddJob<T>(kind, cronExpression?, timeout?, leaseDuration?); options recorded in JobSchedulerOptions.
- Plugin host integration discovers IJob providers via registered IDependencyInjectionRoutine implementations.
- Coordination: start/stop, single-flight via storage locks/leases, run bookkeeping (status, timings, errors).
- Triggering: manual/cron/API; parameterized runs; idempotent rejection if already running.
- Surfacing: enumerate definitions, last run, recent runs, active runs to WebService endpoints.
## Participants
- WebService exposes REST endpoints for definitions, runs, active, and trigger.
- Storage.Mongo persists job definitions metadata, run documents, and leases (locks collection).
- Source connectors and Exporters implement IJob and are registered into the scheduler via DI and Plugin routines.
- Models/Merge/Export are invoked indirectly through jobs.
- Plugin host runtime loads dependency injection routines that register job definitions.
## Interfaces & contracts
- Kind naming: family:source:verb (e.g., nvd:fetch, redhat:map, export:trivy-db).
- Timeout and lease duration enforce cancellation and duplicate-prevention.
- TimeProvider used for deterministic timing in tests.
## In/Out of scope
In: job lifecycle, registration, trigger semantics, run metadata.
Out: business logic of connectors/exporters, HTTP handlers (owned by WebService).
## Observability & security expectations
- Metrics: job.run.started/succeeded/failed, job.durationMs, job.concurrent.rejected, job.alreadyRunning.
- Logs: kind, trigger, params hash, lease holder, outcome; redact params containing secrets.
- Honor CancellationToken early and often.

View File

@@ -1,8 +0,0 @@
namespace StellaOps.Feedser.Core;

/// <summary>
/// Marker type for assembly discovery.
/// </summary>
/// <remarks>
/// Carries no behavior; exists so callers can reference this assembly via
/// <c>typeof(FeedserCoreMarker).Assembly</c> without depending on a functional type.
/// </remarks>
public sealed class FeedserCoreMarker
{
}

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Contract implemented by executable job payloads that the scheduler/coordinator triggers.
/// </summary>
public interface IJob
{
    /// <summary>
    /// Executes the job. Implementations should honor <paramref name="cancellationToken"/>
    /// promptly; the coordinator links it to the configured job timeout.
    /// </summary>
    /// <param name="context">Per-run state (run id, kind, trigger, parameters, scoped services).</param>
    /// <param name="cancellationToken">Signals caller cancellation or job timeout.</param>
    Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Entry point for triggering job runs and querying run state; backs the WebService endpoints.
/// </summary>
public interface IJobCoordinator
{
    /// <summary>
    /// Attempts to start a run of the given job <paramref name="kind"/>; the outcome
    /// (accepted, not found, disabled, already running, invalid parameters) is carried
    /// in the returned <see cref="JobTriggerResult"/>.
    /// </summary>
    Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken);

    /// <summary>Lists all registered job definitions.</summary>
    Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken);

    /// <summary>Returns up to <paramref name="limit"/> recent runs, optionally filtered by <paramref name="kind"/>.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken);

    /// <summary>Returns runs that are currently executing.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken);

    /// <summary>Finds a single run by id, or null when unknown.</summary>
    Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken);

    /// <summary>Returns the most recent run for a kind, or null when the kind has never run.</summary>
    Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken);

    /// <summary>Returns the most recent run per kind for the supplied kinds.</summary>
    Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,20 @@
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Persistence abstraction for job run bookkeeping (create/start/complete plus queries).
/// </summary>
public interface IJobStore
{
    /// <summary>Persists a new run record and returns its snapshot.</summary>
    Task<JobRunSnapshot> CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken);

    /// <summary>Marks a run as started; returns the updated snapshot, or null if the transition did not apply.</summary>
    Task<JobRunSnapshot?> TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken);

    /// <summary>Records final status/error for a run; returns the updated snapshot, or null if the transition did not apply.</summary>
    Task<JobRunSnapshot?> TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken);

    /// <summary>Finds a run by id, or null when unknown.</summary>
    Task<JobRunSnapshot?> FindAsync(Guid runId, CancellationToken cancellationToken);

    /// <summary>Returns up to <paramref name="limit"/> recent runs, optionally filtered by <paramref name="kind"/>.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken);

    /// <summary>Returns runs that are currently executing.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken);

    /// <summary>Returns the most recent run for a kind, or null when the kind has never run.</summary>
    Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken);

    /// <summary>Returns the most recent run per kind for the supplied kinds.</summary>
    Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Storage-backed lease primitive used for single-flight job execution across processes.
/// </summary>
public interface ILeaseStore
{
    /// <summary>
    /// Attempts to acquire the lease identified by <paramref name="key"/> for <paramref name="holder"/>.
    /// Returns the lease on success, or null when it could not be acquired (the coordinator
    /// treats null as "job already running").
    /// </summary>
    Task<JobLease?> TryAcquireAsync(string key, string holder, TimeSpan leaseDuration, DateTimeOffset now, CancellationToken cancellationToken);

    /// <summary>Extends an existing lease held by <paramref name="holder"/>; null when the lease could not be refreshed.</summary>
    Task<JobLease?> HeartbeatAsync(string key, string holder, TimeSpan leaseDuration, DateTimeOffset now, CancellationToken cancellationToken);

    /// <summary>Releases the lease if still held by <paramref name="holder"/>.</summary>
    Task<bool> ReleaseAsync(string key, string holder, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,635 @@
using System.Collections;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Globalization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Default <see cref="IJobCoordinator"/>: validates trigger requests against registered
/// definitions, normalizes and hashes trigger parameters, enforces single-flight execution
/// via <see cref="ILeaseStore"/> leases, records run bookkeeping through <see cref="IJobStore"/>,
/// and executes jobs on background tasks while heartbeating the lease.
/// </summary>
public sealed class JobCoordinator : IJobCoordinator
{
    private readonly JobSchedulerOptions _options;
    private readonly IJobStore _jobStore;
    private readonly ILeaseStore _leaseStore;
    private readonly IServiceScopeFactory _scopeFactory;
    private readonly ILogger<JobCoordinator> _logger;
    private readonly ILoggerFactory _loggerFactory;
    private readonly TimeProvider _timeProvider;
    private readonly JobDiagnostics _diagnostics;

    // Identifies this process as the lease holder ("machine:pid"); see BuildHolderId.
    private readonly string _holderId;

    public JobCoordinator(
        IOptions<JobSchedulerOptions> optionsAccessor,
        IJobStore jobStore,
        ILeaseStore leaseStore,
        IServiceScopeFactory scopeFactory,
        ILogger<JobCoordinator> logger,
        ILoggerFactory loggerFactory,
        TimeProvider timeProvider,
        JobDiagnostics diagnostics)
    {
        _options = (optionsAccessor ?? throw new ArgumentNullException(nameof(optionsAccessor))).Value;
        _jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore));
        _leaseStore = leaseStore ?? throw new ArgumentNullException(nameof(leaseStore));
        _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
        _holderId = BuildHolderId();
    }

    /// <summary>
    /// Validates the trigger, acquires the job's lease, persists the run, then schedules
    /// execution on a background task. Returns without waiting for the job to finish.
    /// </summary>
    public async Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken)
    {
        using var triggerActivity = _diagnostics.StartTriggerActivity(kind, trigger);

        // Unknown kinds are rejected up front.
        if (!_options.Definitions.TryGetValue(kind, out var definition))
        {
            var result = JobTriggerResult.NotFound($"Job kind '{kind}' is not registered.");
            triggerActivity?.SetStatus(ActivityStatusCode.Error, result.ErrorMessage);
            triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
            _diagnostics.RecordTriggerRejected(kind, trigger, "not_found");
            return result;
        }

        triggerActivity?.SetTag("job.enabled", definition.Enabled);
        triggerActivity?.SetTag("job.timeout_seconds", definition.Timeout.TotalSeconds);
        triggerActivity?.SetTag("job.lease_seconds", definition.LeaseDuration.TotalSeconds);

        if (!definition.Enabled)
        {
            var result = JobTriggerResult.Disabled($"Job kind '{kind}' is disabled.");
            triggerActivity?.SetStatus(ActivityStatusCode.Ok, "disabled");
            triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
            _diagnostics.RecordTriggerRejected(kind, trigger, "disabled");
            return result;
        }

        // Snapshot caller parameters so later mutation by the caller cannot affect the run.
        parameters ??= new Dictionary<string, object?>();
        var parameterSnapshot = parameters.Count == 0
            ? new Dictionary<string, object?>(StringComparer.Ordinal)
            : new Dictionary<string, object?>(parameters, StringComparer.Ordinal);

        // Normalize values to a JSON-friendly, deterministic shape (see NormalizeParameterValue).
        if (!TryNormalizeParameters(parameterSnapshot, out var normalizedParameters, out var parameterError))
        {
            var message = string.IsNullOrWhiteSpace(parameterError)
                ? "Job trigger parameters contain unsupported values."
                : parameterError;
            triggerActivity?.SetStatus(ActivityStatusCode.Error, message);
            triggerActivity?.SetTag("job.trigger.outcome", JobTriggerOutcome.InvalidParameters.ToString());
            _diagnostics.RecordTriggerRejected(kind, trigger, "invalid_parameters");
            return JobTriggerResult.InvalidParameters(message);
        }

        parameterSnapshot = normalizedParameters;

        // Canonical hash of the normalized parameters; null for an empty parameter set.
        string? parametersHash;
        try
        {
            parametersHash = JobParametersHasher.Compute(parameterSnapshot);
        }
        catch (Exception ex)
        {
            var message = $"Job trigger parameters cannot be serialized: {ex.Message}";
            triggerActivity?.SetStatus(ActivityStatusCode.Error, message);
            triggerActivity?.SetTag("job.trigger.outcome", JobTriggerOutcome.InvalidParameters.ToString());
            _diagnostics.RecordTriggerRejected(kind, trigger, "invalid_parameters");
            _logger.LogWarning(ex, "Failed to serialize parameters for job {Kind}", kind);
            return JobTriggerResult.InvalidParameters(message);
        }

        triggerActivity?.SetTag("job.parameters_count", parameterSnapshot.Count);

        var now = _timeProvider.GetUtcNow();
        var leaseDuration = definition.LeaseDuration <= TimeSpan.Zero ? _options.DefaultLeaseDuration : definition.LeaseDuration;
        JobLease? lease = null;
        try
        {
            // Single-flight: failing to acquire the lease means another holder is running this kind.
            lease = await _leaseStore.TryAcquireAsync(definition.LeaseKey, _holderId, leaseDuration, now, cancellationToken).ConfigureAwait(false);
            if (lease is null)
            {
                var result = JobTriggerResult.AlreadyRunning($"Job '{kind}' is already running.");
                triggerActivity?.SetStatus(ActivityStatusCode.Ok, "already_running");
                triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
                _diagnostics.RecordTriggerRejected(kind, trigger, "already_running");
                return result;
            }

            var createdAt = _timeProvider.GetUtcNow();
            var request = new JobRunCreateRequest(
                definition.Kind,
                trigger,
                parameterSnapshot,
                parametersHash,
                definition.Timeout,
                leaseDuration,
                createdAt);
            triggerActivity?.SetTag("job.parameters_hash", request.ParametersHash);

            // Persist the run, then mark it started; fall back to the created snapshot
            // if the start transition did not apply.
            var run = await _jobStore.CreateAsync(request, cancellationToken).ConfigureAwait(false);
            var startedAt = _timeProvider.GetUtcNow();
            var started = await _jobStore.TryStartAsync(run.RunId, startedAt, cancellationToken).ConfigureAwait(false) ?? run;
            triggerActivity?.SetTag("job.run_id", started.RunId);
            triggerActivity?.SetTag("job.created_at", createdAt.UtcDateTime);
            triggerActivity?.SetTag("job.started_at", started.StartedAt?.UtcDateTime ?? startedAt.UtcDateTime);

            // Timeout is enforced by cancelling the linked token; the token source is
            // disposed by ExecuteJobAsync after the run completes.
            var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
            if (definition.Timeout > TimeSpan.Zero)
            {
                linkedTokenSource.CancelAfter(definition.Timeout);
            }

            var capturedLease = lease ?? throw new InvalidOperationException("Lease acquisition returned null.");
            try
            {
                // Fire-and-forget execution; faults escaping ExecuteJobAsync are logged
                // by the OnlyOnFaulted continuation.
                _ = Task.Run(() => ExecuteJobAsync(definition, capturedLease, started, parameterSnapshot, trigger, linkedTokenSource), CancellationToken.None)
                    .ContinueWith(t =>
                    {
                        if (t.Exception is not null)
                        {
                            _logger.LogError(t.Exception, "Unhandled job execution failure for {Kind}", definition.Kind);
                        }
                    },
                    TaskContinuationOptions.OnlyOnFaulted | TaskContinuationOptions.ExecuteSynchronously);
                lease = null; // released by background job execution
            }
            catch (Exception ex)
            {
                lease = capturedLease; // ensure outer finally releases if scheduling fails
                triggerActivity?.SetStatus(ActivityStatusCode.Error, ex.Message);
                triggerActivity?.SetTag("job.trigger.outcome", "exception");
                _diagnostics.RecordTriggerRejected(kind, trigger, "queue_failure");
                throw;
            }

            var accepted = JobTriggerResult.Accepted(started);
            _diagnostics.RecordTriggerAccepted(kind, trigger);
            triggerActivity?.SetStatus(ActivityStatusCode.Ok);
            triggerActivity?.SetTag("job.trigger.outcome", accepted.Outcome.ToString());
            return accepted;
        }
        catch (Exception ex)
        {
            triggerActivity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            triggerActivity?.SetTag("job.trigger.outcome", "exception");
            _diagnostics.RecordTriggerRejected(kind, trigger, "exception");
            throw;
        }
        finally
        {
            // Release handled by background execution path. If we failed before scheduling, release here.
            if (lease is not null)
            {
                var releaseError = await TryReleaseLeaseAsync(lease, definition.Kind).ConfigureAwait(false);
                if (releaseError is not null)
                {
                    _logger.LogError(releaseError, "Failed to release lease {LeaseKey} for job {Kind}", lease.Key, definition.Kind);
                }
            }
        }
    }

    /// <summary>Returns all registered definitions ordered by kind (ordinal).</summary>
    public Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken)
    {
        IReadOnlyList<JobDefinition> results = _options.Definitions.Values.OrderBy(x => x.Kind, StringComparer.Ordinal).ToArray();
        return Task.FromResult(results);
    }

    /// <summary>Delegates to the job store; see <see cref="IJobStore.GetRecentRunsAsync"/>.</summary>
    public Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken)
        => _jobStore.GetRecentRunsAsync(kind, limit, cancellationToken);

    /// <summary>Delegates to the job store; see <see cref="IJobStore.GetActiveRunsAsync"/>.</summary>
    public Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken)
        => _jobStore.GetActiveRunsAsync(cancellationToken);

    /// <summary>Delegates to the job store; see <see cref="IJobStore.FindAsync"/>.</summary>
    public Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken)
        => _jobStore.FindAsync(runId, cancellationToken);

    /// <summary>Delegates to the job store; see <see cref="IJobStore.GetLastRunAsync"/>.</summary>
    public Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken)
        => _jobStore.GetLastRunAsync(kind, cancellationToken);

    /// <summary>Delegates to the job store; see <see cref="IJobStore.GetLastRunsAsync"/>.</summary>
    public Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken)
        => _jobStore.GetLastRunsAsync(kinds, cancellationToken);

    /// <summary>
    /// Normalizes every parameter value; fails with a descriptive error on empty keys
    /// or unsupported value types instead of throwing to the caller.
    /// </summary>
    private static bool TryNormalizeParameters(
        IReadOnlyDictionary<string, object?> source,
        out Dictionary<string, object?> normalized,
        out string? error)
    {
        if (source.Count == 0)
        {
            normalized = new Dictionary<string, object?>(StringComparer.Ordinal);
            error = null;
            return true;
        }

        normalized = new Dictionary<string, object?>(source.Count, StringComparer.Ordinal);
        foreach (var kvp in source)
        {
            if (string.IsNullOrWhiteSpace(kvp.Key))
            {
                error = "Parameter keys must be non-empty strings.";
                normalized = default!;
                return false;
            }

            try
            {
                normalized[kvp.Key] = NormalizeParameterValue(kvp.Value);
            }
            catch (Exception ex)
            {
                error = $"Parameter '{kvp.Key}' cannot be serialized: {ex.Message}";
                normalized = default!;
                return false;
            }
        }

        error = null;
        return true;
    }

    /// <summary>
    /// Converts a parameter value into a deterministic, JSON-serializable shape:
    /// integral types widen to long (ulong beyond long.MaxValue becomes a string),
    /// float widens to double, date/time values convert to UTC, TimeSpan/Guid/enum/bytes
    /// become strings, JSON and dictionaries/sequences are normalized recursively
    /// (nested dictionaries are key-sorted). Unsupported types throw.
    /// </summary>
    private static object? NormalizeParameterValue(object? value)
    {
        if (value is null)
        {
            return null;
        }

        switch (value)
        {
            case string or bool or double or decimal:
                return value;
            case byte or sbyte or short or ushort or int or long:
                return Convert.ToInt64(value, CultureInfo.InvariantCulture);
            case uint ui:
                return Convert.ToInt64(ui);
            case ulong ul when ul <= long.MaxValue:
                return (long)ul;
            case ulong ul:
                // Too large for long; preserved as an invariant-culture string.
                return ul.ToString(CultureInfo.InvariantCulture);
            case float f:
                return (double)f;
            case DateTime dt:
                // Non-UTC kinds (including Unspecified) go through ToUniversalTime().
                return dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime();
            case DateTimeOffset dto:
                return dto.ToUniversalTime();
            case TimeSpan ts:
                return ts.ToString("c", CultureInfo.InvariantCulture);
            case Guid guid:
                return guid.ToString("D");
            case Enum enumValue:
                return enumValue.ToString();
            case byte[] bytes:
                return Convert.ToBase64String(bytes);
            case JsonDocument document:
                return NormalizeJsonElement(document.RootElement);
            case JsonElement element:
                return NormalizeJsonElement(element);
            case IDictionary dictionary:
            {
                // Sorted so downstream hashing is order-independent.
                var nested = new SortedDictionary<string, object?>(StringComparer.Ordinal);
                foreach (DictionaryEntry entry in dictionary)
                {
                    if (entry.Key is not string key || string.IsNullOrWhiteSpace(key))
                    {
                        throw new InvalidOperationException("Nested dictionary keys must be non-empty strings.");
                    }

                    nested[key] = NormalizeParameterValue(entry.Value);
                }

                return nested;
            }
            case IEnumerable enumerable when value is not string:
            {
                var list = new List<object?>();
                foreach (var item in enumerable)
                {
                    list.Add(NormalizeParameterValue(item));
                }

                return list;
            }
            default:
                throw new InvalidOperationException($"Unsupported parameter value of type '{value.GetType().FullName}'.");
        }
    }

    /// <summary>
    /// Converts a <see cref="JsonElement"/> into plain CLR values; numbers prefer
    /// long, then decimal, then double. Unsupported value kinds throw.
    /// </summary>
    private static object? NormalizeJsonElement(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.Null => null,
            JsonValueKind.String => element.GetString(),
            JsonValueKind.True => true,
            JsonValueKind.False => false,
            JsonValueKind.Number => element.TryGetInt64(out var l)
                ? l
                : element.TryGetDecimal(out var dec)
                    ? dec
                    : element.GetDouble(),
            JsonValueKind.Object => NormalizeJsonObject(element),
            JsonValueKind.Array => NormalizeJsonArray(element),
            _ => throw new InvalidOperationException($"Unsupported JSON value '{element.ValueKind}'."),
        };
    }

    /// <summary>Converts a JSON object into a key-sorted dictionary of normalized values.</summary>
    private static SortedDictionary<string, object?> NormalizeJsonObject(JsonElement element)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var property in element.EnumerateObject())
        {
            result[property.Name] = NormalizeJsonElement(property.Value);
        }

        return result;
    }

    /// <summary>Converts a JSON array into a list of normalized values, preserving order.</summary>
    private static List<object?> NormalizeJsonArray(JsonElement element)
    {
        var items = new List<object?>();
        foreach (var item in element.EnumerateArray())
        {
            items.Add(NormalizeJsonElement(item));
        }

        return items;
    }

    /// <summary>Records final status/error for the run, stamped with the current provider time.</summary>
    private async Task<JobRunSnapshot?> CompleteRunAsync(Guid runId, JobRunStatus status, string? error, CancellationToken cancellationToken)
    {
        var completedAt = _timeProvider.GetUtcNow();
        var completion = new JobRunCompletion(status, completedAt, error);
        return await _jobStore.TryCompleteAsync(runId, completion, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Resolves the run duration, preferring the persisted snapshot's value and
    /// falling back to locally observed timestamps; null when the result would be negative.
    /// </summary>
    private TimeSpan? ResolveDuration(JobRunSnapshot original, JobRunSnapshot? completed)
    {
        if (completed?.Duration is { } duration)
        {
            return duration;
        }

        var startedAt = completed?.StartedAt ?? original.StartedAt ?? original.CreatedAt;
        var completedAt = completed?.CompletedAt ?? _timeProvider.GetUtcNow();
        var elapsed = completedAt - startedAt;
        return elapsed >= TimeSpan.Zero ? elapsed : null;
    }

    /// <summary>
    /// Awaits the heartbeat task and surfaces its failure, if any; cancellation is
    /// expected during shutdown and reported as success (null).
    /// </summary>
    private static async Task<Exception?> ObserveLeaseTaskAsync(Task heartbeatTask)
    {
        try
        {
            await heartbeatTask.ConfigureAwait(false);
            return null;
        }
        catch (OperationCanceledException)
        {
            return null;
        }
        catch (Exception ex)
        {
            return ex;
        }
    }

    /// <summary>
    /// Releases the lease without throwing; a failure is returned as a
    /// <see cref="LeaseMaintenanceException"/> for the caller to log or merge.
    /// </summary>
    private async Task<Exception?> TryReleaseLeaseAsync(JobLease lease, string kind)
    {
        try
        {
            await _leaseStore.ReleaseAsync(lease.Key, _holderId, CancellationToken.None).ConfigureAwait(false);
            return null;
        }
        catch (Exception ex)
        {
            return new LeaseMaintenanceException($"Failed to release lease for job '{kind}'.", ex);
        }
    }

    /// <summary>Merges two optional lease errors into one (AggregateException when both are set).</summary>
    private static Exception? CombineLeaseExceptions(Exception? first, Exception? second)
    {
        if (first is null)
        {
            return second;
        }

        if (second is null)
        {
            return first;
        }

        return new AggregateException(first, second);
    }

    /// <summary>
    /// Background execution path: starts the lease heartbeat, resolves the job from a
    /// fresh DI scope and runs it, then — in order — stops the heartbeat, releases the
    /// lease, folds any lease failure into the run outcome, persists completion, and
    /// emits logs/metrics. Owns and disposes <paramref name="linkedTokenSource"/>.
    /// </summary>
    private async Task ExecuteJobAsync(
        JobDefinition definition,
        JobLease lease,
        JobRunSnapshot run,
        IReadOnlyDictionary<string, object?> parameters,
        string trigger,
        CancellationTokenSource linkedTokenSource)
    {
        using (linkedTokenSource)
        {
            var cancellationToken = linkedTokenSource.Token;
            using var heartbeatCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
            var heartbeatTask = MaintainLeaseAsync(definition, lease, heartbeatCts.Token);

            using var activity = _diagnostics.StartExecutionActivity(run.Kind, trigger, run.RunId);
            activity?.SetTag("job.timeout_seconds", definition.Timeout.TotalSeconds);
            activity?.SetTag("job.lease_seconds", definition.LeaseDuration.TotalSeconds);
            activity?.SetTag("job.parameters_count", parameters.Count);
            activity?.SetTag("job.created_at", run.CreatedAt.UtcDateTime);
            activity?.SetTag("job.started_at", (run.StartedAt ?? run.CreatedAt).UtcDateTime);
            activity?.SetTag("job.parameters_hash", run.ParametersHash);
            _diagnostics.RecordRunStarted(run.Kind);

            JobRunStatus finalStatus = JobRunStatus.Succeeded;
            string? error = null;
            Exception? executionException = null;
            JobRunSnapshot? completedSnapshot = null;
            Exception? leaseException = null;
            try
            {
                // Each run gets its own DI scope; the job type is resolved by the
                // concrete type registered in the definition.
                using var scope = _scopeFactory.CreateScope();
                var job = (IJob)scope.ServiceProvider.GetRequiredService(definition.JobType);
                var jobLogger = _loggerFactory.CreateLogger(definition.JobType);
                var context = new JobExecutionContext(
                    run.RunId,
                    run.Kind,
                    trigger,
                    parameters,
                    scope.ServiceProvider,
                    _timeProvider,
                    jobLogger);
                await job.ExecuteAsync(context, cancellationToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException oce)
            {
                // Covers both caller cancellation and timeout (CancelAfter on the linked token).
                finalStatus = JobRunStatus.Cancelled;
                error = oce.Message;
                executionException = oce;
            }
            catch (Exception ex)
            {
                finalStatus = JobRunStatus.Failed;
                error = ex.ToString();
                executionException = ex;
            }
            finally
            {
                // Stop heartbeating, release the lease, and fold any lease failure
                // into the run outcome (a lease failure fails an otherwise-successful run).
                heartbeatCts.Cancel();
                leaseException = await ObserveLeaseTaskAsync(heartbeatTask).ConfigureAwait(false);
                var releaseException = await TryReleaseLeaseAsync(lease, definition.Kind).ConfigureAwait(false);
                leaseException = CombineLeaseExceptions(leaseException, releaseException);
                if (leaseException is not null)
                {
                    var leaseMessage = $"Lease maintenance failed: {leaseException.GetType().Name}: {leaseException.Message}";
                    if (finalStatus != JobRunStatus.Failed)
                    {
                        finalStatus = JobRunStatus.Failed;
                        error = leaseMessage;
                        executionException = leaseException;
                    }
                    else
                    {
                        error = string.IsNullOrWhiteSpace(error)
                            ? leaseMessage
                            : $"{error}{Environment.NewLine}{leaseMessage}";
                        executionException = executionException is null
                            ? leaseException
                            : new AggregateException(executionException, leaseException);
                    }
                }
            }

            // Completion is persisted with CancellationToken.None so bookkeeping
            // still happens after timeout/cancellation.
            completedSnapshot = await CompleteRunAsync(run.RunId, finalStatus, error, CancellationToken.None).ConfigureAwait(false);
            if (!string.IsNullOrWhiteSpace(error))
            {
                activity?.SetTag("job.error", error);
            }

            activity?.SetTag("job.status", finalStatus.ToString());
            var completedDuration = ResolveDuration(run, completedSnapshot);
            if (completedDuration.HasValue)
            {
                activity?.SetTag("job.duration_seconds", completedDuration.Value.TotalSeconds);
            }

            switch (finalStatus)
            {
                case JobRunStatus.Succeeded:
                    activity?.SetStatus(ActivityStatusCode.Ok);
                    _logger.LogInformation("Job {Kind} run {RunId} succeeded", run.Kind, run.RunId);
                    break;
                case JobRunStatus.Cancelled:
                    activity?.SetStatus(ActivityStatusCode.Ok, "cancelled");
                    _logger.LogWarning(executionException, "Job {Kind} run {RunId} cancelled", run.Kind, run.RunId);
                    break;
                case JobRunStatus.Failed:
                    activity?.SetStatus(ActivityStatusCode.Error, executionException?.Message ?? error);
                    _logger.LogError(executionException, "Job {Kind} run {RunId} failed", run.Kind, run.RunId);
                    break;
            }

            _diagnostics.RecordRunCompleted(run.Kind, finalStatus, completedDuration, error);
        }
    }

    /// <summary>
    /// Heartbeats the lease at half the lease duration (minimum 1s) until cancelled.
    /// A heartbeat failure is wrapped in <see cref="LeaseMaintenanceException"/> and
    /// surfaced to ExecuteJobAsync via ObserveLeaseTaskAsync.
    /// </summary>
    private async Task MaintainLeaseAsync(JobDefinition definition, JobLease lease, CancellationToken cancellationToken)
    {
        var leaseDuration = lease.LeaseDuration <= TimeSpan.Zero ? _options.DefaultLeaseDuration : lease.LeaseDuration;
        var delay = TimeSpan.FromMilliseconds(Math.Max(1000, leaseDuration.TotalMilliseconds / 2));
        while (!cancellationToken.IsCancellationRequested)
        {
            try
            {
                await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
            }
            catch (TaskCanceledException)
            {
                break;
            }

            var now = _timeProvider.GetUtcNow();
            try
            {
                await _leaseStore.HeartbeatAsync(definition.LeaseKey, _holderId, leaseDuration, now, cancellationToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                throw new LeaseMaintenanceException($"Failed to heartbeat lease for job '{definition.Kind}'.", ex);
            }
        }
    }

    /// <summary>Builds the lease-holder identity for this process: "machine:pid".</summary>
    private static string BuildHolderId()
    {
        var machine = Environment.MachineName;
        var processId = Environment.ProcessId;
        return $"{machine}:{processId}";
    }
}
/// <summary>
/// Wraps lease heartbeat/release failures so the coordinator can distinguish lease
/// maintenance problems from job execution failures.
/// </summary>
internal sealed class LeaseMaintenanceException : Exception
{
    public LeaseMaintenanceException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
/// <summary>
/// Produces a stable SHA-256 fingerprint of a job's trigger parameters: keys are
/// ordinal-sorted before serialization so equal parameter sets hash identically
/// regardless of insertion order. Empty/absent parameter sets hash to null.
/// </summary>
internal static class JobParametersHasher
{
    internal static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
    };

    /// <summary>
    /// Computes the lowercase-hex SHA-256 of the canonical JSON form of
    /// <paramref name="parameters"/>, or null when there are no parameters.
    /// </summary>
    public static string? Compute(IReadOnlyDictionary<string, object?> parameters)
    {
        if (parameters is null || parameters.Count == 0)
        {
            return null;
        }

        // Canonicalize: ordinal key order makes the serialized form deterministic.
        var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var (key, value) in parameters)
        {
            ordered[key] = value;
        }

        var payload = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(ordered, SerializerOptions));
        return Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
    }
}

View File

@@ -0,0 +1,12 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Registered job metadata: the kind identifier, the concrete <see cref="IJob"/> type
/// to resolve, timeout and lease durations, an optional cron expression, and whether
/// the job may be triggered.
/// </summary>
public sealed record JobDefinition(
    string Kind,
    Type JobType,
    TimeSpan Timeout,
    TimeSpan LeaseDuration,
    string? CronExpression,
    bool Enabled)
{
    /// <summary>Lease key used for single-flight execution of this kind ("job:&lt;kind&gt;").</summary>
    public string LeaseKey => $"job:{Kind}";
}

View File

@@ -0,0 +1,171 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Central OpenTelemetry wiring for the job subsystem: owns the activity source and
/// meter, and exposes helpers for trigger/execution/scheduler activities plus the
/// trigger/run counters, active-run gauge, duration histogram, and scheduler skew histogram.
/// </summary>
public sealed class JobDiagnostics : IDisposable
{
    public const string ActivitySourceName = "StellaOps.Feedser.Jobs";
    public const string MeterName = "StellaOps.Feedser.Jobs";
    public const string TriggerActivityName = "feedser.job.trigger";
    public const string ExecuteActivityName = "feedser.job.execute";
    public const string SchedulerActivityName = "feedser.scheduler.evaluate";

    private readonly Counter<long> _triggersAccepted;
    private readonly Counter<long> _triggersRejected;
    private readonly Counter<long> _runsCompleted;
    private readonly UpDownCounter<long> _runsActive;
    private readonly Histogram<double> _runDurationSeconds;
    private readonly Histogram<double> _schedulerSkewMilliseconds;

    public JobDiagnostics()
    {
        ActivitySource = new ActivitySource(ActivitySourceName);
        Meter = new Meter(MeterName);
        _triggersAccepted = Meter.CreateCounter<long>(
            name: "feedser.jobs.triggers.accepted",
            unit: "count",
            description: "Number of job trigger requests accepted for execution.");
        _triggersRejected = Meter.CreateCounter<long>(
            name: "feedser.jobs.triggers.rejected",
            unit: "count",
            description: "Number of job trigger requests rejected or ignored by the coordinator.");
        _runsCompleted = Meter.CreateCounter<long>(
            name: "feedser.jobs.runs.completed",
            unit: "count",
            description: "Number of job executions that have finished grouped by outcome.");
        _runsActive = Meter.CreateUpDownCounter<long>(
            name: "feedser.jobs.runs.active",
            unit: "count",
            description: "Current number of running job executions.");
        _runDurationSeconds = Meter.CreateHistogram<double>(
            name: "feedser.jobs.runs.duration",
            unit: "s",
            description: "Distribution of job execution durations in seconds.");
        _schedulerSkewMilliseconds = Meter.CreateHistogram<double>(
            name: "feedser.scheduler.skew",
            unit: "ms",
            description: "Difference between the intended and actual scheduler fire time in milliseconds.");
    }

    public ActivitySource ActivitySource { get; }

    public Meter Meter { get; }

    /// <summary>Starts a trigger-evaluation activity tagged with kind and trigger; null if no listener.</summary>
    public Activity? StartTriggerActivity(string kind, string trigger)
    {
        var activity = ActivitySource.StartActivity(TriggerActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.trigger", trigger);
        }

        return activity;
    }

    /// <summary>Starts a scheduler-evaluation activity, recording intended vs. actual fire time.</summary>
    public Activity? StartSchedulerActivity(string kind, DateTimeOffset scheduledFor, DateTimeOffset invokedAt)
    {
        var activity = ActivitySource.StartActivity(SchedulerActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.scheduled_for", scheduledFor.UtcDateTime);
            activity.SetTag("job.invoked_at", invokedAt.UtcDateTime);
            activity.SetTag("job.scheduler_delay_ms", (invokedAt - scheduledFor).TotalMilliseconds);
        }

        return activity;
    }

    /// <summary>Starts a job-execution activity tagged with kind, trigger, and run id.</summary>
    public Activity? StartExecutionActivity(string kind, string trigger, Guid runId)
    {
        var activity = ActivitySource.StartActivity(ExecuteActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.trigger", trigger);
            activity.SetTag("job.run_id", runId);
        }

        return activity;
    }

    /// <summary>Increments the accepted-triggers counter.</summary>
    public void RecordTriggerAccepted(string kind, string trigger)
    {
        var tags = new TagList
        {
            { "job.kind", kind },
            { "job.trigger", trigger },
        };
        _triggersAccepted.Add(1, tags);
    }

    /// <summary>Increments the rejected-triggers counter, tagged with the rejection reason.</summary>
    public void RecordTriggerRejected(string kind, string trigger, string reason)
    {
        var tags = new TagList
        {
            { "job.kind", kind },
            { "job.trigger", trigger },
            { "job.reason", reason },
        };
        _triggersRejected.Add(1, tags);
    }

    /// <summary>Increments the active-runs gauge; pair with <see cref="RecordRunCompleted"/>.</summary>
    public void RecordRunStarted(string kind)
    {
        var tags = new TagList { { "job.kind", kind } };
        _runsActive.Add(1, tags);
    }

    /// <summary>
    /// Records a finished run: bumps the completion counter (by status, with error tag
    /// when present), decrements the active-runs gauge, and records the duration
    /// histogram when a duration is available (clamped to non-negative).
    /// </summary>
    public void RecordRunCompleted(string kind, JobRunStatus status, TimeSpan? duration, string? error)
    {
        var outcome = status.ToString();
        var completionTags = new TagList
        {
            { "job.kind", kind },
            { "job.status", outcome },
        };
        if (!string.IsNullOrWhiteSpace(error))
        {
            completionTags.Add("job.error", error);
        }

        _runsCompleted.Add(1, completionTags);

        var activeTags = new TagList { { "job.kind", kind } };
        _runsActive.Add(-1, activeTags);

        if (duration.HasValue)
        {
            var seconds = Math.Max(duration.Value.TotalSeconds, 0d);
            var durationTags = new TagList
            {
                { "job.kind", kind },
                { "job.status", outcome },
            };
            _runDurationSeconds.Record(seconds, durationTags);
        }
    }

    /// <summary>Records the scheduler fire-time skew (actual minus intended) in milliseconds.</summary>
    public void RecordSchedulerSkew(string kind, DateTimeOffset scheduledFor, DateTimeOffset invokedAt)
    {
        var skew = (invokedAt - scheduledFor).TotalMilliseconds;
        var tags = new TagList { { "job.kind", kind } };
        _schedulerSkewMilliseconds.Record(skew, tags);
    }

    public void Dispose()
    {
        ActivitySource.Dispose();
        Meter.Dispose();
    }
}

View File

@@ -0,0 +1,42 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Per-run state handed to an <see cref="IJob"/>: run identity, trigger, normalized
/// parameters, the run's scoped service provider, the time source, and a logger
/// created for the concrete job type.
/// </summary>
public sealed class JobExecutionContext
{
    public JobExecutionContext(
        Guid runId,
        string kind,
        string trigger,
        IReadOnlyDictionary<string, object?> parameters,
        IServiceProvider services,
        TimeProvider timeProvider,
        ILogger logger)
    {
        ArgumentNullException.ThrowIfNull(parameters);
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(logger);

        RunId = runId;
        Kind = kind;
        Trigger = trigger;
        Parameters = parameters;
        Services = services;
        TimeProvider = timeProvider;
        Logger = logger;
    }

    /// <summary>Unique identifier of this run.</summary>
    public Guid RunId { get; }

    /// <summary>Registered job kind being executed.</summary>
    public string Kind { get; }

    /// <summary>Origin of the run (e.g. manual/cron/API label supplied by the caller).</summary>
    public string Trigger { get; }

    /// <summary>Normalized trigger parameters for this run.</summary>
    public IReadOnlyDictionary<string, object?> Parameters { get; }

    /// <summary>Scoped service provider; valid for the duration of the run.</summary>
    public IServiceProvider Services { get; }

    /// <summary>Time source; inject-friendly for deterministic tests.</summary>
    public TimeProvider TimeProvider { get; }

    /// <summary>Logger created for the concrete job type.</summary>
    public ILogger Logger { get; }

    /// <summary>Convenience accessor over <see cref="Services"/>.</summary>
    public T GetRequiredService<T>() where T : notnull
        => Services.GetRequiredService<T>();
}

View File

@@ -0,0 +1,9 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Snapshot of an acquired execution lease: key, current holder, acquisition and last
/// heartbeat timestamps, the lease duration, and the time-to-live expiry.
/// </summary>
public sealed record JobLease(
    string Key,
    string Holder,
    DateTimeOffset AcquiredAt,
    DateTimeOffset HeartbeatAt,
    TimeSpan LeaseDuration,
    DateTimeOffset TtlAt);

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Terminal state recorded for a job run: final status, completion time, and the
/// error text when the run did not succeed.
/// </summary>
public sealed record JobRunCompletion(
    JobRunStatus Status,
    DateTimeOffset CompletedAt,
    string? Error);

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Payload for persisting a new job run: kind, trigger origin, normalized parameters
/// and their canonical hash (null when empty), optional timeout/lease durations, and
/// the creation timestamp.
/// </summary>
public sealed record JobRunCreateRequest(
    string Kind,
    string Trigger,
    IReadOnlyDictionary<string, object?> Parameters,
    string? ParametersHash,
    TimeSpan? Timeout,
    TimeSpan? LeaseDuration,
    DateTimeOffset CreatedAt);

View File

@@ -0,0 +1,21 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Immutable projection of a job run as stored in persistence.
/// </summary>
public sealed record JobRunSnapshot(
    Guid RunId,
    string Kind,
    JobRunStatus Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset? StartedAt,
    DateTimeOffset? CompletedAt,
    string Trigger,
    string? ParametersHash,
    string? Error,
    TimeSpan? Timeout,
    TimeSpan? LeaseDuration,
    IReadOnlyDictionary<string, object?> Parameters)
{
    /// <summary>Elapsed run time; null until both start and completion timestamps are recorded.</summary>
    public TimeSpan? Duration => StartedAt is null || CompletedAt is null ? null : CompletedAt - StartedAt;
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Lifecycle states of a job run: created but not started, executing, and the three
/// terminal outcomes.
/// </summary>
public enum JobRunStatus
{
    Pending,
    Running,
    Succeeded,
    Failed,
    Cancelled,
}

View File

@@ -0,0 +1,47 @@
using System;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Fluent registration surface for jobs: registers the job type as a transient service
/// and records its <see cref="JobDefinition"/> (kind, timing, cron, enabled flag) in
/// <see cref="JobSchedulerOptions"/>. Duplicate kinds fail when options are built.
/// </summary>
public sealed class JobSchedulerBuilder
{
    private readonly IServiceCollection _services;

    public JobSchedulerBuilder(IServiceCollection services)
        => _services = services ?? throw new ArgumentNullException(nameof(services));

    /// <summary>
    /// Registers <typeparamref name="TJob"/> under <paramref name="kind"/>. Timeout and
    /// lease duration fall back to the scheduler defaults when not supplied.
    /// </summary>
    public JobSchedulerBuilder AddJob<TJob>(
        string kind,
        string? cronExpression = null,
        TimeSpan? timeout = null,
        TimeSpan? leaseDuration = null,
        bool enabled = true)
        where TJob : class, IJob
    {
        ArgumentException.ThrowIfNullOrEmpty(kind);

        _services.AddTransient<TJob>();
        _services.Configure<JobSchedulerOptions>(options =>
        {
            // Evaluated when options are built, so defaults reflect final configuration.
            if (options.Definitions.ContainsKey(kind))
            {
                throw new InvalidOperationException($"Job '{kind}' is already registered.");
            }

            var definition = new JobDefinition(
                kind,
                typeof(TJob),
                timeout ?? options.DefaultTimeout,
                leaseDuration ?? options.DefaultLeaseDuration,
                cronExpression,
                enabled);
            options.Definitions.Add(kind, definition);
        });

        return this;
    }
}

View File

@@ -0,0 +1,165 @@
using Cronos;
using System.Diagnostics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Background service that evaluates cron expressions for registered jobs and triggers them.
/// </summary>
public sealed class JobSchedulerHostedService : BackgroundService
{
    private readonly IJobCoordinator _coordinator;
    private readonly JobSchedulerOptions _options;
    private readonly ILogger<JobSchedulerHostedService> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly JobDiagnostics _diagnostics;
    // Parsed cron expression per job kind, populated once in the constructor.
    private readonly Dictionary<string, CronExpression> _cronExpressions = new(StringComparer.Ordinal);
    // Cached next-fire time per job kind so cron evaluation is not repeated on every loop pass.
    private readonly Dictionary<string, DateTimeOffset> _nextOccurrences = new(StringComparer.Ordinal);

    /// <summary>
    /// Parses the cron expression of every registered definition up front. Definitions with an
    /// invalid expression are logged and excluded from scheduling rather than failing startup.
    /// </summary>
    public JobSchedulerHostedService(
        IJobCoordinator coordinator,
        IOptions<JobSchedulerOptions> optionsAccessor,
        ILogger<JobSchedulerHostedService> logger,
        TimeProvider timeProvider,
        JobDiagnostics diagnostics)
    {
        _coordinator = coordinator ?? throw new ArgumentNullException(nameof(coordinator));
        _options = (optionsAccessor ?? throw new ArgumentNullException(nameof(optionsAccessor))).Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));

        foreach (var definition in _options.Definitions.Values)
        {
            // Jobs without a cron expression are trigger-only; skip them.
            if (string.IsNullOrWhiteSpace(definition.CronExpression))
            {
                continue;
            }

            try
            {
                var cron = CronExpression.Parse(definition.CronExpression!, CronFormat.Standard);
                _cronExpressions[definition.Kind] = cron;
            }
            catch (CronFormatException ex)
            {
                _logger.LogError(ex, "Invalid cron expression '{Cron}' for job {Kind}", definition.CronExpression, definition.Kind);
            }
        }
    }

    /// <summary>
    /// Main scheduling loop: finds the earliest upcoming occurrence across all cron jobs,
    /// fires any that are due (fire-and-forget), then sleeps until the next wake time.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (_cronExpressions.Count == 0)
        {
            // Nothing to schedule; park until shutdown cancels the delay.
            _logger.LogInformation("No cron-based jobs registered; scheduler idle.");
            await Task.Delay(Timeout.Infinite, stoppingToken).ConfigureAwait(false);
            return;
        }

        while (!stoppingToken.IsCancellationRequested)
        {
            var now = _timeProvider.GetUtcNow();
            var nextWake = now.AddMinutes(5); // default sleep when nothing scheduled

            foreach (var (kind, cron) in _cronExpressions)
            {
                // Definitions may be disabled after startup; re-check on every pass.
                if (!_options.Definitions.TryGetValue(kind, out var definition) || !definition.Enabled)
                {
                    continue;
                }

                var next = GetNextOccurrence(kind, cron, now);
                // 500 ms tolerance: treat occurrences at or just past "now" as due.
                if (next <= now.AddMilliseconds(500))
                {
                    // Fire-and-forget; TriggerJobAsync handles and logs its own failures.
                    _ = TriggerJobAsync(kind, next, stoppingToken);
                    // Advance the cached occurrence past the one just fired so it is not re-triggered.
                    _nextOccurrences[kind] = GetNextOccurrence(kind, cron, now.AddSeconds(1));
                    next = _nextOccurrences[kind];
                }

                if (next < nextWake)
                {
                    nextWake = next;
                }
            }

            var delay = nextWake - now;
            // Clamp to at least one second to avoid a hot loop when occurrences cluster.
            if (delay < TimeSpan.FromSeconds(1))
            {
                delay = TimeSpan.FromSeconds(1);
            }

            try
            {
                await Task.Delay(delay, stoppingToken).ConfigureAwait(false);
            }
            catch (TaskCanceledException)
            {
                // Normal shutdown path.
                break;
            }
        }
    }

    /// <summary>
    /// Returns the next occurrence for <paramref name="kind"/> after <paramref name="reference"/>,
    /// using the cached value when it is still in the future; otherwise re-evaluates the cron
    /// expression (in UTC) and refreshes the cache.
    /// </summary>
    private DateTimeOffset GetNextOccurrence(string kind, CronExpression cron, DateTimeOffset reference)
    {
        if (_nextOccurrences.TryGetValue(kind, out var cached) && cached > reference)
        {
            return cached;
        }

        var next = cron.GetNextOccurrence(reference.UtcDateTime, TimeZoneInfo.Utc);
        if (next is null)
        {
            // No future occurrence; schedule far in future to avoid tight loop.
            next = reference.UtcDateTime.AddYears(100);
        }

        var nextUtc = DateTime.SpecifyKind(next.Value, DateTimeKind.Utc);
        var offset = new DateTimeOffset(nextUtc);
        _nextOccurrences[kind] = offset;
        return offset;
    }

    /// <summary>
    /// Triggers a single run via the coordinator, recording scheduler skew and trace tags.
    /// Exceptions are caught and logged unless shutdown is already in progress.
    /// </summary>
    private async Task TriggerJobAsync(string kind, DateTimeOffset scheduledFor, CancellationToken stoppingToken)
    {
        var invokedAt = _timeProvider.GetUtcNow();
        _diagnostics.RecordSchedulerSkew(kind, scheduledFor, invokedAt);
        using var activity = _diagnostics.StartSchedulerActivity(kind, scheduledFor, invokedAt);

        try
        {
            var result = await _coordinator.TriggerAsync(kind, parameters: null, trigger: "scheduler", stoppingToken).ConfigureAwait(false);
            activity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
            if (result.Run is not null)
            {
                activity?.SetTag("job.run_id", result.Run.RunId);
            }

            if (!string.IsNullOrWhiteSpace(result.ErrorMessage))
            {
                activity?.SetTag("job.trigger.error", result.ErrorMessage);
            }

            if (result.Outcome == JobTriggerOutcome.Accepted)
            {
                activity?.SetStatus(ActivityStatusCode.Ok);
            }
            else
            {
                // Non-accepted outcomes (e.g. AlreadyRunning) are expected; keep status Ok with a note.
                activity?.SetStatus(ActivityStatusCode.Ok, result.Outcome.ToString());
            }

            if (result.Outcome != JobTriggerOutcome.Accepted)
            {
                _logger.LogDebug("Scheduler trigger for {Kind} resulted in {Outcome}", kind, result.Outcome);
            }
        }
        catch (Exception ex) when (!stoppingToken.IsCancellationRequested)
        {
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            _logger.LogError(ex, "Cron trigger for job {Kind} failed", kind);
        }
    }
}

View File

@@ -0,0 +1,12 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Registry of job definitions plus scheduler-wide default timings.
/// </summary>
public sealed class JobSchedulerOptions
{
    /// <summary>
    /// Shared empty instance. NOTE(review): <see cref="Definitions"/> is mutable, so callers
    /// must not add entries to this shared instance.
    /// </summary>
    public static JobSchedulerOptions Empty { get; } = new();

    /// <summary>Registered job definitions keyed by job kind (ordinal comparison).</summary>
    public IDictionary<string, JobDefinition> Definitions { get; } = new Dictionary<string, JobDefinition>(StringComparer.Ordinal);

    /// <summary>Timeout applied to jobs that do not specify their own.</summary>
    public TimeSpan DefaultTimeout { get; set; } = TimeSpan.FromMinutes(15);

    /// <summary>Lease duration applied to jobs that do not specify their own.</summary>
    public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5);
}

View File

@@ -0,0 +1,40 @@
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Possible results of asking the coordinator to trigger a job run.
/// </summary>
public enum JobTriggerOutcome
{
    /// <summary>The run was accepted and a snapshot is available.</summary>
    Accepted,
    /// <summary>No job is registered under the requested kind.</summary>
    NotFound,
    /// <summary>The job definition exists but is disabled.</summary>
    Disabled,
    /// <summary>A run of the same job is already in progress.</summary>
    AlreadyRunning,
    /// <summary>The single-flight lease could not be acquired.</summary>
    LeaseRejected,
    /// <summary>The supplied parameters could not be validated or serialized.</summary>
    InvalidParameters,
    /// <summary>The run executed but failed.</summary>
    Failed,
    /// <summary>The run was cancelled.</summary>
    Cancelled,
}
/// <summary>
/// Outcome of a job trigger request, carrying the run snapshot when one was created and an
/// optional error message. Use the static factories to build consistently-shaped results.
/// </summary>
public sealed record JobTriggerResult(JobTriggerOutcome Outcome, JobRunSnapshot? Run, string? ErrorMessage)
{
    /// <summary>Trigger accepted; <paramref name="run"/> describes the created run.</summary>
    public static JobTriggerResult Accepted(JobRunSnapshot run) => new(JobTriggerOutcome.Accepted, run, null);

    /// <summary>No job registered under the requested kind.</summary>
    public static JobTriggerResult NotFound(string message) => new(JobTriggerOutcome.NotFound, null, message);

    /// <summary>The job exists but is disabled.</summary>
    public static JobTriggerResult Disabled(string message) => new(JobTriggerOutcome.Disabled, null, message);

    /// <summary>A run of the same job is already in progress.</summary>
    public static JobTriggerResult AlreadyRunning(string message) => new(JobTriggerOutcome.AlreadyRunning, null, message);

    /// <summary>The single-flight lease could not be acquired.</summary>
    public static JobTriggerResult LeaseRejected(string message) => new(JobTriggerOutcome.LeaseRejected, null, message);

    /// <summary>The supplied parameters were rejected.</summary>
    public static JobTriggerResult InvalidParameters(string message) => new(JobTriggerOutcome.InvalidParameters, null, message);

    /// <summary>The run executed and failed; snapshot and error are both available.</summary>
    public static JobTriggerResult Failed(JobRunSnapshot run, string error) => new(JobTriggerOutcome.Failed, run, error);

    /// <summary>The run was cancelled; snapshot and error are both available.</summary>
    public static JobTriggerResult Cancelled(JobRunSnapshot run, string error) => new(JobTriggerOutcome.Cancelled, run, error);
}

View File

@@ -0,0 +1,27 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
namespace StellaOps.Feedser.Core.Jobs;
/// <summary>
/// Dependency-injection entry point for the Feedser job scheduler.
/// </summary>
public static class JobServiceCollectionExtensions
{
    /// <summary>
    /// Registers the job scheduler core services (options, diagnostics, coordinator, hosted
    /// scheduler) and returns a <see cref="JobSchedulerBuilder"/> for job registration.
    /// Safe to call more than once: core singletons use TryAdd semantics, and
    /// <c>AddHostedService</c> already uses TryAddEnumerable internally, so repeated calls do
    /// not duplicate registrations.
    /// </summary>
    /// <param name="services">Target service collection; must not be null.</param>
    /// <param name="configure">Optional callback to adjust <see cref="JobSchedulerOptions"/>.</param>
    public static JobSchedulerBuilder AddJobScheduler(this IServiceCollection services, Action<JobSchedulerOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        var optionsBuilder = services.AddOptions<JobSchedulerOptions>();
        if (configure is not null)
        {
            optionsBuilder.Configure(configure);
        }

        // Expose the resolved options value directly so consumers can inject
        // JobSchedulerOptions without depending on IOptions<T>.
        services.TryAddSingleton(sp => sp.GetRequiredService<IOptions<JobSchedulerOptions>>().Value);
        services.TryAddSingleton<JobDiagnostics>();
        services.TryAddSingleton(TimeProvider.System);
        services.TryAddSingleton<IJobCoordinator, JobCoordinator>();
        services.AddHostedService<JobSchedulerHostedService>();
        return new JobSchedulerBuilder(services);
    }
}

View File

@@ -1,61 +0,0 @@
using System;
using System.Collections.Generic;
using System.Reflection;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Core;
/// <summary>
/// Discovers connector and exporter plugins from already-loaded Feedser assemblies and from
/// plugin DLLs matching well-known file patterns in the application base directory.
/// </summary>
public static class PluginBootstrapper
{
    // File patterns for connector plugin assemblies.
    private static readonly string[] ConnectorPatterns = { "StellaOps.Feedser.Source.*.dll" };

    // File patterns for exporter plugin assemblies.
    private static readonly string[] ExporterPatterns = { "StellaOps.Feedser.Exporter.*.dll" };

    /// <summary>
    /// Loads all available connector plugins. When <paramref name="services"/> is null, a
    /// no-op provider is used; when <paramref name="baseDirectory"/> is null, the application
    /// base directory is scanned.
    /// </summary>
    public static IReadOnlyList<IConnectorPlugin> LoadConnectorPlugins(IServiceProvider? services = null, string? baseDirectory = null)
    {
        var provider = services ?? NullServiceProvider.Instance;
        return BuildCatalog(baseDirectory, ConnectorPatterns).GetAvailableConnectorPlugins(provider);
    }

    /// <summary>
    /// Loads all available exporter plugins; parameter semantics match
    /// <see cref="LoadConnectorPlugins"/>.
    /// </summary>
    public static IReadOnlyList<IExporterPlugin> LoadExporterPlugins(IServiceProvider? services = null, string? baseDirectory = null)
    {
        var provider = services ?? NullServiceProvider.Instance;
        return BuildCatalog(baseDirectory, ExporterPatterns).GetAvailableExporterPlugins(provider);
    }

    // Builds a catalog seeded with in-process Feedser assemblies, then adds any on-disk
    // assemblies that match the supplied patterns.
    private static PluginCatalog BuildCatalog(string? baseDirectory, IReadOnlyCollection<string> patterns)
    {
        var catalog = new PluginCatalog();

        foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
        {
            if (assembly.FullName is { } fullName
                && fullName.StartsWith("StellaOps.Feedser", StringComparison.OrdinalIgnoreCase))
            {
                catalog.AddAssembly(assembly);
            }
        }

        var root = baseDirectory ?? AppContext.BaseDirectory;
        foreach (var pattern in patterns)
        {
            catalog.AddFromDirectory(root, pattern);
        }

        return catalog;
    }

    /// <summary>Minimal provider that resolves nothing; used when callers supply no container.</summary>
    private sealed class NullServiceProvider : IServiceProvider
    {
        public static NullServiceProvider Instance { get; } = new();

        public object? GetService(Type serviceType) => null;
    }
}

View File

@@ -1,19 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Merge/StellaOps.Feedser.Merge.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Exporter.Json/StellaOps.Feedser.Exporter.Json.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Exporter.TrivyDb/StellaOps.Feedser.Exporter.TrivyDb.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="8.0.0" />
<PackageReference Include="Cronos" Version="0.10.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,14 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|JobCoordinator implementation (create/get/mark status)|BE-Core|Storage.Mongo|DONE `JobCoordinator` drives Mongo-backed runs.|
|Cron scheduling loop with TimeProvider|BE-Core|Core|DONE `JobSchedulerHostedService` evaluates cron expressions.|
|Single-flight/lease semantics|BE-Core|Storage.Mongo|DONE lease acquisition backed by `MongoLeaseStore`.|
|Trigger API contract (Result mapping)|BE-Core|WebService|DONE `JobTriggerResult` outcomes map to HTTP statuses.|
|Run telemetry enrichment|BE-Core|Observability|DONE `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.|
|Deterministic params hashing|BE-Core|Core|DONE `JobParametersHasher` creates SHA256 hash.|
|Golden tests for timeout/cancel|QA|Core|DONE JobCoordinatorTests cover cancellation timeout path.|
|JobSchedulerBuilder options registry coverage|BE-Core|Core|TODO verify cron/timeout/lease metadata persists for scheduler surfaces.|
|Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|TODO auto-register job routines for connectors/exporters.|
|Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.|
|Validate job trigger parameters for serialization|BE-Core|WebService|DONE trigger parameters normalized/serialized with defensive checks returning InvalidParameters on failure. Full-suite `dotnet test --no-build` currently red from live connector fixture drift (Oracle/JVN/RedHat).|

View File

@@ -0,0 +1,23 @@
# AGENTS
## Role
Optional exporter producing vuln-list-shaped JSON tree for downstream trivy-db builder or interoperability. Deterministic, provenance-preserving.
## Scope
- Transform canonical advisories into directory tree structure mirroring aquasecurity/vuln-list (by ecosystem/vendor/distro as applicable).
- Sorting and serialization invariants: stable key order, newline policy, UTC ISO-8601.
- Cursoring/incremental export: export_state tracks last advisory hash/time to avoid full rewrites.
- Packaging: output directory under exports/json/<timestamp> with reproducible naming; optionally symlink latest.
- Optional auxiliary index files (for example severity summaries) may be generated when explicitly requested, but must remain deterministic and avoid altering canonical payloads.
## Participants
- Storage.Mongo.AdvisoryStore as input; ExportState repository for cursors/digests.
- Core scheduler runs JsonExportJob; Plugin DI wires JsonExporter + job.
- TrivyDb exporter may consume the rendered tree in v0 (builder path) if configured.
## Interfaces & contracts
- Job kind: export:json (JsonExportJob).
- Determinism: same inputs -> identical file bytes; hash snapshot persisted.
- Provenance: include minimal provenance fields when helpful; keep identity stable.
## In/Out of scope
In: JSON rendering and layout; incremental/deterministic writes.
Out: ORAS push and Trivy DB BoltDB writing (owned by Trivy exporter).
## Observability & security expectations
- Metrics: export.json.records, bytes, duration, delta.changed.
- Logs: target path, record counts, digest; no sensitive data.

View File

@@ -1,25 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Placeholder JSON exporter plugin: always reports available and exports nothing.
/// </summary>
public sealed class JsonExporterPlugin : IExporterPlugin
{
    public string Name => "json";

    public bool IsAvailable(IServiceProvider services) => true;

    public IFeedExporter Create(IServiceProvider services) => new StubExporter(Name);

    /// <summary>No-op exporter standing in until a real JSON exporter exists.</summary>
    private sealed class StubExporter : IFeedExporter
    {
        public StubExporter(string name)
        {
            Name = name;
        }

        public string Name { get; }

        public Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
            => Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,52 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Computes a deterministic SHA-256 digest over an export tree by hashing each file's
/// forward-slash-normalized relative path followed by its raw content, visiting files in
/// ordinal path order. Returns the digest in "sha256:&lt;hex&gt;" form.
/// </summary>
public static class ExportDigestCalculator
{
    public static string ComputeTreeDigest(JsonExportResult result)
    {
        ArgumentNullException.ThrowIfNull(result);

        using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        var buffer = new byte[128 * 1024];

        foreach (var relativePath in result.FilePaths.OrderBy(static path => path, StringComparer.Ordinal))
        {
            // Normalize separators so the digest is identical across platforms.
            var normalized = relativePath.Replace("\\", "/");
            hasher.AppendData(Encoding.UTF8.GetBytes(normalized));

            using var stream = File.OpenRead(ResolveFullPath(result.ExportDirectory, normalized));
            int read;
            while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
            {
                hasher.AppendData(buffer, 0, read);
            }
        }

        var hex = Convert.ToHexString(hasher.GetHashAndReset()).ToLowerInvariant();
        return $"sha256:{hex}";
    }

    // Rebuilds the absolute path from the export root and a forward-slash relative path.
    private static string ResolveFullPath(string root, string normalizedRelativePath)
    {
        var segments = normalizedRelativePath.Split('/', StringSplitOptions.RemoveEmptyEntries);
        var parts = new string[segments.Length + 1];
        parts[0] = root;
        segments.CopyTo(parts, 1);
        return Path.Combine(parts);
    }
}

View File

@@ -0,0 +1,28 @@
using System;
using System.Reflection;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Resolves a human-readable version string for the assembly containing a given type,
/// preferring the informational version, then the file version, then the assembly version,
/// and finally falling back to "0.0.0".
/// </summary>
public static class ExporterVersion
{
    public static string GetVersion(Type anchor)
    {
        ArgumentNullException.ThrowIfNull(anchor);

        var assembly = anchor.Assembly;

        // Probe version candidates from most to least descriptive; the first non-blank wins.
        var candidates = new[]
        {
            assembly.GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion,
            assembly.GetCustomAttribute<AssemblyFileVersionAttribute>()?.Version,
        };

        foreach (var candidate in candidates)
        {
            if (!string.IsNullOrWhiteSpace(candidate))
            {
                return candidate;
            }
        }

        return assembly.GetName().Version?.ToString() ?? "0.0.0";
    }
}

View File

@@ -0,0 +1,12 @@
using StellaOps.Feedser.Models;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Maps a canonical advisory to its file location within the JSON export tree.
/// </summary>
public interface IJsonExportPathResolver
{
    /// <summary>
    /// Returns the relative path (using platform directory separators) for the supplied advisory.
    /// Path must not include the leading export root.
    /// </summary>
    string GetRelativePath(Advisory advisory);
}

View File

@@ -0,0 +1,37 @@
using System;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Metadata describing a single file produced by the JSON exporter.
/// </summary>
public sealed class JsonExportFile
{
    /// <summary>
    /// Creates metadata for one exported file. All arguments are validated before any state
    /// is assigned.
    /// </summary>
    /// <param name="relativePath">Path relative to the export root; must be non-empty.</param>
    /// <param name="length">File size in bytes; must be non-negative.</param>
    /// <param name="digest">Content digest string; must be non-empty.</param>
    /// <exception cref="ArgumentNullException"><paramref name="relativePath"/> or <paramref name="digest"/> is null.</exception>
    /// <exception cref="ArgumentException"><paramref name="relativePath"/> or <paramref name="digest"/> is empty.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="length"/> is negative.</exception>
    public JsonExportFile(string relativePath, long length, string digest)
    {
        ArgumentException.ThrowIfNullOrEmpty(relativePath);
        ArgumentException.ThrowIfNullOrEmpty(digest);
        ArgumentOutOfRangeException.ThrowIfNegative(length);

        RelativePath = relativePath;
        Length = length;
        Digest = digest;
    }

    /// <summary>Path of the file relative to the export root.</summary>
    public string RelativePath { get; }

    /// <summary>Size of the file in bytes.</summary>
    public long Length { get; }

    /// <summary>Content digest of the file.</summary>
    public string Digest { get; }
}

View File

@@ -0,0 +1,30 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Feedser.Core.Jobs;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Scheduler job that runs the JSON feed exporter under the "export:json" kind.
/// </summary>
public sealed class JsonExportJob : IJob
{
    /// <summary>Job kind this job is registered under.</summary>
    public const string JobKind = "export:json";

    /// <summary>Default execution timeout used when no override is configured.</summary>
    public static readonly TimeSpan DefaultTimeout = TimeSpan.FromMinutes(10);

    /// <summary>Default lease term used when no override is configured.</summary>
    public static readonly TimeSpan DefaultLeaseDuration = TimeSpan.FromMinutes(5);

    private readonly JsonFeedExporter _exporter;
    private readonly ILogger<JsonExportJob> _logger;

    public JsonExportJob(JsonFeedExporter exporter, ILogger<JsonExportJob> logger)
    {
        ArgumentNullException.ThrowIfNull(exporter);
        ArgumentNullException.ThrowIfNull(logger);
        _exporter = exporter;
        _logger = logger;
    }

    /// <summary>Runs one export, delegating the work to <see cref="JsonFeedExporter"/>.</summary>
    public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        _logger.LogInformation("Executing JSON export job {RunId}", context.RunId);
        await _exporter.ExportAsync(context.Services, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Completed JSON export job {RunId}", context.RunId);
    }
}

View File

@@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Feedser.Exporter.Json;
// Writes manifest.json into a completed export directory, describing the tree digest,
// totals, and every exported file.
internal static class JsonExportManifestWriter
{
    // camelCase keys, nulls omitted, indented for human inspection. Property order is fixed
    // via JsonPropertyOrder on the record types so the manifest bytes are stable across runs.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = true,
    };

    /// <summary>
    /// Serializes and writes manifest.json into <c>result.ExportDirectory</c>. The manifest
    /// file's last-write time is pinned to the export timestamp for reproducibility.
    /// </summary>
    public static async Task WriteAsync(
        JsonExportResult result,
        string digest,
        string exporterVersion,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(result);
        ArgumentException.ThrowIfNullOrEmpty(digest);
        ArgumentException.ThrowIfNullOrEmpty(exporterVersion);

        // The export directory's leaf name doubles as the export identifier.
        var exportId = Path.GetFileName(result.ExportDirectory);
        // Normalize separators to forward slashes so manifests are identical across platforms.
        var files = result.Files
            .Select(static file => new JsonExportManifestFile(file.RelativePath.Replace("\\", "/", StringComparison.Ordinal), file.Length, file.Digest))
            .ToArray();

        var manifest = new JsonExportManifest(
            exportId,
            result.ExportedAt.UtcDateTime,
            digest,
            result.AdvisoryCount,
            result.TotalBytes,
            files.Length,
            files,
            exporterVersion);

        var payload = JsonSerializer.SerializeToUtf8Bytes(manifest, SerializerOptions);
        var manifestPath = Path.Combine(result.ExportDirectory, "manifest.json");
        await File.WriteAllBytesAsync(manifestPath, payload, cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(manifestPath, result.ExportedAt.UtcDateTime);
    }

    // Wire shape of manifest.json.
    private sealed record JsonExportManifest(
        [property: JsonPropertyOrder(1)] string ExportId,
        [property: JsonPropertyOrder(2)] DateTime GeneratedAt,
        [property: JsonPropertyOrder(3)] string Digest,
        [property: JsonPropertyOrder(4)] int AdvisoryCount,
        [property: JsonPropertyOrder(5)] long TotalBytes,
        [property: JsonPropertyOrder(6)] int FileCount,
        [property: JsonPropertyOrder(7)] IReadOnlyList<JsonExportManifestFile> Files,
        [property: JsonPropertyOrder(8)] string ExporterVersion);

    // Wire shape of a single file entry within the manifest.
    private sealed record JsonExportManifestFile(
        [property: JsonPropertyOrder(1)] string Path,
        [property: JsonPropertyOrder(2)] long Bytes,
        [property: JsonPropertyOrder(3)] string Digest);
}

View File

@@ -0,0 +1,34 @@
using System.IO;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Configuration for JSON exporter output paths and determinism controls.
/// </summary>
public sealed class JsonExportOptions
{
    /// <summary>
    /// Root directory where exports are written. Default "exports/json".
    /// </summary>
    public string OutputRoot { get; set; } = Path.Combine("exports", "json");

    /// <summary>
    /// Format string applied to the export timestamp (UTC) to produce the per-export
    /// directory name.
    /// </summary>
    public string DirectoryNameFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";

    /// <summary>
    /// Optional static name for the symlink (or directory junction) pointing at the most recent export.
    /// </summary>
    public string LatestSymlinkName { get; set; } = "latest";

    /// <summary>
    /// When true, attempts to re-point <see cref="LatestSymlinkName"/> after a successful export.
    /// </summary>
    public bool MaintainLatestSymlink { get; set; } = true;

    /// <summary>
    /// Optional repository identifier recorded alongside export state metadata.
    /// </summary>
    public string? TargetRepository { get; set; }
}

View File

@@ -0,0 +1,46 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Summary of a completed JSON export: output location, export timestamp, per-file metadata,
/// and aggregate totals.
/// </summary>
public sealed class JsonExportResult
{
    public JsonExportResult(
        string exportDirectory,
        DateTimeOffset exportedAt,
        IEnumerable<JsonExportFile> files,
        int advisoryCount,
        long totalBytes)
    {
        if (string.IsNullOrWhiteSpace(exportDirectory))
        {
            throw new ArgumentException("Export directory must be provided.", nameof(exportDirectory));
        }

        if (files is null)
        {
            throw new ArgumentNullException(nameof(files));
        }

        ExportDirectory = exportDirectory;
        ExportedAt = exportedAt;
        AdvisoryCount = advisoryCount;
        TotalBytes = totalBytes;

        // Snapshot the incoming sequence once, dropping any null entries.
        Files = files.Where(static file => file is not null).ToImmutableArray();
        FilePaths = Files.Select(static file => file.RelativePath).ToImmutableArray();
    }

    /// <summary>Absolute directory the export was written to.</summary>
    public string ExportDirectory { get; }

    /// <summary>Timestamp the export was produced at.</summary>
    public DateTimeOffset ExportedAt { get; }

    /// <summary>Metadata for every exported file.</summary>
    public ImmutableArray<JsonExportFile> Files { get; }

    /// <summary>Relative paths of the exported files, in the same order as <see cref="Files"/>.</summary>
    public ImmutableArray<string> FilePaths { get; }

    /// <summary>Number of advisories processed during the export.</summary>
    public int AdvisoryCount { get; }

    /// <summary>Total bytes written across all exported files.</summary>
    public long TotalBytes { get; }
}

View File

@@ -0,0 +1,239 @@
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using StellaOps.Feedser.Models;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Writes canonical advisory snapshots into a vuln-list style directory tree with deterministic ordering.
/// </summary>
public sealed class JsonExportSnapshotBuilder
{
    // UTF-8 without BOM so file bytes are identical across platforms.
    private static readonly Encoding Utf8NoBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);

    private readonly JsonExportOptions _options;
    private readonly IJsonExportPathResolver _pathResolver;

    public JsonExportSnapshotBuilder(JsonExportOptions options, IJsonExportPathResolver pathResolver)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
    }

    /// <summary>
    /// Convenience overload: wraps an in-memory collection in an async sequence and delegates
    /// to the streaming overload.
    /// </summary>
    public Task<JsonExportResult> WriteAsync(
        IReadOnlyCollection<Advisory> advisories,
        DateTimeOffset exportedAt,
        string? exportName = null,
        CancellationToken cancellationToken = default)
    {
        if (advisories is null)
        {
            throw new ArgumentNullException(nameof(advisories));
        }

        return WriteAsync(EnumerateAsync(advisories, cancellationToken), exportedAt, exportName, cancellationToken);
    }

    /// <summary>
    /// Streams advisories into a fresh export directory (an existing directory with the same
    /// name is deleted first), writing one file per advisory at the path chosen by the
    /// resolver. File timestamps are pinned to <paramref name="exportedAt"/> for reproducibility.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Two advisories resolved to the same relative path, or the resolver produced an invalid path.
    /// </exception>
    public async Task<JsonExportResult> WriteAsync(
        IAsyncEnumerable<Advisory> advisories,
        DateTimeOffset exportedAt,
        string? exportName = null,
        CancellationToken cancellationToken = default)
    {
        if (advisories is null)
        {
            throw new ArgumentNullException(nameof(advisories));
        }

        var exportDirectoryName = exportName ?? exportedAt.UtcDateTime.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
        if (string.IsNullOrWhiteSpace(exportDirectoryName))
        {
            throw new InvalidOperationException("Export directory name resolved to an empty string.");
        }

        var exportRoot = EnsureDirectoryExists(Path.GetFullPath(_options.OutputRoot));
        TrySetDirectoryTimestamp(exportRoot, exportedAt);

        var exportDirectory = Path.Combine(exportRoot, exportDirectoryName);
        // Start from a clean slate so stale files from a previous run cannot leak into this export.
        if (Directory.Exists(exportDirectory))
        {
            Directory.Delete(exportDirectory, recursive: true);
        }

        Directory.CreateDirectory(exportDirectory);
        TrySetDirectoryTimestamp(exportDirectory, exportedAt);

        // Case-insensitive so paths differing only by case are rejected — they would collide
        // on case-insensitive filesystems.
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var files = new List<JsonExportFile>();
        long totalBytes = 0L;
        var advisoryCount = 0;

        await foreach (var advisory in advisories.WithCancellation(cancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            advisoryCount++;

            var entry = Resolve(advisory);
            if (!seen.Add(entry.RelativePath))
            {
                throw new InvalidOperationException($"Multiple advisories resolved to the same path '{entry.RelativePath}'.");
            }

            var destination = Combine(exportDirectory, entry.Segments);
            var destinationDirectory = Path.GetDirectoryName(destination);
            if (!string.IsNullOrEmpty(destinationDirectory))
            {
                EnsureDirectoryExists(destinationDirectory);
                TrySetDirectoryTimestamp(destinationDirectory, exportedAt);
            }

            var payload = SnapshotSerializer.ToSnapshot(entry.Advisory);
            var bytes = Utf8NoBom.GetBytes(payload);
            await File.WriteAllBytesAsync(destination, bytes, cancellationToken).ConfigureAwait(false);
            // Pin the write time so repeated exports of identical content produce identical trees.
            File.SetLastWriteTimeUtc(destination, exportedAt.UtcDateTime);

            var digest = ComputeDigest(bytes);
            files.Add(new JsonExportFile(entry.RelativePath, bytes.LongLength, digest));
            totalBytes += bytes.LongLength;
        }

        // Deterministic ordering of the file list regardless of enumeration order.
        files.Sort(static (left, right) => string.CompareOrdinal(left.RelativePath, right.RelativePath));
        return new JsonExportResult(exportDirectory, exportedAt, files, advisoryCount, totalBytes);
    }

    // Adapts a synchronous sequence to IAsyncEnumerable, yielding control after each item.
    private static async IAsyncEnumerable<Advisory> EnumerateAsync(
        IEnumerable<Advisory> advisories,
        [EnumeratorCancellation] CancellationToken cancellationToken)
    {
        foreach (var advisory in advisories)
        {
            cancellationToken.ThrowIfCancellationRequested();
            yield return advisory;
            await Task.Yield();
        }
    }

    private static string EnsureDirectoryExists(string directory)
    {
        if (string.IsNullOrWhiteSpace(directory))
        {
            throw new ArgumentException("Directory path must be provided.", nameof(directory));
        }

        Directory.CreateDirectory(directory);
        return directory;
    }

    // Joins the export root and sanitized segments using platform separators.
    private static string Combine(string root, IReadOnlyList<string> segments)
    {
        var parts = new string[segments.Count + 1];
        parts[0] = root;
        for (var i = 0; i < segments.Count; i++)
        {
            parts[i + 1] = segments[i];
        }

        return Path.Combine(parts);
    }

    // Best-effort timestamp normalization; failures are swallowed because only file content
    // (not directory mtimes) feeds the export digest.
    private static void TrySetDirectoryTimestamp(string directory, DateTimeOffset timestamp)
    {
        try
        {
            Directory.SetLastWriteTimeUtc(directory, timestamp.UtcDateTime);
        }
        catch (IOException)
        {
            // Ignore failure to set timestamps; not critical for content determinism.
        }
        catch (UnauthorizedAccessException)
        {
            // Ignore permission issues when setting timestamps.
        }
        catch (PlatformNotSupportedException)
        {
            // Some platforms may not support this operation.
        }
    }

    // Resolves the advisory's relative path and normalizes it to forward-slash segments.
    private PathResolution Resolve(Advisory advisory)
    {
        if (advisory is null)
        {
            throw new ArgumentNullException(nameof(advisory));
        }

        var relativePath = _pathResolver.GetRelativePath(advisory);
        var segments = NormalizeRelativePath(relativePath);
        var normalized = string.Join('/', segments);
        return new PathResolution(advisory, normalized, segments);
    }

    // Validates and splits a resolver-produced path: must be relative, non-empty, and free of
    // '.'/'..' traversal segments. Each segment is sanitized for the local filesystem.
    private static string[] NormalizeRelativePath(string relativePath)
    {
        if (string.IsNullOrWhiteSpace(relativePath))
        {
            throw new InvalidOperationException("Path resolver returned an empty path.");
        }

        if (Path.IsPathRooted(relativePath))
        {
            throw new InvalidOperationException("Path resolver returned an absolute path; only relative paths are supported.");
        }

        var pieces = relativePath.Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
        if (pieces.Length == 0)
        {
            throw new InvalidOperationException("Path resolver produced no path segments.");
        }

        var sanitized = new string[pieces.Length];
        for (var i = 0; i < pieces.Length; i++)
        {
            var segment = pieces[i];
            if (segment == "." || segment == "..")
            {
                throw new InvalidOperationException("Relative paths cannot include '.' or '..' segments.");
            }

            sanitized[i] = SanitizeSegment(segment);
        }

        return sanitized;
    }

    // Replaces separator and filesystem-invalid characters with '_' and trims surrounding
    // whitespace; a segment that trims to empty collapses to "_".
    private static string SanitizeSegment(string segment)
    {
        var invalid = Path.GetInvalidFileNameChars();
        Span<char> buffer = stackalloc char[segment.Length];
        var count = 0;
        foreach (var ch in segment)
        {
            if (ch == '/' || ch == '\\' || Array.IndexOf(invalid, ch) >= 0)
            {
                buffer[count++] = '_';
            }
            else
            {
                buffer[count++] = ch;
            }
        }

        var sanitized = new string(buffer[..count]).Trim();
        return string.IsNullOrEmpty(sanitized) ? "_" : sanitized;
    }

    // An advisory together with its normalized relative path ("a/b/c") and the individual segments.
    private sealed record PathResolution(Advisory Advisory, string RelativePath, IReadOnlyList<string> Segments);

    // SHA-256 of the file payload, rendered as "sha256:<lowercase hex>".
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = SHA256.HashData(payload);
        var hex = Convert.ToHexString(hash).ToLowerInvariant();
        return $"sha256:{hex}";
    }
}

View File

@@ -0,0 +1,59 @@
using System;
using System.IO;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Storage.Mongo.Exporting;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Plugin DI routine that wires the JSON exporter, its options, and its scheduler job
/// definition into the host container.
/// </summary>
public sealed class JsonExporterDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    // Configuration path the JsonExportOptions are bound from.
    private const string ConfigurationSection = "feedser:exporters:json";

    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.TryAddSingleton<IJsonExportPathResolver, VulnListJsonExportPathResolver>();
        services.TryAddSingleton<ExportStateManager>();

        services.AddOptions<JsonExportOptions>()
            .Bind(configuration.GetSection(ConfigurationSection))
            // Backfill defaults in case the bound configuration cleared these values.
            .PostConfigure(static options =>
            {
                if (string.IsNullOrWhiteSpace(options.OutputRoot))
                {
                    options.OutputRoot = Path.Combine("exports", "json");
                }

                if (string.IsNullOrWhiteSpace(options.DirectoryNameFormat))
                {
                    options.DirectoryNameFormat = "yyyyMMdd'T'HHmmss'Z'";
                }
            });

        services.AddSingleton<JsonFeedExporter>();
        services.AddTransient<JsonExportJob>();

        // Register the export job with the scheduler unless another routine already did.
        services.PostConfigure<JobSchedulerOptions>(options =>
        {
            if (!options.Definitions.ContainsKey(JsonExportJob.JobKind))
            {
                options.Definitions[JsonExportJob.JobKind] = new JobDefinition(
                    JsonExportJob.JobKind,
                    typeof(JsonExportJob),
                    JsonExportJob.DefaultTimeout,
                    JsonExportJob.DefaultLeaseDuration,
                    null,  // no cron schedule; the job is triggered on demand
                    true); // enabled
            }
        });

        return services;
    }
}

View File

@@ -0,0 +1,23 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Plugin entry point exposing the JSON feed exporter to the host's
/// exporter discovery mechanism.
/// </summary>
public sealed class JsonExporterPlugin : IExporterPlugin
{
    /// <summary>Plugin name; mirrors the exporter's registered name.</summary>
    public string Name => JsonFeedExporter.ExporterName;

    /// <summary>The exporter is usable only when an advisory store is registered in DI.</summary>
    public bool IsAvailable(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var advisoryStore = services.GetService<IAdvisoryStore>();
        return advisoryStore is not null;
    }

    /// <summary>Builds a <see cref="JsonFeedExporter"/>, resolving its dependencies from <paramref name="services"/>.</summary>
    public IFeedExporter Create(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        return ActivatorUtilities.CreateInstance<JsonFeedExporter>(services);
    }
}

View File

@@ -0,0 +1,150 @@
using System;
using System.Globalization;
using System.IO;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Exporting;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Exports canonical advisories to a vuln-list style JSON directory tree,
/// records export state (digest/cursor) in Mongo, writes a manifest, and
/// optionally maintains a "latest" symlink pointing at the newest export.
/// </summary>
public sealed class JsonFeedExporter : IFeedExporter
{
    public const string ExporterName = "json";
    public const string ExporterId = "export:json";

    private readonly IAdvisoryStore _advisoryStore;
    private readonly JsonExportOptions _options;
    private readonly IJsonExportPathResolver _pathResolver;
    private readonly ExportStateManager _stateManager;
    private readonly ILogger<JsonFeedExporter> _logger;
    private readonly TimeProvider _timeProvider;
    // Captured once at construction; stamped into export state and the manifest.
    private readonly string _exporterVersion;

    /// <summary>Creates the exporter; all dependencies are required except <paramref name="timeProvider"/> (defaults to the system clock).</summary>
    public JsonFeedExporter(
        IAdvisoryStore advisoryStore,
        IOptions<JsonExportOptions> options,
        IJsonExportPathResolver pathResolver,
        ExportStateManager stateManager,
        ILogger<JsonFeedExporter> logger,
        TimeProvider? timeProvider = null)
    {
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
        _stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _exporterVersion = ExporterVersion.GetVersion(typeof(JsonFeedExporter));
    }

    public string Name => ExporterName;

    /// <summary>
    /// Runs one full export: streams advisories into a snapshot tree, computes the
    /// tree digest, and either persists new state + manifest or discards the tree
    /// when the digest matches the previous export (no changes).
    /// </summary>
    /// <param name="services">Unused here; part of the <see cref="IFeedExporter"/> contract.</param>
    public async Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        var exportedAt = _timeProvider.GetUtcNow();
        // The export id doubles as the timestamped directory name.
        var exportId = exportedAt.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
        var exportRoot = Path.GetFullPath(_options.OutputRoot);
        _logger.LogInformation("Starting JSON export {ExportId}", exportId);

        var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
        var builder = new JsonExportSnapshotBuilder(_options, _pathResolver);
        // Advisories are streamed so the full set never has to be buffered in memory.
        var advisoryStream = _advisoryStore.StreamAsync(cancellationToken);
        var result = await builder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
        var digest = ExportDigestCalculator.ComputeTreeDigest(result);

        _logger.LogInformation(
            "JSON export {ExportId} wrote {FileCount} files ({Bytes} bytes) covering {AdvisoryCount} advisories with digest {Digest}",
            exportId,
            result.Files.Length,
            result.TotalBytes,
            result.AdvisoryCount,
            digest);

        // Identical digest => nothing changed since the last export; drop the new tree.
        if (existingState is not null && string.Equals(existingState.LastFullDigest, digest, StringComparison.Ordinal))
        {
            _logger.LogInformation("JSON export {ExportId} produced unchanged digest; skipping state update.", exportId);
            TryDeleteDirectory(result.ExportDirectory);
            return;
        }

        // The tree digest is also stored as the cursor — presumably consumed by
        // delta/incremental export logic; confirm against ExportStateManager usage.
        await _stateManager.StoreFullExportAsync(
            ExporterId,
            exportId,
            digest,
            cursor: digest,
            targetRepository: _options.TargetRepository,
            exporterVersion: _exporterVersion,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        await JsonExportManifestWriter.WriteAsync(result, digest, _exporterVersion, cancellationToken).ConfigureAwait(false);

        if (_options.MaintainLatestSymlink)
        {
            TryUpdateLatestSymlink(exportRoot, result.ExportDirectory);
        }
    }

    /// <summary>
    /// Best-effort: points the configured "latest" symlink at the new export directory.
    /// Failures are logged, never thrown — the export itself already succeeded.
    /// </summary>
    private void TryUpdateLatestSymlink(string exportRoot, string exportDirectory)
    {
        if (string.IsNullOrWhiteSpace(_options.LatestSymlinkName))
        {
            return;
        }

        var latestPath = Path.Combine(exportRoot, _options.LatestSymlinkName);
        try
        {
            if (Directory.Exists(latestPath) || File.Exists(latestPath))
            {
                TryRemoveExistingPointer(latestPath);
            }

            Directory.CreateSymbolicLink(latestPath, exportDirectory);
            _logger.LogDebug("Updated latest JSON export pointer to {Target}", exportDirectory);
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or PlatformNotSupportedException)
        {
            _logger.LogWarning(ex, "Failed to update latest JSON export pointer at {LatestPath}", latestPath);
        }
    }

    /// <summary>Removes a pre-existing latest pointer, whether it is a directory (symlink) or a file; failures are only logged.</summary>
    private void TryRemoveExistingPointer(string latestPath)
    {
        try
        {
            var attributes = File.GetAttributes(latestPath);
            if (attributes.HasFlag(FileAttributes.Directory))
            {
                // recursive:false — a symlinked directory is removed without touching its target.
                Directory.Delete(latestPath, recursive: false);
            }
            else
            {
                File.Delete(latestPath);
            }
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
        {
            _logger.LogWarning(ex, "Failed to remove existing latest pointer {LatestPath}", latestPath);
        }
    }

    /// <summary>Best-effort recursive delete of an export tree that turned out to be unchanged.</summary>
    private void TryDeleteDirectory(string path)
    {
        try
        {
            if (Directory.Exists(path))
            {
                Directory.Delete(path, recursive: true);
            }
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
        {
            _logger.LogWarning(ex, "Failed to remove unchanged export directory {ExportDirectory}", path);
        }
    }
}

View File

@@ -1,14 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Normalization\StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,11 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Directory layout strategy (vuln-list mirror)|BE-Export|Models|DONE `VulnListJsonExportPathResolver` maps CVE, GHSA, distro, and vendor identifiers into vuln-list style paths.|
|Deterministic serializer|BE-Export|Models|DONE Canonical serializer + snapshot builder emit stable JSON across runs.|
|ExportState read/write|BE-Export|Storage.Mongo|DONE `JsonFeedExporter` reads prior state, stores digests/cursors, and skips unchanged exports.|
|JsonExportJob wiring|BE-Export|Core|DONE Job scheduler options now configurable via DI; JSON job registered with scheduler.|
|Snapshot tests for file tree|QA|Exporters|DONE Added resolver/exporter tests asserting tree layout and deterministic behavior.|
|Parity smoke vs upstream vuln-list|QA|Exporters|DONE `JsonExporterParitySmokeTests` covers common ecosystems against vuln-list layout.|
|Stream advisories during export|BE-Export|Storage.Mongo|DONE exporter + streaming-only test ensures single enumeration and per-file digest capture.|
|Emit export manifest with digest metadata|BE-Export|Exporters|DONE manifest now includes per-file digests/sizes alongside tree digest.|

View File

@@ -0,0 +1,455 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Normalization.Identifiers;
namespace StellaOps.Feedser.Exporter.Json;
/// <summary>
/// Path resolver approximating the directory layout used by aquasecurity/vuln-list.
/// Handles common vendor, distro, and ecosystem shapes with deterministic fallbacks:
/// identifier-shape matching first, then provenance-source mapping, then "misc".
/// </summary>
public sealed class VulnListJsonExportPathResolver : IJsonExportPathResolver
{
    // Identifier shapes recognized directly, checked in ResolveLayout before any
    // provenance-based fallback.
    private static readonly Regex CvePattern = new("^CVE-(?<year>\\d{4})-(?<id>\\d{4,})$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex GhsaPattern = new("^GHSA-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex UsnPattern = new("^USN-(?<id>\\d+-\\d+)(?<suffix>[a-z])?$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex DebianPattern = new("^(?<prefix>DLA|DSA|ELA)-(?<id>\\d+-\\d+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex RedHatPattern = new("^RH(?<type>SA|BA|EA)-(?<rest>[0-9:.-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex AmazonPattern = new("^ALAS(?<channel>2|2022|2023)?-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex OraclePattern = new("^(?<kind>ELSA|ELBA|ELSA-OCI|ELBA-OCI)-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex PhotonPattern = new("^PHSA-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex RockyPattern = new("^RLSA-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex SusePattern = new("^SUSE-(?<kind>SU|RU|OU|SB)-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);

    // Provenance source name -> directory segments used when the identifier shape
    // alone does not determine a layout.
    private static readonly Dictionary<string, string[]> SourceDirectoryMap = new(StringComparer.OrdinalIgnoreCase)
    {
        ["nvd"] = new[] { "nvd" },
        ["ghsa"] = new[] { "ghsa" },
        ["github"] = new[] { "ghsa" },
        ["osv"] = new[] { "osv" },
        ["redhat"] = new[] { "redhat", "oval" },
        ["ubuntu"] = new[] { "ubuntu" },
        ["debian"] = new[] { "debian" },
        ["oracle"] = new[] { "oracle" },
        ["photon"] = new[] { "photon" },
        ["rocky"] = new[] { "rocky" },
        ["suse"] = new[] { "suse" },
        ["amazon"] = new[] { "amazon" },
        ["aws"] = new[] { "amazon" },
        ["alpine"] = new[] { "alpine" },
        ["wolfi"] = new[] { "wolfi" },
        ["chainguard"] = new[] { "chainguard" },
        ["cert-fr"] = new[] { "cert", "fr" },
        ["cert-in"] = new[] { "cert", "in" },
        ["cert-cc"] = new[] { "cert", "cc" },
        ["cert-bund"] = new[] { "cert", "bund" },
        ["cisa"] = new[] { "ics", "cisa" },
        ["ics-cisa"] = new[] { "ics", "cisa" },
        ["ics-kaspersky"] = new[] { "ics", "kaspersky" },
        ["kaspersky"] = new[] { "ics", "kaspersky" },
    };

    // Package-url ecosystem type -> directory name used under ghsa/.
    private static readonly Dictionary<string, string> GhsaEcosystemMap = new(StringComparer.OrdinalIgnoreCase)
    {
        ["go"] = "go",
        ["golang"] = "go",
        ["npm"] = "npm",
        ["maven"] = "maven",
        ["pypi"] = "pip",
        ["pip"] = "pip",
        ["nuget"] = "nuget",
        ["composer"] = "composer",
        ["packagist"] = "composer",
        ["rubygems"] = "rubygems",
        ["gem"] = "rubygems",
        ["swift"] = "swift",
        ["cargo"] = "cargo",
        ["hex"] = "hex",
        ["pub"] = "pub",
        ["github"] = "github",
        ["docker"] = "container",
    };

    /// <summary>
    /// Computes the vuln-list style relative path (directory segments plus
    /// file name) for an advisory.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="advisory"/> is null.</exception>
    /// <exception cref="InvalidOperationException">When no non-empty identifier can be derived.</exception>
    public string GetRelativePath(Advisory advisory)
    {
        if (advisory is null)
        {
            throw new ArgumentNullException(nameof(advisory));
        }

        var identifier = SelectPreferredIdentifier(advisory);
        if (identifier.Length == 0)
        {
            throw new InvalidOperationException("Unable to derive identifier for advisory.");
        }

        var layout = ResolveLayout(advisory, identifier);
        // segments = layout directories + file name appended as the last element.
        var segments = new string[layout.Segments.Length + 1];
        for (var i = 0; i < layout.Segments.Length; i++)
        {
            segments[i] = layout.Segments[i];
        }

        segments[^1] = layout.FileName;
        return Path.Combine(segments);
    }

    /// <summary>Resolution order: CVE, GHSA, distro-specific shapes, provenance map, then a "misc" fallback.</summary>
    private static Layout ResolveLayout(Advisory advisory, string identifier)
    {
        if (TryResolveCve(identifier, out var layout))
        {
            return layout;
        }

        if (TryResolveGhsa(advisory, identifier, out layout))
        {
            return layout;
        }

        if (TryResolveUsn(identifier, out layout) ||
            TryResolveDebian(identifier, out layout) ||
            TryResolveRedHat(identifier, out layout) ||
            TryResolveAmazon(identifier, out layout) ||
            TryResolveOracle(identifier, out layout) ||
            TryResolvePhoton(identifier, out layout) ||
            TryResolveRocky(identifier, out layout) ||
            TryResolveSuse(identifier, out layout))
        {
            return layout;
        }

        if (TryResolveByProvenance(advisory, identifier, out layout))
        {
            return layout;
        }

        return new Layout(new[] { "misc" }, CreateFileName(identifier));
    }

    /// <summary>CVE-YYYY-NNNN... maps to nvd/&lt;year&gt;/.</summary>
    private static bool TryResolveCve(string identifier, out Layout layout)
    {
        var match = CvePattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        var year = match.Groups["year"].Value;
        layout = new Layout(new[] { "nvd", year }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>GHSA ids map to ghsa/&lt;ecosystem&gt;/&lt;package&gt;/ when a package URL is present, otherwise github/advisories/.</summary>
    private static bool TryResolveGhsa(Advisory advisory, string identifier, out Layout layout)
    {
        if (!GhsaPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        if (TryGetGhsaPackage(advisory, out var ecosystem, out var packagePath))
        {
            // NOTE(review): uppercasing the GHSA id changes its lowercase alnum
            // portion — confirm this matches the upstream vuln-list file naming.
            layout = new Layout(new[] { "ghsa", ecosystem, packagePath }, CreateFileName(identifier, uppercase: true));
            return true;
        }

        layout = new Layout(new[] { "github", "advisories" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Ubuntu Security Notices (USN-*) map to ubuntu/.</summary>
    private static bool TryResolveUsn(string identifier, out Layout layout)
    {
        if (!UsnPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "ubuntu" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Debian advisories (DLA/DSA/ELA) map to debian/.</summary>
    private static bool TryResolveDebian(string identifier, out Layout layout)
    {
        var match = DebianPattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "debian" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Red Hat advisories (RHSA/RHBA/RHEA) map to redhat/oval/.</summary>
    private static bool TryResolveRedHat(string identifier, out Layout layout)
    {
        if (!RedHatPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "redhat", "oval" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Amazon Linux advisories map to amazon/&lt;channel&gt;/; no channel suffix means Amazon Linux 1.</summary>
    private static bool TryResolveAmazon(string identifier, out Layout layout)
    {
        var match = AmazonPattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        var channel = match.Groups["channel"].Value;
        var subdirectory = channel switch
        {
            "2" => "2",
            "2023" => "2023",
            "2022" => "2022",
            _ => "1",
        };
        layout = new Layout(new[] { "amazon", subdirectory }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Oracle Linux advisories (ELSA/ELBA, incl. -OCI variants) map to oracle/linux/.</summary>
    private static bool TryResolveOracle(string identifier, out Layout layout)
    {
        if (!OraclePattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "oracle", "linux" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>VMware Photon advisories (PHSA-*) map to photon/.</summary>
    private static bool TryResolvePhoton(string identifier, out Layout layout)
    {
        if (!PhotonPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "photon" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Rocky Linux advisories (RLSA-*) map to rocky/.</summary>
    private static bool TryResolveRocky(string identifier, out Layout layout)
    {
        if (!RockyPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "rocky" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>SUSE advisories (SUSE-SU/RU/OU/SB) map to suse/.</summary>
    private static bool TryResolveSuse(string identifier, out Layout layout)
    {
        if (!SusePattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "suse" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Falls back to the first provenance source with a known directory mapping.</summary>
    private static bool TryResolveByProvenance(Advisory advisory, string identifier, out Layout layout)
    {
        foreach (var source in EnumerateDistinctProvenanceSources(advisory))
        {
            if (SourceDirectoryMap.TryGetValue(source, out var segments))
            {
                layout = new Layout(segments, CreateFileName(identifier));
                return true;
            }
        }

        layout = default;
        return false;
    }

    /// <summary>
    /// Finds the first affected package with a parseable package URL and yields its
    /// mapped ecosystem directory and percent-encoded package path. When false, the
    /// out values are placeholders and must not be used.
    /// </summary>
    private static bool TryGetGhsaPackage(Advisory advisory, out string ecosystem, out string packagePath)
    {
        foreach (var package in advisory.AffectedPackages)
        {
            if (!TryParsePackageUrl(package.Identifier, out var type, out var encodedPath))
            {
                continue;
            }

            if (GhsaEcosystemMap.TryGetValue(type, out var mapped))
            {
                ecosystem = mapped;
            }
            else
            {
                // Unknown ecosystem types pass through lowercased rather than being rejected.
                ecosystem = type.ToLowerInvariant();
            }

            packagePath = encodedPath;
            return true;
        }

        ecosystem = "advisories";
        packagePath = "_";
        return false;
    }

    /// <summary>Normalizes a package URL and joins namespace + name with "%2F" so the package path is a single file-system segment.</summary>
    private static bool TryParsePackageUrl(string identifier, out string type, out string encodedPath)
    {
        type = string.Empty;
        encodedPath = string.Empty;
        if (!IdentifierNormalizer.TryNormalizePackageUrl(identifier, out _, out var packageUrl))
        {
            return false;
        }

        var segments = packageUrl!.NamespaceSegments.IsDefaultOrEmpty
            ? new[] { packageUrl.Name }
            : packageUrl.NamespaceSegments.Append(packageUrl.Name).ToArray();
        type = packageUrl.Type;
        encodedPath = string.Join("%2F", segments);
        return true;
    }

    /// <summary>Builds "&lt;sanitized-identifier&gt;.json", optionally uppercased first.</summary>
    private static string CreateFileName(string identifier, bool uppercase = false)
    {
        var candidate = uppercase ? identifier.ToUpperInvariant() : identifier;
        return $"{SanitizeFileName(candidate)}.json";
    }

    /// <summary>
    /// Yields each distinct (case-insensitive) provenance source name across the
    /// advisory, its references, affected packages, version ranges, and CVSS metrics,
    /// in first-seen order.
    /// </summary>
    private static IEnumerable<string> EnumerateDistinctProvenanceSources(Advisory advisory)
    {
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var source in advisory.Provenance)
        {
            if (TryAddSource(source.Source))
            {
                yield return source.Source;
            }
        }

        foreach (var reference in advisory.References)
        {
            if (TryAddSource(reference.Provenance.Source))
            {
                yield return reference.Provenance.Source;
            }
        }

        foreach (var package in advisory.AffectedPackages)
        {
            foreach (var source in package.Provenance)
            {
                if (TryAddSource(source.Source))
                {
                    yield return source.Source;
                }
            }

            foreach (var range in package.VersionRanges)
            {
                if (TryAddSource(range.Provenance.Source))
                {
                    yield return range.Provenance.Source;
                }
            }
        }

        foreach (var metric in advisory.CvssMetrics)
        {
            if (TryAddSource(metric.Provenance.Source))
            {
                yield return metric.Provenance.Source;
            }
        }

        // Local dedupe helper; blank/whitespace sources are skipped entirely.
        bool TryAddSource(string? value)
        {
            if (string.IsNullOrWhiteSpace(value))
            {
                return false;
            }

            return seen.Add(value);
        }
    }

    /// <summary>Prefers a CVE/GHSA-shaped identifier (key first, then aliases); otherwise uses the trimmed advisory key.</summary>
    private static string SelectPreferredIdentifier(Advisory advisory)
    {
        if (TrySelectIdentifier(advisory.AdvisoryKey, out var preferred))
        {
            return preferred;
        }

        foreach (var alias in advisory.Aliases)
        {
            if (TrySelectIdentifier(alias, out preferred))
            {
                return preferred;
            }
        }

        return advisory.AdvisoryKey.Trim();
    }

    /// <summary>Returns true only for CVE/GHSA-shaped values; the out value is set to the trimmed input either way.</summary>
    private static bool TrySelectIdentifier(string value, out string identifier)
    {
        identifier = string.Empty;
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        var trimmed = value.Trim();
        if (CvePattern.IsMatch(trimmed) || GhsaPattern.IsMatch(trimmed))
        {
            identifier = trimmed;
            return true;
        }

        identifier = trimmed;
        return false;
    }

    /// <summary>Replaces separators and OS-invalid file-name characters with underscores; falls back to "advisory" if nothing remains.</summary>
    private static string SanitizeFileName(string name)
    {
        var invalid = Path.GetInvalidFileNameChars();
        Span<char> buffer = stackalloc char[name.Length];
        var count = 0;
        foreach (var ch in name)
        {
            if (ch == '/' || ch == '\\' || Array.IndexOf(invalid, ch) >= 0)
            {
                buffer[count++] = '_';
            }
            else
            {
                buffer[count++] = ch;
            }
        }

        var sanitized = new string(buffer[..count]).Trim();
        return string.IsNullOrEmpty(sanitized) ? "advisory" : sanitized;
    }

    /// <summary>Directory segments plus the final file name for one advisory.</summary>
    private readonly record struct Layout(string[] Segments, string FileName);
}

View File

@@ -0,0 +1,24 @@
# AGENTS
## Role
Exporter producing a Trivy-compatible database artifact for self-hosting or offline use. v0: JSON list + metadata; v1: integrate official trivy-db builder or write BoltDB directly; pack and optionally push via ORAS.
## Scope
- Read canonical advisories; serialize payload for builder or intermediate; write metadata.json (generatedAt, counts).
- Output root: exports/trivy/<yyyyMMddHHmmss>; deterministic path components.
- OCI/Trivy expectations: layer media type application/vnd.aquasec.trivy.db.layer.v1.tar+gzip; config media type application/vnd.aquasec.trivy.config.v1+json; tag (e.g., 2).
- Optional ORAS push; optional offline bundle (db.tar.gz + metadata.json).
- DI: TrivyExporter + Jobs.TrivyExportJob registered by TrivyExporterDependencyInjectionRoutine.
- Export_state recording: capture digests, counts, start/end timestamps for idempotent reruns and incremental packaging.
## Participants
- Storage.Mongo.AdvisoryStore as input.
- Core scheduler runs export job; WebService/Plugins trigger it.
- JSON exporter (optional precursor) if choosing the builder path.
## Interfaces & contracts
- IFeedExporter.Name = "trivy-db"; ExportAsync(IServiceProvider, CancellationToken).
- FeedserOptions.packaging.trivy governs repo/tag/publish/offline_bundle.
- Deterministic sorting and timestamp discipline (UTC; consider build reproducibility knobs).
## In/Out of scope
In: assembling builder inputs, packing tar.gz, pushing to registry when configured.
Out: signing (external pipeline), scanner behavior.
## Observability & security expectations
- Metrics: export.trivy.records, size_bytes, duration, oras.push.success/fail.
- Logs: export path, repo/tag, digest; redact credentials; backoff on push errors.

View File

@@ -1,25 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Placeholder plugin registration for the Trivy DB exporter.
/// The created exporter is a no-op stub until the real pipeline is wired in.
/// </summary>
public sealed class TrivyDbExporterPlugin : IExporterPlugin
{
    public string Name => "trivydb";

    /// <summary>The stub has no prerequisites, so it is always available.</summary>
    public bool IsAvailable(IServiceProvider services) => true;

    /// <summary>Returns a stub exporter carrying this plugin's name.</summary>
    public IFeedExporter Create(IServiceProvider services) => new StubExporter(Name);

    /// <summary>No-op exporter: completes immediately without producing output.</summary>
    private sealed class StubExporter : IFeedExporter
    {
        public StubExporter(string name)
        {
            Name = name;
        }

        public string Name { get; }

        public Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
            => Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Feedser.Exporter.Json;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>Builds a Trivy database artifact from an exported JSON advisory tree.</summary>
public interface ITrivyDbBuilder
{
    /// <summary>Produces the database archive for the given export run.</summary>
    /// <param name="jsonTree">Result of the JSON export used as builder input.</param>
    /// <param name="exportedAt">UTC timestamp of the export run.</param>
    /// <param name="exportId">Identifier of the export run.</param>
    /// <param name="cancellationToken">Cancels the build.</param>
    Task<TrivyDbBuilderResult> BuildAsync(
        JsonExportResult jsonTree,
        DateTimeOffset exportedAt,
        string exportId,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,9 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>Pushes a built OCI layout to a registry reference for a given export run.</summary>
public interface ITrivyDbOrasPusher
{
    /// <summary>Pushes the layout at <paramref name="layoutPath"/> to <paramref name="reference"/>, tagged for export <paramref name="exportId"/>.</summary>
    Task PushAsync(string layoutPath, string reference, string exportId, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,10 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>OCI content descriptor: media type, content digest, payload size, and optional annotations.</summary>
public sealed record OciDescriptor(
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("digest")] string Digest,
    [property: JsonPropertyName("size")] long Size,
    [property: JsonPropertyName("annotations")] IReadOnlyDictionary<string, string>? Annotations = null);

View File

@@ -0,0 +1,8 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>OCI image index document: schema version plus the manifest descriptor list.</summary>
public sealed record OciIndex(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("manifests")] IReadOnlyList<OciDescriptor> Manifests);

View File

@@ -0,0 +1,10 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>OCI image manifest document: schema version, media type, config descriptor, and layer descriptors.</summary>
public sealed record OciManifest(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("config")] OciDescriptor Config,
    [property: JsonPropertyName("layers")] IReadOnlyList<OciDescriptor> Layers);

View File

@@ -1,14 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Exporter.Json\StellaOps.Feedser.Exporter.Json.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" />
</ItemGroup>
</Project>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,13 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Fix method name typo `GetExportRoot'` -> `GetExportRoot`|BE-Export|Exporters|DONE `TrivyDbExportOptions.GetExportRoot` helper added with unit coverage.|
|Implement BoltDB builder integration (v0 via trivy-db CLI)|BE-Export|Env|DONE `TrivyDbBoltBuilder` shells `trivy-db build` against our JSON tree with deterministic packaging.|
|Pack db.tar.gz + metadata.json|BE-Export|Exporters|DONE Builder output re-packed with fixed timestamps and zeroed gzip mtime.|
|ORAS push support|BE-Export|Exporters|DONE Optional `TrivyDbOrasPusher` shells `oras cp --from-oci-layout` with configurable args/env.|
|Offline bundle toggle|BE-Export|Exporters|DONE Deterministic OCI layout bundle emitted when enabled.|
|Deterministic ordering of advisories|BE-Export|Models|TODO Sort by advisoryKey; stable array orders.|
|End-to-end tests with small dataset|QA|Exporters|TODO Assert media types and reproducible digests across runs.|
|ExportState persistence & idempotence|BE-Export|Storage.Mongo|DOING `ExportStateManager` keeps stable base export metadata; delta reset remains pending.|
|Streamed package building to avoid large copies|BE-Export|Exporters|TODO refactor package writer to stream without double-buffering metadata/archive payloads.|
|Plan incremental/delta exports|BE-Export|Exporters|TODO design reuse of existing blobs/layers when inputs unchanged instead of rewriting full trees each run.|

View File

@@ -0,0 +1,11 @@
using System;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>Config blob for the Trivy DB OCI artifact: generation time plus database version, digest, and size.</summary>
public sealed record TrivyConfigDocument(
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
    [property: JsonPropertyName("databaseVersion")] string DatabaseVersion,
    [property: JsonPropertyName("databaseDigest")] string DatabaseDigest,
    [property: JsonPropertyName("databaseSize")] long DatabaseSize);

View File

@@ -0,0 +1,62 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Read-only blob abstraction for Trivy DB content. Each call to
/// <see cref="OpenReadAsync"/> returns a fresh stream over the same payload.
/// </summary>
public sealed class TrivyDbBlob
{
    private readonly Func<CancellationToken, ValueTask<Stream>> _openReadAsync;

    private TrivyDbBlob(Func<CancellationToken, ValueTask<Stream>> openReadAsync, long length)
    {
        _openReadAsync = openReadAsync ?? throw new ArgumentNullException(nameof(openReadAsync));
        if (length < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(length));
        }

        Length = length;
    }

    /// <summary>Payload length in bytes.</summary>
    public long Length { get; }

    /// <summary>Opens a new readable stream over the blob; the caller owns (and must dispose) the returned stream.</summary>
    public ValueTask<Stream> OpenReadAsync(CancellationToken cancellationToken)
        => _openReadAsync(cancellationToken);

    /// <summary>
    /// Wraps an in-memory payload. The bytes are snapshotted exactly once so that
    /// (a) repeated opens do not re-copy the payload and (b) later mutation of the
    /// caller's buffer cannot leak into readers.
    /// </summary>
    public static TrivyDbBlob FromBytes(ReadOnlyMemory<byte> payload)
    {
        if (payload.IsEmpty)
        {
            return new TrivyDbBlob(static _ => ValueTask.FromResult<Stream>(Stream.Null), 0);
        }

        // Copy once up front instead of inside the open callback (the original
        // called payload.ToArray() on every OpenReadAsync invocation).
        var snapshot = payload.ToArray();
        return new TrivyDbBlob(
            _ => ValueTask.FromResult<Stream>(new MemoryStream(snapshot, writable: false)),
            snapshot.Length);
    }

    /// <summary>
    /// Creates a blob backed by a file on disk. Each open returns an asynchronous,
    /// sequential-scan, share-read stream; the declared length is trusted as-is.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="path"/> is null/blank.</exception>
    /// <exception cref="ArgumentOutOfRangeException">When <paramref name="length"/> is negative.</exception>
    public static TrivyDbBlob FromFile(string path, long length)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("File path must be provided.", nameof(path));
        }

        if (length < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(length));
        }

        return new TrivyDbBlob(
            _ => ValueTask.FromResult<Stream>(new FileStream(
                path,
                FileMode.Open,
                FileAccess.Read,
                FileShare.Read,
                bufferSize: 81920,
                options: FileOptions.Asynchronous | FileOptions.SequentialScan)),
            length);
    }
}

View File

@@ -0,0 +1,376 @@
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Formats.Tar;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Exporter.Json;
namespace StellaOps.Feedser.Exporter.TrivyDb;
public sealed class TrivyDbBoltBuilder : ITrivyDbBuilder
{
    // Export configuration (builder CLI path, targets, environment) and logging sink.
    private readonly TrivyDbExportOptions _options;
    private readonly ILogger<TrivyDbBoltBuilder> _logger;

    /// <summary>Creates the builder; options and logger are both required.</summary>
    public TrivyDbBoltBuilder(IOptions<TrivyDbExportOptions> options, ILogger<TrivyDbBoltBuilder> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }
    /// <summary>
    /// Runs the trivy-db CLI over the exported JSON tree, verifies its output
    /// (metadata.json + trivy.db), repacks it into db.tar.gz, and returns the
    /// archive path, digest, length, and raw builder metadata.
    /// </summary>
    /// <exception cref="InvalidOperationException">When the CLI output files are missing.</exception>
    public async Task<TrivyDbBuilderResult> BuildAsync(
        JsonExportResult jsonTree,
        DateTimeOffset exportedAt,
        string exportId,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(jsonTree);
        ArgumentException.ThrowIfNullOrEmpty(exportId);

        // Scratch area lives inside the export directory, so it travels (and is
        // cleaned up) with the export itself.
        var builderRoot = PrepareBuilderRoot(jsonTree.ExportDirectory, exportId);
        var outputDir = Path.Combine(builderRoot, "out");
        Directory.CreateDirectory(outputDir);

        try
        {
            await RunCliAsync(jsonTree.ExportDirectory, outputDir, cancellationToken).ConfigureAwait(false);
        }
        catch
        {
            // CLI failed: remove partial output, then propagate the original error.
            TryDeleteDirectory(builderRoot);
            throw;
        }

        var metadataPath = Path.Combine(outputDir, "metadata.json");
        var dbPath = Path.Combine(outputDir, "trivy.db");
        if (!File.Exists(metadataPath))
        {
            TryDeleteDirectory(builderRoot);
            throw new InvalidOperationException($"trivy-db metadata not found at '{metadataPath}'.");
        }

        if (!File.Exists(dbPath))
        {
            TryDeleteDirectory(builderRoot);
            throw new InvalidOperationException($"trivy.db not found at '{dbPath}'.");
        }

        var archivePath = Path.Combine(builderRoot, "db.tar.gz");
        await CreateArchiveAsync(archivePath, exportedAt, metadataPath, dbPath, cancellationToken).ConfigureAwait(false);

        var digest = await ComputeDigestAsync(archivePath, cancellationToken).ConfigureAwait(false);
        var length = new FileInfo(archivePath).Length;
        var builderMetadata = await File.ReadAllBytesAsync(metadataPath, cancellationToken).ConfigureAwait(false);

        return new TrivyDbBuilderResult(
            archivePath,
            digest,
            length,
            builderMetadata,
            builderRoot);
    }
private string PrepareBuilderRoot(string exportDirectory, string exportId)
{
var root = Path.Combine(exportDirectory, $".builder-{exportId}");
if (Directory.Exists(root))
{
Directory.Delete(root, recursive: true);
}
Directory.CreateDirectory(root);
return root;
}
private static void TryDeleteDirectory(string directory)
{
try
{
if (Directory.Exists(directory))
{
Directory.Delete(directory, recursive: true);
}
}
catch
{
// ignore cleanup failures
}
}
private async Task RunCliAsync(string cacheDir, string outputDir, CancellationToken cancellationToken)
{
var builderOptions = _options.Builder ?? new TrivyDbBuilderOptions();
var executable = string.IsNullOrWhiteSpace(builderOptions.ExecutablePath)
? "trivy-db"
: builderOptions.ExecutablePath;
var targets = builderOptions.OnlyUpdateTargets ?? new System.Collections.Generic.List<string>();
var environment = builderOptions.Environment ?? new System.Collections.Generic.Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var startInfo = new ProcessStartInfo
{
FileName = executable,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
};
startInfo.ArgumentList.Add("build");
startInfo.ArgumentList.Add("--cache-dir");
startInfo.ArgumentList.Add(cacheDir);
startInfo.ArgumentList.Add("--output-dir");
startInfo.ArgumentList.Add(outputDir);
if (builderOptions.UpdateInterval != default)
{
startInfo.ArgumentList.Add("--update-interval");
startInfo.ArgumentList.Add(ToGoDuration(builderOptions.UpdateInterval));
}
if (targets.Count > 0)
{
foreach (var target in targets.Where(static t => !string.IsNullOrWhiteSpace(t)))
{
startInfo.ArgumentList.Add("--only-update");
startInfo.ArgumentList.Add(target);
}
}
if (!string.IsNullOrWhiteSpace(builderOptions.WorkingDirectory))
{
startInfo.WorkingDirectory = builderOptions.WorkingDirectory;
}
if (!builderOptions.InheritEnvironment)
{
startInfo.Environment.Clear();
}
foreach (var kvp in environment)
{
startInfo.Environment[kvp.Key] = kvp.Value;
}
using var process = new Process { StartInfo = startInfo, EnableRaisingEvents = false };
var stdOut = new StringBuilder();
var stdErr = new StringBuilder();
var stdoutCompletion = new TaskCompletionSource<object?>();
var stderrCompletion = new TaskCompletionSource<object?>();
process.OutputDataReceived += (_, e) =>
{
if (e.Data is null)
{
stdoutCompletion.TrySetResult(null);
}
else
{
stdOut.AppendLine(e.Data);
}
};
process.ErrorDataReceived += (_, e) =>
{
if (e.Data is null)
{
stderrCompletion.TrySetResult(null);
}
else
{
stdErr.AppendLine(e.Data);
}
};
_logger.LogInformation("Running {Executable} to build Trivy DB", executable);
try
{
if (!process.Start())
{
throw new InvalidOperationException($"Failed to start '{executable}'.");
}
}
catch (Exception ex)
{
throw new InvalidOperationException($"Failed to start '{executable}'.", ex);
}
process.BeginOutputReadLine();
process.BeginErrorReadLine();
using var registration = cancellationToken.Register(() =>
{
try
{
if (!process.HasExited)
{
process.Kill(entireProcessTree: true);
}
}
catch
{
// Ignore kill failures.
}
});
#if NET8_0_OR_GREATER
await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
#else
await Task.Run(() => process.WaitForExit(), cancellationToken).ConfigureAwait(false);
#endif
await Task.WhenAll(stdoutCompletion.Task, stderrCompletion.Task).ConfigureAwait(false);
if (process.ExitCode != 0)
{
_logger.LogError("trivy-db exited with code {ExitCode}. stderr: {Stderr}", process.ExitCode, stdErr.ToString());
throw new InvalidOperationException($"'{executable}' exited with code {process.ExitCode}.");
}
if (stdOut.Length > 0)
{
_logger.LogDebug("trivy-db output: {StdOut}", stdOut.ToString());
}
if (stdErr.Length > 0)
{
_logger.LogWarning("trivy-db warnings: {StdErr}", stdErr.ToString());
}
}
private static async Task CreateArchiveAsync(
string archivePath,
DateTimeOffset exportedAt,
string metadataPath,
string dbPath,
CancellationToken cancellationToken)
{
await using var archiveStream = new FileStream(
archivePath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 81920,
options: FileOptions.Asynchronous | FileOptions.SequentialScan);
await using var gzip = new GZipStream(archiveStream, CompressionLevel.SmallestSize, leaveOpen: true);
await using var writer = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: false);
var timestamp = exportedAt.UtcDateTime;
foreach (var file in EnumerateArchiveEntries(metadataPath, dbPath))
{
cancellationToken.ThrowIfCancellationRequested();
var entry = new PaxTarEntry(TarEntryType.RegularFile, file.Name)
{
ModificationTime = timestamp,
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
};
await using var source = new FileStream(
file.Path,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 81920,
options: FileOptions.Asynchronous | FileOptions.SequentialScan);
entry.DataStream = source;
writer.WriteEntry(entry);
}
await writer.DisposeAsync().ConfigureAwait(false);
await ZeroGzipMtimeAsync(archivePath, cancellationToken).ConfigureAwait(false);
}
private static IEnumerable<(string Name, string Path)> EnumerateArchiveEntries(string metadataPath, string dbPath)
{
yield return ("metadata.json", metadataPath);
yield return ("trivy.db", dbPath);
}
private static async Task<string> ComputeDigestAsync(string archivePath, CancellationToken cancellationToken)
{
await using var stream = new FileStream(
archivePath,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 81920,
options: FileOptions.Asynchronous | FileOptions.SequentialScan);
var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static async Task ZeroGzipMtimeAsync(string archivePath, CancellationToken cancellationToken)
{
await using var stream = new FileStream(
archivePath,
FileMode.Open,
FileAccess.ReadWrite,
FileShare.None,
bufferSize: 8,
options: FileOptions.Asynchronous);
if (stream.Length < 10)
{
return;
}
stream.Position = 4;
var zeros = new byte[4];
await stream.WriteAsync(zeros, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static string ToGoDuration(TimeSpan span)
{
if (span <= TimeSpan.Zero)
{
return "0s";
}
span = span.Duration();
var builder = new StringBuilder();
var totalHours = (int)span.TotalHours;
if (totalHours > 0)
{
builder.Append(totalHours);
builder.Append('h');
}
var minutes = span.Minutes;
if (minutes > 0)
{
builder.Append(minutes);
builder.Append('m');
}
var seconds = span.Seconds + span.Milliseconds / 1000.0;
if (seconds > 0 || builder.Length == 0)
{
if (span.Milliseconds == 0)
{
builder.Append(span.Seconds);
}
else
{
builder.Append(seconds.ToString("0.###", CultureInfo.InvariantCulture));
}
builder.Append('s');
}
return builder.ToString();
}
}

View File

@@ -0,0 +1,10 @@
using System;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Outcome of a Trivy DB build: the produced archive, its integrity data, and the
/// scratch directory the caller is responsible for deleting.
/// </summary>
/// <param name="ArchivePath">Absolute path of the generated <c>db.tar.gz</c>.</param>
/// <param name="ArchiveDigest">Archive digest in <c>sha256:&lt;hex&gt;</c> form.</param>
/// <param name="ArchiveLength">Archive size in bytes.</param>
/// <param name="BuilderMetadata">Raw bytes of the builder-produced <c>metadata.json</c>.</param>
/// <param name="WorkingDirectory">Builder scratch root; the caller must clean it up.</param>
public sealed record TrivyDbBuilderResult(
    string ArchivePath,
    string ArchiveDigest,
    long ArchiveLength,
    ReadOnlyMemory<byte> BuilderMetadata,
    string WorkingDirectory);

View File

@@ -0,0 +1,30 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Feedser.Core.Jobs;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Scheduler job wrapper that runs the Trivy DB exporter under the job system.
/// </summary>
public sealed class TrivyDbExportJob : IJob
{
    /// <summary>Job kind identifier used for registration and scheduling.</summary>
    public const string JobKind = "export:trivy-db";
    /// <summary>Upper bound for a single export run before the scheduler aborts it.</summary>
    public static readonly TimeSpan DefaultTimeout = TimeSpan.FromMinutes(20);
    /// <summary>Lease duration that prevents concurrent runs of the same job.</summary>
    public static readonly TimeSpan DefaultLeaseDuration = TimeSpan.FromMinutes(10);
    private readonly TrivyDbFeedExporter _exporter;
    private readonly ILogger<TrivyDbExportJob> _logger;
    public TrivyDbExportJob(TrivyDbFeedExporter exporter, ILogger<TrivyDbExportJob> logger)
    {
        _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }
    /// <summary>Runs one export, delegating to <see cref="TrivyDbFeedExporter"/>.</summary>
    public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        _logger.LogInformation("Executing Trivy DB export job {RunId}", context.RunId);
        await _exporter.ExportAsync(context.Services, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Completed Trivy DB export job {RunId}", context.RunId);
    }
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>Describes how the exporter should proceed for a given run.</summary>
public enum TrivyDbExportMode
{
    /// <summary>Rebuild and publish the complete database.</summary>
    Full,
    /// <summary>Incremental export; reserved — the planner does not emit this yet.</summary>
    Delta,
    /// <summary>Tree digest unchanged since the last export; do nothing.</summary>
    Skip,
}

View File

@@ -0,0 +1,80 @@
using System;
using System.IO;
using System.Collections.Generic;
using StellaOps.Feedser.Exporter.Json;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Configuration for the Trivy DB exporter: output layout, OCI reference naming,
/// the embedded JSON-tree export, and the builder/ORAS/offline-bundle sub-options.
/// </summary>
public sealed class TrivyDbExportOptions
{
    /// <summary>Root directory that receives one OCI layout per export id.</summary>
    public string OutputRoot { get; set; } = Path.Combine("exports", "trivy");
    /// <summary>Repository portion of the OCI reference; the export id becomes the tag.</summary>
    public string ReferencePrefix { get; set; } = "feedser/trivy";
    /// <summary>DateTimeOffset format string used to derive the export id / tag.</summary>
    public string TagFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";
    /// <summary>Format string for the database version embedded in the package.</summary>
    public string DatabaseVersionFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";
    /// <summary>When true, the intermediate JSON tree is kept after the export completes.</summary>
    public bool KeepWorkingTree { get; set; }
    /// <summary>Optional remote repository recorded with the export state.</summary>
    public string? TargetRepository { get; set; }
    /// <summary>Options for the intermediate JSON tree snapshot.</summary>
    public JsonExportOptions Json { get; set; } = new()
    {
        OutputRoot = Path.Combine("exports", "trivy", "tree")
    };
    /// <summary>Settings for the external trivy-db builder CLI.</summary>
    public TrivyDbBuilderOptions Builder { get; set; } = new();
    /// <summary>Settings for the optional ORAS push step.</summary>
    public TrivyDbOrasOptions Oras { get; set; } = new();
    /// <summary>Settings for the optional offline bundle tarball.</summary>
    public TrivyDbOfflineBundleOptions OfflineBundle { get; set; } = new();
    /// <summary>Resolves the absolute OCI-layout directory for a given export id.</summary>
    public string GetExportRoot(string exportId)
    {
        ArgumentException.ThrowIfNullOrEmpty(exportId);
        var root = Path.GetFullPath(OutputRoot);
        return Path.Combine(root, exportId);
    }
}
/// <summary>Options controlling how the external <c>trivy-db</c> CLI is invoked.</summary>
public sealed class TrivyDbBuilderOptions
{
    /// <summary>Path or name of the trivy-db executable.</summary>
    public string ExecutablePath { get; set; } = "trivy-db";
    /// <summary>Working directory for the CLI process; current directory when null.</summary>
    public string? WorkingDirectory { get; set; }
    /// <summary>Value passed to the CLI's <c>--update-interval</c> flag.</summary>
    public TimeSpan UpdateInterval { get; set; } = TimeSpan.FromHours(24);
    /// <summary>Optional list of targets passed via repeated <c>--only-update</c> flags.</summary>
    public List<string> OnlyUpdateTargets { get; set; } = new();
    /// <summary>Extra environment variables applied to the CLI process.</summary>
    public Dictionary<string, string> Environment { get; set; } = new(StringComparer.OrdinalIgnoreCase);
    /// <summary>When false, the parent process environment is cleared before applying <see cref="Environment"/>.</summary>
    public bool InheritEnvironment { get; set; } = true;
}
/// <summary>Options for the optional ORAS push of the OCI layout to a registry.</summary>
public sealed class TrivyDbOrasOptions
{
    /// <summary>Enables the ORAS push step after the layout is written.</summary>
    public bool Enabled { get; set; }
    /// <summary>Path or name of the oras executable.</summary>
    public string ExecutablePath { get; set; } = "oras";
    /// <summary>Working directory for the oras process; current directory when null.</summary>
    public string? WorkingDirectory { get; set; }
    /// <summary>When false, the parent process environment is cleared before applying <see cref="Environment"/>.</summary>
    public bool InheritEnvironment { get; set; } = true;
    /// <summary>Extra command-line arguments appended to the oras invocation.</summary>
    public List<string> AdditionalArguments { get; set; } = new();
    /// <summary>Extra environment variables applied to the oras process.</summary>
    public Dictionary<string, string> Environment { get; set; } = new(StringComparer.OrdinalIgnoreCase);
    /// <summary>Disables TLS certificate verification (oras <c>--insecure</c>-style flag).</summary>
    public bool SkipTlsVerify { get; set; }
    /// <summary>Uses plain HTTP instead of HTTPS when talking to the registry.</summary>
    public bool UseHttp { get; set; }
}
/// <summary>Options for the optional offline bundle tarball of the OCI layout.</summary>
public sealed class TrivyDbOfflineBundleOptions
{
    /// <summary>Enables writing the offline bundle after a successful export.</summary>
    public bool Enabled { get; set; }
    /// <summary>
    /// Bundle file name; may contain the <c>{exportId}</c> placeholder. Defaults to
    /// <c>&lt;exportId&gt;.offline.tar.gz</c> when null or whitespace.
    /// </summary>
    public string? FileName { get; set; }
}

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Decision produced by the export planner for a single run.
/// </summary>
/// <param name="Mode">Whether to run a full export or skip this run.</param>
/// <param name="TreeDigest">Digest of the freshly computed JSON tree.</param>
/// <param name="BaseExportId">Export id of the prior baseline, when known.</param>
/// <param name="BaseManifestDigest">Manifest digest of the prior full export, when known.</param>
public sealed record TrivyDbExportPlan(
    TrivyDbExportMode Mode,
    string TreeDigest,
    string? BaseExportId,
    string? BaseManifestDigest);

View File

@@ -0,0 +1,33 @@
using System;
using StellaOps.Feedser.Storage.Mongo.Exporting;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Decides whether a run requires a full rebuild or can be skipped, by comparing the
/// freshly computed tree digest against the cursor stored in the last export state.
/// </summary>
public sealed class TrivyDbExportPlanner
{
    /// <summary>
    /// Produces the plan for one export run.
    /// </summary>
    /// <param name="existingState">State of the previous export, or null on the first run.</param>
    /// <param name="treeDigest">Digest of the JSON tree that was just produced.</param>
    public TrivyDbExportPlan CreatePlan(ExportStateRecord? existingState, string treeDigest)
    {
        ArgumentException.ThrowIfNullOrEmpty(treeDigest);

        return existingState switch
        {
            // First export ever: full build with no baseline to reference.
            null => new TrivyDbExportPlan(TrivyDbExportMode.Full, treeDigest, BaseExportId: null, BaseManifestDigest: null),

            // Stored cursor matches the new digest: nothing changed, skip packaging.
            _ when string.Equals(existingState.ExportCursor, treeDigest, StringComparison.Ordinal)
                => new TrivyDbExportPlan(
                    TrivyDbExportMode.Skip,
                    treeDigest,
                    existingState.BaseExportId,
                    existingState.LastFullDigest),

            // Tree changed: rebuild from scratch. Delta mode is a future enhancement.
            _ => new TrivyDbExportPlan(
                TrivyDbExportMode.Full,
                treeDigest,
                existingState.BaseExportId,
                existingState.LastFullDigest),
        };
    }
}

View File

@@ -0,0 +1,64 @@
using System;
using System.IO;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.DependencyInjection;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Exporter.Json;
using StellaOps.Feedser.Storage.Mongo.Exporting;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Registers the Trivy DB exporter services, options binding/normalization, and the
/// export job definition into the host's dependency injection container.
/// </summary>
public sealed class TrivyDbExporterDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    // Configuration path bound to TrivyDbExportOptions.
    private const string ConfigurationSection = "feedser:exporters:trivyDb";
    /// <summary>Adds all exporter services; safe to call alongside other exporters (TryAdd for shared services).</summary>
    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        // Shared services: only registered if another exporter has not already done so.
        services.TryAddSingleton<IJsonExportPathResolver, VulnListJsonExportPathResolver>();
        services.TryAddSingleton<ExportStateManager>();
        services.AddOptions<TrivyDbExportOptions>()
            .Bind(configuration.GetSection(ConfigurationSection))
            .PostConfigure(static options =>
            {
                // Fall back to defaults for any value left empty by configuration.
                options.OutputRoot = Normalize(options.OutputRoot, Path.Combine("exports", "trivy"));
                options.Json.OutputRoot = Normalize(options.Json.OutputRoot, Path.Combine("exports", "trivy", "tree"));
                options.TagFormat = string.IsNullOrWhiteSpace(options.TagFormat) ? "yyyyMMdd'T'HHmmss'Z'" : options.TagFormat;
                options.DatabaseVersionFormat = string.IsNullOrWhiteSpace(options.DatabaseVersionFormat) ? "yyyyMMdd'T'HHmmss'Z'" : options.DatabaseVersionFormat;
                options.ReferencePrefix = string.IsNullOrWhiteSpace(options.ReferencePrefix) ? "feedser/trivy" : options.ReferencePrefix;
            });
        services.AddSingleton<TrivyDbPackageBuilder>();
        services.AddSingleton<TrivyDbOciWriter>();
        services.AddSingleton<TrivyDbExportPlanner>();
        services.AddSingleton<ITrivyDbBuilder, TrivyDbBoltBuilder>();
        services.AddSingleton<ITrivyDbOrasPusher, TrivyDbOrasPusher>();
        services.AddSingleton<TrivyDbFeedExporter>();
        services.AddTransient<TrivyDbExportJob>();
        // Register the job definition unless the host configured one already.
        services.PostConfigure<JobSchedulerOptions>(options =>
        {
            if (!options.Definitions.ContainsKey(TrivyDbExportJob.JobKind))
            {
                options.Definitions[TrivyDbExportJob.JobKind] = new JobDefinition(
                    TrivyDbExportJob.JobKind,
                    typeof(TrivyDbExportJob),
                    TrivyDbExportJob.DefaultTimeout,
                    TrivyDbExportJob.DefaultLeaseDuration,
                    null,
                    true);
            }
        });
        return services;
    }
    // Returns the fallback when the configured value is null or whitespace.
    private static string Normalize(string? value, string fallback)
        => string.IsNullOrWhiteSpace(value) ? fallback : value;
}

View File

@@ -0,0 +1,23 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Plugin entry point exposing the Trivy DB exporter to the host's exporter registry.
/// </summary>
public sealed class TrivyDbExporterPlugin : IExporterPlugin
{
    /// <summary>Stable exporter name used for discovery.</summary>
    public string Name => TrivyDbFeedExporter.ExporterName;

    /// <summary>The exporter is usable only when an advisory store has been registered.</summary>
    public bool IsAvailable(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var advisoryStore = services.GetService<IAdvisoryStore>();
        return advisoryStore is not null;
    }

    /// <summary>Builds a new exporter instance, resolving dependencies from the container.</summary>
    public IFeedExporter Create(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var exporter = ActivatorUtilities.CreateInstance<TrivyDbFeedExporter>(services);
        return exporter;
    }
}

View File

@@ -0,0 +1,365 @@
using System;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using System.Formats.Tar;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Exporter.Json;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Exporting;
using StellaOps.Plugin;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Orchestrates the full Trivy DB export pipeline: snapshot advisories into a JSON tree,
/// plan full/skip against the prior export state, build the BoltDB archive, write an OCI
/// layout, optionally push it via ORAS, persist the new export state, and optionally emit
/// an offline bundle tarball.
/// </summary>
public sealed class TrivyDbFeedExporter : IFeedExporter
{
    public const string ExporterName = "trivy-db";
    public const string ExporterId = "export:trivy-db";
    private readonly IAdvisoryStore _advisoryStore;
    private readonly IJsonExportPathResolver _pathResolver;
    private readonly TrivyDbExportOptions _options;
    private readonly TrivyDbPackageBuilder _packageBuilder;
    private readonly TrivyDbOciWriter _ociWriter;
    private readonly ExportStateManager _stateManager;
    private readonly TrivyDbExportPlanner _exportPlanner;
    private readonly ITrivyDbBuilder _builder;
    private readonly ITrivyDbOrasPusher _orasPusher;
    private readonly ILogger<TrivyDbFeedExporter> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly string _exporterVersion;
    public TrivyDbFeedExporter(
        IAdvisoryStore advisoryStore,
        IJsonExportPathResolver pathResolver,
        IOptions<TrivyDbExportOptions> options,
        TrivyDbPackageBuilder packageBuilder,
        TrivyDbOciWriter ociWriter,
        ExportStateManager stateManager,
        TrivyDbExportPlanner exportPlanner,
        ITrivyDbBuilder builder,
        ITrivyDbOrasPusher orasPusher,
        ILogger<TrivyDbFeedExporter> logger,
        TimeProvider? timeProvider = null)
    {
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _packageBuilder = packageBuilder ?? throw new ArgumentNullException(nameof(packageBuilder));
        _ociWriter = ociWriter ?? throw new ArgumentNullException(nameof(ociWriter));
        _stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
        _exportPlanner = exportPlanner ?? throw new ArgumentNullException(nameof(exportPlanner));
        _builder = builder ?? throw new ArgumentNullException(nameof(builder));
        _orasPusher = orasPusher ?? throw new ArgumentNullException(nameof(orasPusher));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        // TimeProvider is injectable for deterministic tests.
        _timeProvider = timeProvider ?? TimeProvider.System;
        _exporterVersion = ExporterVersion.GetVersion(typeof(TrivyDbFeedExporter));
    }
    public string Name => ExporterName;
    /// <summary>
    /// Runs one export. When the tree digest matches the stored cursor the run is skipped;
    /// otherwise a full rebuild is performed and the export state is updated.
    /// </summary>
    public async Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        // The export id doubles as the OCI tag and the output directory name.
        var exportedAt = _timeProvider.GetUtcNow();
        var exportId = exportedAt.ToString(_options.TagFormat, CultureInfo.InvariantCulture);
        var reference = $"{_options.ReferencePrefix}:{exportId}";
        _logger.LogInformation("Starting Trivy DB export {ExportId}", exportId);
        // Step 1: materialize the advisory stream as a JSON tree on disk.
        var jsonBuilder = new JsonExportSnapshotBuilder(_options.Json, _pathResolver);
        var advisoryStream = _advisoryStore.StreamAsync(cancellationToken);
        var jsonResult = await jsonBuilder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation(
            "Prepared Trivy JSON tree {ExportId} with {AdvisoryCount} advisories ({Bytes} bytes)",
            exportId,
            jsonResult.AdvisoryCount,
            jsonResult.TotalBytes);
        // Step 2: compare the tree digest against the last stored cursor to decide the plan.
        var treeDigest = ExportDigestCalculator.ComputeTreeDigest(jsonResult);
        var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
        var plan = _exportPlanner.CreatePlan(existingState, treeDigest);
        if (plan.Mode == TrivyDbExportMode.Skip)
        {
            _logger.LogInformation(
                "Trivy DB export {ExportId} unchanged from base {BaseExport}; skipping OCI packaging.",
                exportId,
                plan.BaseExportId ?? "(none)");
            if (!_options.KeepWorkingTree)
            {
                TryDeleteDirectory(jsonResult.ExportDirectory);
            }
            return;
        }
        // Step 3: run the builder CLI and wrap its metadata with exporter-level details.
        var builderResult = await _builder.BuildAsync(jsonResult, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
        var metadataBytes = CreateMetadataJson(builderResult.BuilderMetadata, treeDigest, jsonResult, exportedAt);
        try
        {
            // Step 4: package everything into an OCI layout and (optionally) push it.
            var package = _packageBuilder.BuildPackage(new TrivyDbPackageRequest(
                metadataBytes,
                builderResult.ArchivePath,
                builderResult.ArchiveDigest,
                builderResult.ArchiveLength,
                exportedAt,
                exportedAt.ToString(_options.DatabaseVersionFormat, CultureInfo.InvariantCulture)));
            var destination = _options.GetExportRoot(exportId);
            var ociResult = await _ociWriter.WriteAsync(package, destination, reference, cancellationToken).ConfigureAwait(false);
            if (_options.Oras.Enabled)
            {
                await _orasPusher.PushAsync(destination, reference, exportId, cancellationToken).ConfigureAwait(false);
            }
            _logger.LogInformation(
                "Trivy DB export {ExportId} wrote manifest {ManifestDigest}",
                exportId,
                ociResult.ManifestDigest);
            // Step 5: persist state so the next run can detect an unchanged tree.
            await _stateManager.StoreFullExportAsync(
                ExporterId,
                exportId,
                ociResult.ManifestDigest,
                cursor: treeDigest,
                targetRepository: _options.TargetRepository,
                exporterVersion: _exporterVersion,
                cancellationToken: cancellationToken).ConfigureAwait(false);
            await CreateOfflineBundleAsync(destination, exportId, exportedAt, cancellationToken).ConfigureAwait(false);
        }
        finally
        {
            // The builder scratch directory is always removed, even on failure.
            TryDeleteDirectory(builderResult.WorkingDirectory);
        }
        if (!_options.KeepWorkingTree)
        {
            TryDeleteDirectory(jsonResult.ExportDirectory);
        }
    }
    // Wraps the raw builder metadata together with exporter-level provenance
    // (tree digest, advisory count, exporter version) into one metadata document.
    // NOTE(review): allocates a fresh JsonSerializerOptions per call (CA1869) —
    // consider caching in a static readonly field.
    private byte[] CreateMetadataJson(
        ReadOnlyMemory<byte> builderMetadata,
        string treeDigest,
        JsonExportResult result,
        DateTimeOffset exportedAt)
    {
        var metadata = new TrivyMetadata
        {
            GeneratedAt = exportedAt.UtcDateTime,
            AdvisoryCount = result.AdvisoryCount,
            TreeDigest = treeDigest,
            TreeBytes = result.TotalBytes,
            ExporterVersion = _exporterVersion,
            Builder = ParseBuilderMetadata(builderMetadata.Span),
        };
        return JsonSerializer.SerializeToUtf8Bytes(metadata, new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false,
        });
    }
    // Best-effort parse of the builder's metadata.json; a malformed payload yields null
    // rather than failing the export.
    private static BuilderMetadata? ParseBuilderMetadata(ReadOnlySpan<byte> payload)
    {
        if (payload.IsEmpty)
        {
            return null;
        }
        try
        {
            return JsonSerializer.Deserialize<BuilderMetadata>(payload, new JsonSerializerOptions
            {
                PropertyNameCaseInsensitive = true,
            });
        }
        catch
        {
            return null;
        }
    }
    // Packs the whole OCI layout into a single deterministic tar.gz for air-gapped
    // distribution: entries are sorted ordinally, timestamps fixed, gzip MTIME zeroed.
    private async Task CreateOfflineBundleAsync(string layoutPath, string exportId, DateTimeOffset exportedAt, CancellationToken cancellationToken)
    {
        if (!_options.OfflineBundle.Enabled)
        {
            return;
        }
        var parent = Path.GetDirectoryName(layoutPath) ?? layoutPath;
        var fileName = string.IsNullOrWhiteSpace(_options.OfflineBundle.FileName)
            ? $"{exportId}.offline.tar.gz"
            : _options.OfflineBundle.FileName.Replace("{exportId}", exportId, StringComparison.Ordinal);
        var bundlePath = Path.IsPathRooted(fileName) ? fileName : Path.Combine(parent, fileName);
        Directory.CreateDirectory(Path.GetDirectoryName(bundlePath)!);
        if (File.Exists(bundlePath))
        {
            File.Delete(bundlePath);
        }
        // Sort directory and file entries so the archive layout is reproducible.
        var normalizedRoot = Path.GetFullPath(layoutPath);
        var directories = Directory.GetDirectories(normalizedRoot, "*", SearchOption.AllDirectories)
            .Select(dir => NormalizeTarPath(normalizedRoot, dir) + "/")
            .OrderBy(static path => path, StringComparer.Ordinal)
            .ToArray();
        var files = Directory.GetFiles(normalizedRoot, "*", SearchOption.AllDirectories)
            .Select(file => NormalizeTarPath(normalizedRoot, file))
            .OrderBy(static path => path, StringComparer.Ordinal)
            .ToArray();
        // Explicit using scopes ensure every handle is closed before the gzip header is
        // patched by ZeroGzipMtimeAsync below.
        await using (var archiveStream = new FileStream(
            bundlePath,
            FileMode.Create,
            FileAccess.Write,
            FileShare.None,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan))
        await using (var gzip = new GZipStream(archiveStream, CompressionLevel.SmallestSize, leaveOpen: true))
        await using (var writer = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: false))
        {
            var timestamp = exportedAt.UtcDateTime;
            foreach (var directory in directories)
            {
                var entry = new PaxTarEntry(TarEntryType.Directory, directory)
                {
                    ModificationTime = timestamp,
                    Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
                           UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                           UnixFileMode.OtherRead | UnixFileMode.OtherExecute,
                };
                writer.WriteEntry(entry);
            }
            foreach (var relativePath in files)
            {
                var fullPath = Path.Combine(normalizedRoot, relativePath.Replace('/', Path.DirectorySeparatorChar));
                var entry = new PaxTarEntry(TarEntryType.RegularFile, relativePath)
                {
                    ModificationTime = timestamp,
                    Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite |
                           UnixFileMode.GroupRead |
                           UnixFileMode.OtherRead,
                };
                await using var source = new FileStream(
                    fullPath,
                    FileMode.Open,
                    FileAccess.Read,
                    FileShare.Read,
                    bufferSize: 81920,
                    options: FileOptions.Asynchronous | FileOptions.SequentialScan);
                entry.DataStream = source;
                writer.WriteEntry(entry);
            }
        }
        await ZeroGzipMtimeAsync(bundlePath, cancellationToken).ConfigureAwait(false);
        var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false);
        var length = new FileInfo(bundlePath).Length;
        _logger.LogInformation("Wrote offline bundle {BundlePath} ({Length} bytes, digest {Digest})", bundlePath, length, digest);
    }
    private static void TryDeleteDirectory(string directory)
    {
        try
        {
            if (Directory.Exists(directory))
            {
                Directory.Delete(directory, recursive: true);
            }
        }
        catch
        {
            // Best effort cleanup ignore failures.
        }
    }
    // Zeroes the 4-byte MTIME field (offset 4) of the gzip header so identical content
    // produces identical bundle bytes.
    private static async Task ZeroGzipMtimeAsync(string archivePath, CancellationToken cancellationToken)
    {
        await using var stream = new FileStream(
            archivePath,
            FileMode.Open,
            FileAccess.ReadWrite,
            FileShare.None,
            bufferSize: 8,
            options: FileOptions.Asynchronous);
        if (stream.Length < 10)
        {
            // Shorter than a gzip header; nothing to patch.
            return;
        }
        stream.Position = 4;
        var zeros = new byte[4];
        await stream.WriteAsync(zeros, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
    private static async Task<string> ComputeSha256Async(string path, CancellationToken cancellationToken)
    {
        await using var stream = new FileStream(
            path,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
    // Converts an absolute path under root into a forward-slash relative tar entry name.
    private static string NormalizeTarPath(string root, string fullPath)
    {
        var relative = Path.GetRelativePath(root, fullPath);
        var normalized = relative.Replace(Path.DirectorySeparatorChar, '/');
        return string.IsNullOrEmpty(normalized) ? "." : normalized;
    }
    // Shape of the exporter-level metadata.json written into the OCI layout.
    private sealed class TrivyMetadata
    {
        public DateTime GeneratedAt { get; set; }
        public int AdvisoryCount { get; set; }
        public string TreeDigest { get; set; } = string.Empty;
        public long TreeBytes { get; set; }
        public string ExporterVersion { get; set; } = string.Empty;
        public BuilderMetadata? Builder { get; set; }
    }
    // Shape of the metadata.json emitted by the trivy-db builder CLI.
    private sealed class BuilderMetadata
    {
        [JsonPropertyName("Version")]
        public int Version { get; set; }
        public DateTime NextUpdate { get; set; }
        public DateTime UpdatedAt { get; set; }
        public DateTime? DownloadedAt { get; set; }
    }
}

View File

@@ -0,0 +1,9 @@
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>OCI and Trivy media type constants used by the exporter's layout writer.</summary>
public static class TrivyDbMediaTypes
{
    /// <summary>OCI image manifest media type.</summary>
    public const string OciManifest = "application/vnd.oci.image.manifest.v1+json";
    /// <summary>OCI image index media type.</summary>
    public const string OciImageIndex = "application/vnd.oci.image.index.v1+json";
    /// <summary>Trivy DB config blob media type.</summary>
    public const string TrivyConfig = "application/vnd.aquasec.trivy.config.v1+json";
    /// <summary>Trivy DB gzipped layer media type.</summary>
    public const string TrivyLayer = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip";
}

View File

@@ -0,0 +1,8 @@
using System.Collections.Generic;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Result of writing an OCI image layout to disk.
/// </summary>
/// <param name="RootDirectory">Absolute path of the written layout root.</param>
/// <param name="ManifestDigest">Digest of the serialized image manifest.</param>
/// <param name="BlobDigests">Sorted digests of every blob written under <c>blobs/sha256</c>.</param>
public sealed record TrivyDbOciWriteResult(
    string RootDirectory,
    string ManifestDigest,
    IReadOnlyCollection<string> BlobDigests);

View File

@@ -0,0 +1,172 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Writes a Trivy DB package to an OCI image layout directory with deterministic content.
/// </summary>
/// <summary>
/// Writes a Trivy DB package to an OCI image layout directory with deterministic content.
/// File and directory timestamps are pinned to the package's generation time; directory
/// timestamps are applied only after all writes so later writes cannot disturb them.
/// </summary>
public sealed class TrivyDbOciWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };
    private static readonly byte[] OciLayoutBytes = Encoding.UTF8.GetBytes("{\"imageLayoutVersion\":\"1.0.0\"}");

    /// <summary>
    /// Writes the package as an OCI layout at <paramref name="destination"/> (replacing
    /// any existing directory) and returns the manifest digest plus all blob digests.
    /// </summary>
    public async Task<TrivyDbOciWriteResult> WriteAsync(
        TrivyDbPackage package,
        string destination,
        string reference,
        CancellationToken cancellationToken)
    {
        if (package is null)
        {
            throw new ArgumentNullException(nameof(package));
        }

        if (string.IsNullOrWhiteSpace(destination))
        {
            throw new ArgumentException("Destination directory must be provided.", nameof(destination));
        }

        if (string.IsNullOrWhiteSpace(reference))
        {
            throw new ArgumentException("Reference tag must be provided.", nameof(reference));
        }

        // Start from a clean directory so stale blobs from earlier runs cannot leak in.
        var root = Path.GetFullPath(destination);
        if (Directory.Exists(root))
        {
            Directory.Delete(root, recursive: true);
        }

        Directory.CreateDirectory(root);
        var timestamp = package.Config.GeneratedAt.UtcDateTime;
        await WriteFileAsync(Path.Combine(root, "metadata.json"), package.MetadataJson.ToArray(), timestamp, cancellationToken).ConfigureAwait(false);
        await WriteFileAsync(Path.Combine(root, "oci-layout"), OciLayoutBytes, timestamp, cancellationToken).ConfigureAwait(false);

        var blobsRoot = Path.Combine(root, "blobs", "sha256");
        Directory.CreateDirectory(blobsRoot);

        // Deduplicate blobs shared between layers/config by digest.
        var writtenDigests = new HashSet<string>(StringComparer.Ordinal);
        foreach (var pair in package.Blobs)
        {
            if (writtenDigests.Add(pair.Key))
            {
                await WriteBlobAsync(blobsRoot, pair.Key, pair.Value, timestamp, cancellationToken).ConfigureAwait(false);
            }
        }

        // The manifest itself is stored as a blob and referenced from index.json.
        var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(package.Manifest, SerializerOptions);
        var manifestDigest = ComputeDigest(manifestBytes);
        if (writtenDigests.Add(manifestDigest))
        {
            await WriteBlobAsync(blobsRoot, manifestDigest, TrivyDbBlob.FromBytes(manifestBytes), timestamp, cancellationToken).ConfigureAwait(false);
        }

        var manifestDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.OciManifest,
            manifestDigest,
            manifestBytes.LongLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.ref.name"] = reference,
            });
        var index = new OciIndex(2, new[] { manifestDescriptor });
        var indexBytes = JsonSerializer.SerializeToUtf8Bytes(index, SerializerOptions);
        await WriteFileAsync(Path.Combine(root, "index.json"), indexBytes, timestamp, cancellationToken).ConfigureAwait(false);

        // Stamp directories last: creating files updates the parent directory's mtime, so
        // stamping earlier would leave wall-clock (non-deterministic) directory times.
        Directory.SetLastWriteTimeUtc(blobsRoot, timestamp);
        Directory.SetLastWriteTimeUtc(Path.GetDirectoryName(blobsRoot)!, timestamp);
        Directory.SetLastWriteTimeUtc(root, timestamp);

        var blobDigests = writtenDigests.ToArray();
        Array.Sort(blobDigests, StringComparer.Ordinal);
        return new TrivyDbOciWriteResult(root, manifestDigest, blobDigests);
    }

    // Writes bytes and pins the file's (and its directory's) mtime to the export time.
    private static async Task WriteFileAsync(string path, byte[] bytes, DateTime utcTimestamp, CancellationToken cancellationToken)
    {
        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        await File.WriteAllBytesAsync(path, bytes, cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(path, utcTimestamp);
        if (!string.IsNullOrEmpty(directory))
        {
            // After the write, so the file creation cannot bump the directory mtime again.
            Directory.SetLastWriteTimeUtc(directory, utcTimestamp);
        }
    }

    // Streams one blob into blobs/sha256/<hex> and pins timestamps deterministically.
    private static async Task WriteBlobAsync(string blobsRoot, string digest, TrivyDbBlob blob, DateTime utcTimestamp, CancellationToken cancellationToken)
    {
        var fileName = ResolveDigestFileName(digest);
        var path = Path.Combine(blobsRoot, fileName);
        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        await using (var source = await blob.OpenReadAsync(cancellationToken).ConfigureAwait(false))
        await using (var destination = new FileStream(
            path,
            FileMode.Create,
            FileAccess.Write,
            FileShare.None,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan))
        {
            await source.CopyToAsync(destination, cancellationToken).ConfigureAwait(false);
            await destination.FlushAsync(cancellationToken).ConfigureAwait(false);
        }

        File.SetLastWriteTimeUtc(path, utcTimestamp);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.SetLastWriteTimeUtc(directory, utcTimestamp);
        }
    }

    // Maps "sha256:<hex>" to the bare hex file name required by the OCI layout spec.
    private static string ResolveDigestFileName(string digest)
    {
        if (!digest.StartsWith("sha256:", StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Only sha256 digests are supported. Received '{digest}'.");
        }

        var hex = digest[7..];
        if (hex.Length == 0)
        {
            throw new InvalidOperationException("Digest hex component cannot be empty.");
        }

        return hex;
    }

    // Formats the digest the same way as the exporter's async file hashing helpers.
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(payload);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -0,0 +1,209 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Pushes a locally materialized OCI layout to a remote registry by shelling out to the
/// <c>oras</c> CLI (<c>oras cp --from-oci-layout &lt;layout&gt;:&lt;tag&gt; &lt;reference&gt;</c>).
/// </summary>
public sealed class TrivyDbOrasPusher : ITrivyDbOrasPusher
{
    private readonly TrivyDbExportOptions _options;
    private readonly ILogger<TrivyDbOrasPusher> _logger;

    public TrivyDbOrasPusher(IOptions<TrivyDbExportOptions> options, ILogger<TrivyDbOrasPusher> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Copies the OCI layout at <paramref name="layoutPath"/> to <paramref name="reference"/>.
    /// No-ops when ORAS pushing is disabled in options; throws if the CLI cannot be started
    /// or exits non-zero.
    /// </summary>
    public async Task PushAsync(string layoutPath, string reference, string exportId, CancellationToken cancellationToken)
    {
        var orasOptions = _options.Oras;
        if (!orasOptions.Enabled)
        {
            return;
        }
        if (string.IsNullOrWhiteSpace(reference))
        {
            throw new InvalidOperationException("ORAS push requested but reference is empty.");
        }
        if (!Directory.Exists(layoutPath))
        {
            throw new DirectoryNotFoundException($"OCI layout directory '{layoutPath}' does not exist.");
        }
        // Resolve "oras" from PATH when no explicit executable path is configured.
        var executable = string.IsNullOrWhiteSpace(orasOptions.ExecutablePath) ? "oras" : orasOptions.ExecutablePath;
        // The layout side of "oras cp" is addressed as <path>:<tag>; prefer the tag from the
        // destination reference, falling back to the export id.
        var tag = ResolveTag(reference, exportId);
        var layoutReference = $"{layoutPath}:{tag}";
        var startInfo = new ProcessStartInfo
        {
            FileName = executable,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
        };
        startInfo.ArgumentList.Add("cp");
        startInfo.ArgumentList.Add("--from-oci-layout");
        startInfo.ArgumentList.Add(layoutReference);
        if (orasOptions.SkipTlsVerify)
        {
            startInfo.ArgumentList.Add("--insecure");
        }
        if (orasOptions.UseHttp)
        {
            startInfo.ArgumentList.Add("--plain-http");
        }
        if (orasOptions.AdditionalArguments is { Count: > 0 })
        {
            foreach (var arg in orasOptions.AdditionalArguments)
            {
                if (!string.IsNullOrWhiteSpace(arg))
                {
                    startInfo.ArgumentList.Add(arg);
                }
            }
        }
        // Destination reference is the final positional argument.
        startInfo.ArgumentList.Add(reference);
        if (!string.IsNullOrWhiteSpace(orasOptions.WorkingDirectory))
        {
            startInfo.WorkingDirectory = orasOptions.WorkingDirectory;
        }
        // Optionally isolate the child from the host environment, then layer configured
        // variables on top (e.g. registry credentials).
        if (!orasOptions.InheritEnvironment)
        {
            startInfo.Environment.Clear();
        }
        if (orasOptions.Environment is { Count: > 0 })
        {
            foreach (var kvp in orasOptions.Environment)
            {
                if (!string.IsNullOrEmpty(kvp.Key))
                {
                    startInfo.Environment[kvp.Key] = kvp.Value;
                }
            }
        }
        using var process = new Process { StartInfo = startInfo };
        var stdout = new StringBuilder();
        var stderr = new StringBuilder();
        // Each TCS completes when its stream signals EOF (Data == null), guaranteeing the
        // buffers are fully drained before we inspect them below.
        // NOTE(review): consider TaskCreationOptions.RunContinuationsAsynchronously so
        // continuations don't run inline on the process's stream-reader callback thread.
        var stdoutCompletion = new TaskCompletionSource<object?>();
        var stderrCompletion = new TaskCompletionSource<object?>();
        process.OutputDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                stdoutCompletion.TrySetResult(null);
            }
            else
            {
                stdout.AppendLine(e.Data);
            }
        };
        process.ErrorDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                stderrCompletion.TrySetResult(null);
            }
            else
            {
                stderr.AppendLine(e.Data);
            }
        };
        _logger.LogInformation("Pushing Trivy DB export {ExportId} to {Reference} using {Executable}", exportId, reference, executable);
        try
        {
            if (!process.Start())
            {
                throw new InvalidOperationException($"Failed to start '{executable}'.");
            }
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException($"Failed to start '{executable}'.", ex);
        }
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();
        // Translate a cancellation request into killing the whole process tree; failures
        // are swallowed because the process may already have exited.
        using var registration = cancellationToken.Register(() =>
        {
            try
            {
                if (!process.HasExited)
                {
                    process.Kill(entireProcessTree: true);
                }
            }
            catch
            {
                // ignore
            }
        });
#if NET8_0_OR_GREATER
        await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
#else
        await Task.Run(() => process.WaitForExit(), cancellationToken).ConfigureAwait(false);
#endif
        // Wait for both streams to hit EOF so stdout/stderr are complete before logging.
        await Task.WhenAll(stdoutCompletion.Task, stderrCompletion.Task).ConfigureAwait(false);
        if (process.ExitCode != 0)
        {
            _logger.LogError("ORAS push for {Reference} failed with code {Code}. stderr: {Stderr}", reference, process.ExitCode, stderr.ToString());
            throw new InvalidOperationException($"'{executable}' exited with code {process.ExitCode}.");
        }
        if (stdout.Length > 0)
        {
            _logger.LogDebug("ORAS push output: {Stdout}", stdout.ToString());
        }
        // oras writes progress/warnings to stderr even on success; surface as warnings.
        if (stderr.Length > 0)
        {
            _logger.LogWarning("ORAS push warnings: {Stderr}", stderr.ToString());
        }
    }

    /// <summary>
    /// Extracts the tag from an image reference (text after the last ':' when it follows
    /// the last '/'), ignoring any '@digest' suffix; returns <paramref name="fallback"/>
    /// when the reference carries no tag.
    /// </summary>
    private static string ResolveTag(string reference, string fallback)
    {
        if (string.IsNullOrWhiteSpace(reference))
        {
            return fallback;
        }
        var atIndex = reference.IndexOf('@');
        if (atIndex >= 0)
        {
            reference = reference[..atIndex];
        }
        // A ':' before the last '/' belongs to a registry port (host:5000/repo), not a tag.
        var slashIndex = reference.LastIndexOf('/');
        var colonIndex = reference.LastIndexOf(':');
        if (colonIndex > slashIndex && colonIndex >= 0)
        {
            return reference[(colonIndex + 1)..];
        }
        return fallback;
    }
}

View File

@@ -0,0 +1,9 @@
using System.Collections.Generic;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Immutable bundle describing a built Trivy DB OCI artifact: the manifest, its config
/// document, the blobs keyed by their digest, and the raw metadata.json payload.
/// </summary>
public sealed record TrivyDbPackage(
    OciManifest Manifest,
    TrivyConfigDocument Config,
    IReadOnlyDictionary<string, TrivyDbBlob> Blobs,
    ReadOnlyMemory<byte> MetadataJson);

View File

@@ -0,0 +1,116 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Assembles a <see cref="TrivyDbPackage"/> (OCI manifest + config + blobs) from a
/// pre-built database archive and its metadata.
/// </summary>
public sealed class TrivyDbPackageBuilder
{
    // Camel-cased, null-omitting, compact JSON for the serialized config document.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };

    /// <summary>
    /// Builds the package: serializes the Trivy config document, computes its digest, and
    /// wires up manifest descriptors for the config and the database layer.
    /// </summary>
    /// <exception cref="ArgumentException">A required request field is missing or blank.</exception>
    /// <exception cref="FileNotFoundException">The database archive does not exist on disk.</exception>
    /// <exception cref="ArgumentOutOfRangeException">The declared archive length is negative.</exception>
    public TrivyDbPackage BuildPackage(TrivyDbPackageRequest request)
    {
        if (request is null)
        {
            throw new ArgumentNullException(nameof(request));
        }
        if (request.MetadataJson.IsEmpty)
        {
            throw new ArgumentException("Metadata JSON payload must be provided.", nameof(request));
        }
        if (string.IsNullOrWhiteSpace(request.DatabaseArchivePath))
        {
            throw new ArgumentException("Database archive path must be provided.", nameof(request));
        }
        if (!File.Exists(request.DatabaseArchivePath))
        {
            throw new FileNotFoundException("Database archive path not found.", request.DatabaseArchivePath);
        }
        if (string.IsNullOrWhiteSpace(request.DatabaseDigest))
        {
            throw new ArgumentException("Database archive digest must be provided.", nameof(request));
        }
        if (request.DatabaseLength < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(request.DatabaseLength));
        }

        var metadataBytes = request.MetadataJson;
        // Normalize to UTC so the config document is reproducible regardless of input offset.
        var generatedAt = request.GeneratedAt.ToUniversalTime();
        var configDocument = new TrivyConfigDocument(
            TrivyDbMediaTypes.TrivyConfig,
            generatedAt,
            request.DatabaseVersion,
            request.DatabaseDigest,
            request.DatabaseLength);

        // The config blob is content-addressed by the digest of its own serialized form.
        var configBytes = JsonSerializer.SerializeToUtf8Bytes(configDocument, SerializerOptions);
        var configDigest = ComputeDigest(configBytes);
        var configDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.TrivyConfig,
            configDigest,
            configBytes.LongLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.title"] = "config.json",
            });

        // The database archive digest/length are supplied by the caller (already hashed).
        var layerDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.TrivyLayer,
            request.DatabaseDigest,
            request.DatabaseLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.title"] = "db.tar.gz",
            });

        var manifest = new OciManifest(
            2,
            TrivyDbMediaTypes.OciManifest,
            configDescriptor,
            ImmutableArray.Create(layerDescriptor));

        var blobs = new Dictionary<string, TrivyDbBlob>(StringComparer.Ordinal)
        {
            [configDigest] = TrivyDbBlob.FromBytes(configBytes),
            [request.DatabaseDigest] = TrivyDbBlob.FromFile(request.DatabaseArchivePath, request.DatabaseLength),
        };
        return new TrivyDbPackage(manifest, configDocument, blobs, metadataBytes);
    }

    /// <summary>
    /// Computes the OCI-style digest ("sha256:" + lowercase hex) of <paramref name="payload"/>.
    /// </summary>
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = SHA256.HashData(payload);
        // Convert.ToHexString emits uppercase; OCI digests use lowercase hex, so normalize.
        // Matches the equivalent helper in the OCI writer (was a hand-rolled char loop).
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}

View File

@@ -0,0 +1,11 @@
using System;
namespace StellaOps.Feedser.Exporter.TrivyDb;
/// <summary>
/// Input for <c>TrivyDbPackageBuilder.BuildPackage</c>: the metadata payload, the on-disk
/// database archive with its pre-computed digest and length, and versioning details for
/// the generated config document.
/// </summary>
public sealed record TrivyDbPackageRequest(
    ReadOnlyMemory<byte> MetadataJson,
    string DatabaseArchivePath,
    string DatabaseDigest,
    long DatabaseLength,
    DateTimeOffset GeneratedAt,
    string DatabaseVersion);

View File

@@ -0,0 +1,25 @@
# AGENTS
## Role
Deterministic merge and reconciliation engine; builds identity graph via aliases; applies precedence (PSIRT/OVAL > NVD; KEV flag only; regional feeds enrich); produces canonical advisory JSON and merge_event audit trail.
## Scope
- Identity: resolve advisory_key (prefer CVE, else PSIRT/Distro/JVN/BDU/GHSA/ICSA); unify aliases; detect collisions.
- Precedence: override rules for affected ranges (vendor PSIRT/OVAL over registry), enrichment-only feeds (CERTs/JVN/RU-CERT), KEV toggles exploitKnown only.
- Range comparers: RPM NEVRA comparer (epoch:version-release), Debian EVR comparer, SemVer range resolver; platform-aware selection.
- Merge algorithm: stable ordering, pure functions, idempotence; compute beforeHash/afterHash over canonical form; write merge_event.
- Conflict reporting: counters and logs for identity conflicts, reference merges, range overrides.
## Participants
- Storage.Mongo (reads raw mapped advisories, writes merged docs plus merge_event).
- Models (canonical types).
- Exporters (consume merged canonical).
- Core/WebService (jobs: merge:run, optionally narrower per-source-kind merge jobs).
## Interfaces & contracts
- AdvisoryMergeService.MergeAsync(ids or byKind): returns summary {processed, merged, overrides, conflicts}.
- Precedence table configurable but with sane defaults: RedHat/Ubuntu/Debian/SUSE > Vendor PSIRT > GHSA/OSV > NVD; CERTs enrich; KEV sets flags.
- Range selection uses comparers: NevraComparer, DebEvrComparer, SemVerRange; deterministic tie-breakers.
- Provenance propagation merges unique entries; references deduped by (url, type).
## In/Out of scope
In: merge logic, precedence policy, hashing, event records, comparers.
Out: fetching/parsing, exporter packaging, signing.
## Observability & security expectations
- Metrics: merge.delta.count, merge.identity.conflicts, merge.range.overrides, merge.duration_ms.
- Logs: decisions (why replaced), keys involved, hashes; avoid dumping large blobs; redact secrets (none expected).

View File

@@ -1,6 +1 @@
namespace StellaOps.Feedser.Merge;
public class Class1
{
}
// Intentionally left blank; types moved into dedicated files.

View File

@@ -0,0 +1,232 @@
namespace StellaOps.Feedser.Merge.Comparers;
using System;
using StellaOps.Feedser.Normalization.Distro;
/// <summary>
/// Orders Debian epoch:version-revision (EVR) strings. Parseable EVRs sort by epoch, then
/// upstream version, then revision using a dpkg-style segment comparison; unparseable
/// strings sort before parseable ones and fall back to ordinal comparison among themselves.
/// NOTE(review): CompareSegment skips separator characters ('.', '+', '-', ...) instead of
/// comparing them as dpkg does, so versions distinguished only by separators compare equal
/// here — confirm that approximation is acceptable for merge tie-breaking.
/// </summary>
public sealed class DebianEvrComparer : IComparer<DebianEvr>, IComparer<string>
{
    public static DebianEvrComparer Instance { get; } = new();

    // Singleton: construct via Instance.
    private DebianEvrComparer()
    {
    }

    public int Compare(string? x, string? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }
        if (x is null)
        {
            return -1;
        }
        if (y is null)
        {
            return 1;
        }
        var xParsed = DebianEvr.TryParse(x, out var xEvr);
        var yParsed = DebianEvr.TryParse(y, out var yEvr);
        if (xParsed && yParsed)
        {
            return Compare(xEvr, yEvr);
        }
        // A parseable EVR always sorts after an unparseable string.
        if (xParsed)
        {
            return 1;
        }
        if (yParsed)
        {
            return -1;
        }
        return string.Compare(x, y, StringComparison.Ordinal);
    }

    public int Compare(DebianEvr? x, DebianEvr? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }
        if (x is null)
        {
            return -1;
        }
        if (y is null)
        {
            return 1;
        }
        // Epoch dominates, then upstream version, then Debian revision.
        var compare = x.Epoch.CompareTo(y.Epoch);
        if (compare != 0)
        {
            return compare;
        }
        compare = CompareSegment(x.Version, y.Version);
        if (compare != 0)
        {
            return compare;
        }
        compare = CompareSegment(x.Revision, y.Revision);
        if (compare != 0)
        {
            return compare;
        }
        // Deterministic final tie-breaker on the raw input text.
        return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
    }

    // dpkg-style alternating digit/non-digit comparison:
    //   - '~' sorts before everything, including end-of-string;
    //   - numeric runs compare by value (leading zeros trimmed, longer run wins);
    //   - letters sort before end-of-string/digits per CharOrder.
    private static int CompareSegment(string left, string right)
    {
        var i = 0;
        var j = 0;
        while (i < left.Length || j < right.Length)
        {
            // Skip separator characters (anything not alphanumeric or '~').
            while (i < left.Length && !IsAlphaNumeric(left[i]) && left[i] != '~')
            {
                i++;
            }
            while (j < right.Length && !IsAlphaNumeric(right[j]) && right[j] != '~')
            {
                j++;
            }
            // '\0' stands in for end-of-string.
            var leftChar = i < left.Length ? left[i] : '\0';
            var rightChar = j < right.Length ? right[j] : '\0';
            if (leftChar == '~' || rightChar == '~')
            {
                // Tilde sorts lowest (e.g. 1.0~rc1 < 1.0); matching tildes are consumed.
                if (leftChar != rightChar)
                {
                    return leftChar == '~' ? -1 : 1;
                }
                i += leftChar == '~' ? 1 : 0;
                j += rightChar == '~' ? 1 : 0;
                continue;
            }
            var leftIsDigit = char.IsDigit(leftChar);
            var rightIsDigit = char.IsDigit(rightChar);
            if (leftIsDigit && rightIsDigit)
            {
                // Consume both digit runs and compare numerically without allocating:
                // trim leading zeros, then longer run wins, else ordinal over the digits.
                var leftStart = i;
                while (i < left.Length && char.IsDigit(left[i]))
                {
                    i++;
                }
                var rightStart = j;
                while (j < right.Length && char.IsDigit(right[j]))
                {
                    j++;
                }
                var leftTrimmed = leftStart;
                while (leftTrimmed < i && left[leftTrimmed] == '0')
                {
                    leftTrimmed++;
                }
                var rightTrimmed = rightStart;
                while (rightTrimmed < j && right[rightTrimmed] == '0')
                {
                    rightTrimmed++;
                }
                var leftLength = i - leftTrimmed;
                var rightLength = j - rightTrimmed;
                if (leftLength != rightLength)
                {
                    return leftLength.CompareTo(rightLength);
                }
                var comparison = left.AsSpan(leftTrimmed, leftLength)
                    .CompareTo(right.AsSpan(rightTrimmed, rightLength), StringComparison.Ordinal);
                if (comparison != 0)
                {
                    return comparison;
                }
                continue;
            }
            // A digit run sorts after a non-digit (dpkg: numbers beat letters).
            if (leftIsDigit)
            {
                return 1;
            }
            if (rightIsDigit)
            {
                return -1;
            }
            var leftOrder = CharOrder(leftChar);
            var rightOrder = CharOrder(rightChar);
            var orderComparison = leftOrder.CompareTo(rightOrder);
            if (orderComparison != 0)
            {
                return orderComparison;
            }
            if (leftChar != rightChar)
            {
                return leftChar.CompareTo(rightChar);
            }
            if (leftChar == '\0')
            {
                return 0;
            }
            i++;
            j++;
        }
        return 0;
    }

    private static bool IsAlphaNumeric(char value)
        => char.IsLetterOrDigit(value);

    // Ordering weight: '~' < end-of-string/digits (0) < letters < anything else.
    // NOTE(review): the '+256' branch appears unreachable here because non-alphanumeric,
    // non-tilde characters are skipped before CharOrder is consulted.
    private static int CharOrder(char value)
    {
        if (value == '\0')
        {
            return 0;
        }
        if (value == '~')
        {
            return -1;
        }
        if (char.IsDigit(value))
        {
            return 0;
        }
        if (char.IsLetter(value))
        {
            return value;
        }
        return value + 256;
    }
}

View File

@@ -0,0 +1,264 @@
namespace StellaOps.Feedser.Merge.Comparers;
using System;
using StellaOps.Feedser.Normalization.Distro;
/// <summary>
/// Orders RPM NEVRA identifiers (name, epoch, version, release, architecture). Parseable
/// NEVRAs compare field by field with rpm-style version semantics; unparseable strings
/// sort before parseable ones and fall back to ordinal comparison among themselves.
/// </summary>
public sealed class NevraComparer : IComparer<Nevra>, IComparer<string>
{
    public static NevraComparer Instance { get; } = new();

    // Singleton: construct via Instance.
    private NevraComparer()
    {
    }

    public int Compare(string? x, string? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }
        if (x is null || y is null)
        {
            return x is null ? -1 : 1;
        }

        var xOk = Nevra.TryParse(x, out var xNevra);
        var yOk = Nevra.TryParse(y, out var yNevra);
        if (xOk != yOk)
        {
            // A parseable NEVRA always sorts after an unparseable string.
            return xOk ? 1 : -1;
        }

        return xOk
            ? Compare(xNevra, yNevra)
            : string.Compare(x, y, StringComparison.Ordinal);
    }

    public int Compare(Nevra? x, Nevra? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }
        if (x is null || y is null)
        {
            return x is null ? -1 : 1;
        }

        // First non-zero field comparison decides the order.
        foreach (var result in ComponentComparisons(x, y))
        {
            if (result != 0)
            {
                return result;
            }
        }

        // Deterministic final tie-breaker on the raw input text.
        return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
    }

    // Field-by-field comparisons, lazily evaluated in precedence order:
    // name, architecture (null treated as empty), epoch, then rpm version/release.
    private static IEnumerable<int> ComponentComparisons(Nevra x, Nevra y)
    {
        yield return string.Compare(x.Name, y.Name, StringComparison.Ordinal);
        yield return string.Compare(x.Architecture ?? string.Empty, y.Architecture ?? string.Empty, StringComparison.Ordinal);
        yield return x.Epoch.CompareTo(y.Epoch);
        yield return RpmVersionComparer.Compare(x.Version, y.Version);
        yield return RpmVersionComparer.Compare(x.Release, y.Release);
    }
}
/// <summary>
/// rpmvercmp-style comparison for RPM version/release strings: alternating numeric and
/// alphabetic segments, separators skipped, '~' sorting before everything (pre-release).
/// NOTE(review): newer rpm also gives '^' special post-release semantics; this
/// implementation treats '^' as an ordinary separator — confirm that is acceptable.
/// </summary>
internal static class RpmVersionComparer
{
    public static int Compare(string? left, string? right)
    {
        // Null compares as the empty string.
        left ??= string.Empty;
        right ??= string.Empty;
        var i = 0;
        var j = 0;
        while (true)
        {
            // Advance past separators; report whether a '~' was crossed.
            var leftHasTilde = SkipToNextSegment(left, ref i);
            var rightHasTilde = SkipToNextSegment(right, ref j);
            if (leftHasTilde || rightHasTilde)
            {
                // Matching tildes cancel; otherwise the tilde side sorts lower
                // (e.g. 1.0~rc1 < 1.0).
                if (leftHasTilde && rightHasTilde)
                {
                    continue;
                }
                return leftHasTilde ? -1 : 1;
            }
            // Exhaustion: the shorter string sorts lower; both exhausted means equal.
            var leftEnd = i >= left.Length;
            var rightEnd = j >= right.Length;
            if (leftEnd || rightEnd)
            {
                if (leftEnd && rightEnd)
                {
                    return 0;
                }
                return leftEnd ? -1 : 1;
            }
            // A numeric segment sorts after an alphabetic one (rpm rule).
            var leftDigit = char.IsDigit(left[i]);
            var rightDigit = char.IsDigit(right[j]);
            if (leftDigit && !rightDigit)
            {
                return 1;
            }
            if (!leftDigit && rightDigit)
            {
                return -1;
            }
            int compare;
            if (leftDigit)
            {
                compare = CompareNumericSegment(left, ref i, right, ref j);
            }
            else
            {
                compare = CompareAlphaSegment(left, ref i, right, ref j);
            }
            if (compare != 0)
            {
                return compare;
            }
        }
    }

    // Advances index past non-alphanumeric separators; returns true if a '~' was
    // consumed (the caller handles tilde ordering). Stops at the first alphanumeric.
    private static bool SkipToNextSegment(string value, ref int index)
    {
        var sawTilde = false;
        while (index < value.Length)
        {
            var current = value[index];
            if (current == '~')
            {
                sawTilde = true;
                index++;
                break;
            }
            if (char.IsLetterOrDigit(current))
            {
                break;
            }
            index++;
        }
        return sawTilde;
    }

    // Compares two digit runs numerically without allocating: trim leading zeros,
    // longer remaining run wins, else ordinal over the digit characters. Advances
    // both indices past their runs.
    private static int CompareNumericSegment(string value, ref int index, string other, ref int otherIndex)
    {
        var start = index;
        while (index < value.Length && char.IsDigit(value[index]))
        {
            index++;
        }
        var otherStart = otherIndex;
        while (otherIndex < other.Length && char.IsDigit(other[otherIndex]))
        {
            otherIndex++;
        }
        var trimmedStart = start;
        while (trimmedStart < index && value[trimmedStart] == '0')
        {
            trimmedStart++;
        }
        var otherTrimmedStart = otherStart;
        while (otherTrimmedStart < otherIndex && other[otherTrimmedStart] == '0')
        {
            otherTrimmedStart++;
        }
        var length = index - trimmedStart;
        var otherLength = otherIndex - otherTrimmedStart;
        if (length != otherLength)
        {
            return length.CompareTo(otherLength);
        }
        var comparison = value.AsSpan(trimmedStart, length)
            .CompareTo(other.AsSpan(otherTrimmedStart, otherLength), StringComparison.Ordinal);
        if (comparison != 0)
        {
            return comparison;
        }
        return 0;
    }

    // Compares two letter runs ordinally (the shorter run that is a prefix of the
    // longer compares lower). Advances both indices past their runs.
    private static int CompareAlphaSegment(string value, ref int index, string other, ref int otherIndex)
    {
        var start = index;
        while (index < value.Length && char.IsLetter(value[index]))
        {
            index++;
        }
        var otherStart = otherIndex;
        while (otherIndex < other.Length && char.IsLetter(other[otherIndex]))
        {
            otherIndex++;
        }
        var length = index - start;
        var otherLength = otherIndex - otherStart;
        var comparison = value.AsSpan(start, length)
            .CompareTo(other.AsSpan(otherStart, otherLength), StringComparison.Ordinal);
        if (comparison != 0)
        {
            return comparison;
        }
        return 0;
    }
}

View File

@@ -0,0 +1,73 @@
namespace StellaOps.Feedser.Merge.Comparers;
using System.Diagnostics.CodeAnalysis;
using Semver;
/// <summary>
/// Provides helpers to interpret introduced/fixed/lastAffected SemVer ranges and compare versions.
/// Built on the third-party Semver package; all parsing uses <see cref="SemVersionStyles.Any"/>.
/// </summary>
public static class SemanticVersionRangeResolver
{
    public static bool TryParse(string? value, [NotNullWhen(true)] out SemVersion? result)
        => SemVersion.TryParse(value, SemVersionStyles.Any, out result);

    public static SemVersion Parse(string value)
        => SemVersion.Parse(value, SemVersionStyles.Any);

    /// <summary>
    /// Resolves the effective start and end versions using introduced/fixed/lastAffected semantics.
    /// A "fixed" version yields an exclusive upper bound; otherwise "lastAffected" yields an
    /// inclusive bound plus a derived exclusive bound at the next patch. Unparseable inputs
    /// are treated as absent.
    /// NOTE(review): the derived exclusive bound admits pre-releases of the next patch
    /// (e.g. 1.2.4-alpha &lt; 1.2.4 would fall inside lastAffected=1.2.3's window) — callers
    /// should prefer the inclusive bound when pre-release versions matter.
    /// </summary>
    public static (SemVersion? introduced, SemVersion? exclusiveUpperBound, SemVersion? inclusiveUpperBound) ResolveWindows(
        string? introduced,
        string? fixedVersion,
        string? lastAffected)
    {
        var introducedVersion = TryParse(introduced, out var parsedIntroduced) ? parsedIntroduced : null;
        var fixedVersionParsed = TryParse(fixedVersion, out var parsedFixed) ? parsedFixed : null;
        var lastAffectedVersion = TryParse(lastAffected, out var parsedLast) ? parsedLast : null;
        SemVersion? exclusiveUpper = null;
        SemVersion? inclusiveUpper = null;
        if (fixedVersionParsed is not null)
        {
            // "fixed" takes priority over "lastAffected".
            exclusiveUpper = fixedVersionParsed;
        }
        else if (lastAffectedVersion is not null)
        {
            inclusiveUpper = lastAffectedVersion;
            exclusiveUpper = NextPatch(lastAffectedVersion);
        }
        return (introducedVersion, exclusiveUpper, inclusiveUpper);
    }

    /// <summary>
    /// Compares two version strings: both parseable → SemVer sort order; a parseable version
    /// sorts after an unparseable one; neither parseable → ordinal string comparison.
    /// </summary>
    public static int Compare(string? left, string? right)
    {
        var leftParsed = TryParse(left, out var leftSemver);
        var rightParsed = TryParse(right, out var rightSemver);
        if (leftParsed && rightParsed)
        {
            return SemVersion.CompareSortOrder(leftSemver, rightSemver);
        }
        if (leftParsed)
        {
            return 1;
        }
        if (rightParsed)
        {
            return -1;
        }
        return string.Compare(left, right, StringComparison.Ordinal);
    }

    // Smallest release version strictly above the input's patch line; note this drops any
    // prerelease/metadata component from the input.
    private static SemVersion NextPatch(SemVersion version)
    {
        return new SemVersion(version.Major, version.Minor, version.Patch + 1);
    }
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Feedser.Merge.Options;
/// <summary>
/// Configurable precedence overrides for advisory sources. Entries here are merged on top
/// of the built-in defaults by the merger; keys are matched case-insensitively.
/// </summary>
public sealed class AdvisoryPrecedenceOptions
{
    /// <summary>
    /// Mapping of provenance source identifiers to precedence ranks. Lower numbers take precedence.
    /// </summary>
    public IDictionary<string, int> Ranks { get; init; } = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
}

View File

@@ -0,0 +1,296 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Feedser.Merge.Options;
using StellaOps.Feedser.Models;
namespace StellaOps.Feedser.Merge.Services;
/// <summary>
/// Merges canonical advisories emitted by different sources into a single precedence-resolved advisory.
/// </summary>
public sealed class AdvisoryPrecedenceMerger
{
    // Lower rank wins: distro feeds (0) over vendor PSIRTs (1) over CERT/enrichment
    // feeds (2); NVD (5) is the registry fallback and KEV (6) ranks last so its fields
    // are only used when no other source supplies them (it still contributes to the
    // ExploitKnown OR in Merge).
    private static readonly IReadOnlyDictionary<string, int> DefaultPrecedence = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        ["redhat"] = 0,
        ["ubuntu"] = 0,
        ["debian"] = 0,
        ["suse"] = 0,
        ["msrc"] = 1,
        ["oracle"] = 1,
        ["adobe"] = 1,
        ["chromium"] = 1,
        ["jvn"] = 2,
        ["certfr"] = 2,
        ["certin"] = 2,
        ["ics-kaspersky"] = 2,
        ["kev"] = 6,
        ["nvd"] = 5,
    };

    private static readonly Meter MergeMeter = new("StellaOps.Feedser.Merge");

    private static readonly Counter<long> OverridesCounter = MergeMeter.CreateCounter<long>(
        "feedser.merge.overrides",
        unit: "count",
        description: "Number of times lower-precedence advisories were overridden by higher-precedence sources.");

    private readonly AffectedPackagePrecedenceResolver _packageResolver;
    private readonly IReadOnlyDictionary<string, int> _precedence;
    private readonly int _fallbackRank;
    private readonly System.TimeProvider _timeProvider;
    private readonly ILogger<AdvisoryPrecedenceMerger> _logger;

    public AdvisoryPrecedenceMerger()
        : this(new AffectedPackagePrecedenceResolver(), DefaultPrecedence, System.TimeProvider.System, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(AffectedPackagePrecedenceResolver packageResolver, System.TimeProvider? timeProvider = null)
        : this(packageResolver, DefaultPrecedence, timeProvider ?? System.TimeProvider.System, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        IReadOnlyDictionary<string, int> precedence,
        System.TimeProvider timeProvider)
        : this(packageResolver, precedence, timeProvider, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        AdvisoryPrecedenceOptions? options,
        System.TimeProvider timeProvider,
        ILogger<AdvisoryPrecedenceMerger>? logger = null)
        : this(packageResolver, MergePrecedence(DefaultPrecedence, options), timeProvider, logger)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        IReadOnlyDictionary<string, int> precedence,
        System.TimeProvider timeProvider,
        ILogger<AdvisoryPrecedenceMerger>? logger)
    {
        _packageResolver = packageResolver ?? throw new ArgumentNullException(nameof(packageResolver));
        _precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
        // Sources absent from the table sort after every configured source.
        _fallbackRank = _precedence.Count == 0 ? 10 : _precedence.Values.Max() + 1;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? NullLogger<AdvisoryPrecedenceMerger>.Instance;
    }

    /// <summary>
    /// Merges advisories sharing the same advisory key: scalar fields (title, summary,
    /// language, severity, dates) come from the highest-precedence source that provides
    /// them; aliases, references, CVSS metrics, and provenance are unioned; affected
    /// packages are delegated to the package resolver; ExploitKnown is true if any input
    /// sets it. A synthetic "merge/precedence" provenance entry records the participating
    /// sources.
    /// </summary>
    /// <exception cref="ArgumentException">No advisories, or inputs with mixed advisory keys.</exception>
    public Advisory Merge(IEnumerable<Advisory> advisories)
    {
        if (advisories is null)
        {
            throw new ArgumentNullException(nameof(advisories));
        }

        var list = advisories.Where(static a => a is not null).ToList();
        if (list.Count == 0)
        {
            throw new ArgumentException("At least one advisory is required for merge.", nameof(advisories));
        }

        var advisoryKey = list[0].AdvisoryKey;
        if (list.Any(advisory => !string.Equals(advisory.AdvisoryKey, advisoryKey, StringComparison.Ordinal)))
        {
            throw new ArgumentException("All advisories must share the same advisory key.", nameof(advisories));
        }

        // Stable precedence order: rank first, richer provenance as tie-breaker.
        // (Removed an unused "primary" local that previously aliased ordered[0].)
        var ordered = list
            .Select(advisory => new AdvisoryEntry(advisory, GetRank(advisory)))
            .OrderBy(entry => entry.Rank)
            .ThenByDescending(entry => entry.Advisory.Provenance.Length)
            .ToArray();

        var title = PickString(ordered, advisory => advisory.Title) ?? advisoryKey;
        var summary = PickString(ordered, advisory => advisory.Summary);
        var language = PickString(ordered, advisory => advisory.Language);
        var severity = PickString(ordered, advisory => advisory.Severity);

        var aliases = ordered
            .SelectMany(entry => entry.Advisory.Aliases)
            .Where(static alias => !string.IsNullOrWhiteSpace(alias))
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var references = ordered
            .SelectMany(entry => entry.Advisory.References)
            .Distinct()
            .ToArray();

        var affectedPackages = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages));

        var cvssMetrics = ordered
            .SelectMany(entry => entry.Advisory.CvssMetrics)
            .Distinct()
            .ToArray();

        var published = PickDateTime(ordered, static advisory => advisory.Published);
        var modified = PickDateTime(ordered, static advisory => advisory.Modified) ?? published;

        var provenance = ordered
            .SelectMany(entry => entry.Advisory.Provenance)
            .Distinct()
            .ToList();

        // Record which sources participated, in a stable order, as audit provenance.
        var precedenceTrace = ordered
            .SelectMany(entry => entry.Sources)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(static source => source, StringComparer.OrdinalIgnoreCase)
            .ToArray();
        var mergeProvenance = new AdvisoryProvenance(
            source: "merge",
            kind: "precedence",
            value: string.Join("|", precedenceTrace),
            recordedAt: _timeProvider.GetUtcNow());
        provenance.Add(mergeProvenance);

        var exploitKnown = ordered.Any(entry => entry.Advisory.ExploitKnown);

        LogOverrides(advisoryKey, ordered);

        return new Advisory(
            advisoryKey,
            title,
            summary,
            language,
            published,
            modified,
            severity,
            exploitKnown,
            aliases,
            references,
            affectedPackages,
            cvssMetrics,
            provenance);
    }

    // Returns the first non-blank value in precedence order, trimmed; null when no
    // source supplies one. (Made static: touches no instance state.)
    private static string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector)
    {
        foreach (var entry in ordered)
        {
            var value = selector(entry.Advisory);
            if (!string.IsNullOrWhiteSpace(value))
            {
                return value.Trim();
            }
        }
        return null;
    }

    // Returns the first present timestamp in precedence order, normalized to UTC.
    // (Made static: touches no instance state.)
    private static DateTimeOffset? PickDateTime(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, DateTimeOffset?> selector)
    {
        foreach (var entry in ordered)
        {
            var value = selector(entry.Advisory);
            if (value.HasValue)
            {
                return value.Value.ToUniversalTime();
            }
        }
        return null;
    }

    // Best (lowest) configured rank across the advisory's provenance sources;
    // unknown sources fall back to _fallbackRank.
    private int GetRank(Advisory advisory)
    {
        var best = _fallbackRank;
        foreach (var provenance in advisory.Provenance)
        {
            if (string.IsNullOrWhiteSpace(provenance.Source))
            {
                continue;
            }
            if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < best)
            {
                best = rank;
            }
        }
        return best;
    }

    // Layers configured rank overrides on top of the defaults (case-insensitive keys,
    // trimmed); blank keys are ignored. Returns the defaults unchanged when no
    // overrides are configured.
    private static IReadOnlyDictionary<string, int> MergePrecedence(
        IReadOnlyDictionary<string, int> defaults,
        AdvisoryPrecedenceOptions? options)
    {
        if (options?.Ranks is null || options.Ranks.Count == 0)
        {
            return defaults;
        }
        var merged = new Dictionary<string, int>(defaults, StringComparer.OrdinalIgnoreCase);
        foreach (var kvp in options.Ranks)
        {
            if (string.IsNullOrWhiteSpace(kvp.Key))
            {
                continue;
            }
            merged[kvp.Key.Trim()] = kvp.Value;
        }
        return merged;
    }

    // Emits a counter increment and an info log for every input that lost to the
    // primary (strictly higher rank); equal-rank entries are not overrides.
    private void LogOverrides(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered)
    {
        if (ordered.Count <= 1)
        {
            return;
        }
        var primary = ordered[0];
        var primaryRank = primary.Rank;
        var primarySources = string.Join(',', primary.Sources);
        for (var i = 1; i < ordered.Count; i++)
        {
            var candidate = ordered[i];
            if (candidate.Rank <= primaryRank)
            {
                continue;
            }
            var suppressedSources = string.Join(',', candidate.Sources);
            OverridesCounter.Add(
                1,
                new KeyValuePair<string, object?>[]
                {
                    new("advisory", advisoryKey),
                    new("primary_sources", primarySources),
                    new("suppressed_sources", suppressedSources),
                });
            _logger.LogInformation(
                "Advisory precedence override for {AdvisoryKey}: kept {PrimarySources} (rank {PrimaryRank}) over {SuppressedSources} (rank {SuppressedRank})",
                advisoryKey,
                primarySources,
                primaryRank,
                suppressedSources,
                candidate.Rank);
        }
    }

    // Pairs an advisory with its computed rank; Sources caches the distinct,
    // case-insensitive provenance source names for logging/metrics.
    private readonly record struct AdvisoryEntry(Advisory Advisory, int Rank)
    {
        public IReadOnlyCollection<string> Sources { get; } = Advisory.Provenance
            .Select(static p => p.Source)
            .Where(static source => !string.IsNullOrWhiteSpace(source))
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }
}

View File

@@ -0,0 +1,105 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Feedser.Models;
namespace StellaOps.Feedser.Merge.Services;
/// <summary>
/// Applies source precedence rules to affected package sets so authoritative distro ranges override generic registry data.
/// </summary>
public sealed class AffectedPackagePrecedenceResolver
{
    // Lower rank wins: distro feeds (0) over vendor PSIRTs (1); NVD (5) is the fallback.
    private static readonly IReadOnlyDictionary<string, int> DefaultPrecedence = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        ["redhat"] = 0,
        ["ubuntu"] = 0,
        ["debian"] = 0,
        ["suse"] = 0,
        ["msrc"] = 1,
        ["oracle"] = 1,
        ["adobe"] = 1,
        ["chromium"] = 1,
        ["nvd"] = 5,
    };

    private readonly IReadOnlyDictionary<string, int> _precedence;
    private readonly int _fallbackRank;

    public AffectedPackagePrecedenceResolver()
        : this(DefaultPrecedence)
    {
    }

    public AffectedPackagePrecedenceResolver(IReadOnlyDictionary<string, int> precedence)
    {
        _precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
        // Sources absent from the table sort after every configured source.
        _fallbackRank = precedence.Count == 0 ? 10 : precedence.Values.Max() + 1;
    }

    /// <summary>
    /// Groups packages by (type, identifier, platform) and, per group, keeps the
    /// highest-precedence package's version ranges while unioning statuses and provenance
    /// from every source. Result is sorted by type/identifier/platform for determinism.
    /// </summary>
    public IReadOnlyList<AffectedPackage> Merge(IEnumerable<AffectedPackage> packages)
    {
        ArgumentNullException.ThrowIfNull(packages);

        var grouped = packages
            .Where(static pkg => pkg is not null)
            .GroupBy(pkg => (pkg.Type, pkg.Identifier, pkg.Platform ?? string.Empty));

        var resolved = new List<AffectedPackage>();
        foreach (var group in grouped)
        {
            // Materialize the ordering once: the previous IOrderedEnumerable was
            // re-sorted on each of its three enumerations (First + two SelectMany
            // passes below).
            var ordered = group
                .OrderBy(GetPrecedence)
                .ThenByDescending(static pkg => pkg.Provenance.Length)
                .ThenByDescending(static pkg => pkg.VersionRanges.Length)
                .ToArray();

            // Winner supplies the authoritative version ranges.
            var primary = ordered[0];

            var provenance = ordered
                .SelectMany(static pkg => pkg.Provenance)
                .Where(static p => p is not null)
                .Distinct()
                .ToImmutableArray();

            var statuses = ordered
                .SelectMany(static pkg => pkg.Statuses)
                .Distinct(AffectedPackageStatusEqualityComparer.Instance)
                .ToImmutableArray();

            var merged = new AffectedPackage(
                primary.Type,
                primary.Identifier,
                string.IsNullOrWhiteSpace(primary.Platform) ? null : primary.Platform,
                primary.VersionRanges,
                statuses,
                provenance);
            resolved.Add(merged);
        }

        return resolved
            .OrderBy(static pkg => pkg.Type, StringComparer.Ordinal)
            .ThenBy(static pkg => pkg.Identifier, StringComparer.Ordinal)
            .ThenBy(static pkg => pkg.Platform, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    // Best (lowest) configured rank across the package's provenance sources;
    // unknown sources fall back to _fallbackRank.
    private int GetPrecedence(AffectedPackage package)
    {
        var bestRank = _fallbackRank;
        foreach (var provenance in package.Provenance)
        {
            if (provenance is null || string.IsNullOrWhiteSpace(provenance.Source))
            {
                continue;
            }
            if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < bestRank)
            {
                bestRank = rank;
            }
        }
        return bestRank;
    }
}

View File

@@ -0,0 +1,25 @@
namespace StellaOps.Feedser.Merge.Services;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Feedser.Models;
/// <summary>
/// Computes deterministic hashes over canonical advisory JSON payloads.
/// </summary>
public sealed class CanonicalHashCalculator
{
    // BOM-less UTF-8 keeps the hashed bytes identical across writers.
    private static readonly UTF8Encoding Utf8NoBom = new(false);

    /// <summary>
    /// Returns the SHA-256 of the advisory's normalized canonical JSON form;
    /// a null advisory hashes to an empty byte array.
    /// </summary>
    public byte[] ComputeHash(Advisory? advisory)
    {
        if (advisory is null)
        {
            return Array.Empty<byte>();
        }

        var normalized = CanonicalJsonSerializer.Normalize(advisory);
        var canonical = CanonicalJsonSerializer.Serialize(normalized);
        return SHA256.HashData(Utf8NoBom.GetBytes(canonical));
    }
}

View File

@@ -0,0 +1,70 @@
namespace StellaOps.Feedser.Merge.Services;
using System.Security.Cryptography;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Storage.Mongo.MergeEvents;
/// <summary>
/// Persists merge events with canonical before/after hashes for auditability.
/// </summary>
public sealed class MergeEventWriter
{
    private readonly IMergeEventStore _mergeEventStore;
    private readonly CanonicalHashCalculator _hashCalculator;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MergeEventWriter> _logger;

    public MergeEventWriter(
        IMergeEventStore mergeEventStore,
        CanonicalHashCalculator hashCalculator,
        TimeProvider timeProvider,
        ILogger<MergeEventWriter> logger)
    {
        _mergeEventStore = mergeEventStore ?? throw new ArgumentNullException(nameof(mergeEventStore));
        _hashCalculator = hashCalculator ?? throw new ArgumentNullException(nameof(hashCalculator));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Computes canonical hashes for the before/after advisory snapshots, logs whether the merge
    /// changed the canonical payload, and appends a merge event record to the store.
    /// </summary>
    /// <param name="advisoryKey">Key of the advisory the merge applies to (required, non-blank).</param>
    /// <param name="before">Advisory state prior to the merge; null hashes to an empty digest.</param>
    /// <param name="after">Advisory state after the merge (required).</param>
    /// <param name="inputDocumentIds">Source document ids that fed the merge; null is treated as empty.</param>
    /// <param name="cancellationToken">Token forwarded to the store append.</param>
    /// <returns>The persisted merge event record.</returns>
    public async Task<MergeEventRecord> AppendAsync(
        string advisoryKey,
        Advisory? before,
        Advisory after,
        IReadOnlyList<Guid> inputDocumentIds,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);
        ArgumentNullException.ThrowIfNull(after);

        var hashBefore = _hashCalculator.ComputeHash(before);
        var hashAfter = _hashCalculator.ComputeHash(after);

        var record = new MergeEventRecord(
            Guid.NewGuid(),
            advisoryKey,
            hashBefore,
            hashAfter,
            _timeProvider.GetUtcNow(),
            inputDocumentIds?.ToArray() ?? Array.Empty<Guid>());

        if (CryptographicOperations.FixedTimeEquals(hashBefore, hashAfter))
        {
            _logger.LogInformation("Merge event for {AdvisoryKey} recorded without hash change", advisoryKey);
        }
        else
        {
            _logger.LogInformation(
                "Merge event for {AdvisoryKey} changed hash {BeforeHash} -> {AfterHash}",
                advisoryKey,
                Convert.ToHexString(hashBefore),
                Convert.ToHexString(hashAfter));
        }

        await _mergeEventStore.AppendAsync(record, cancellationToken).ConfigureAwait(false);
        return record;
    }
}

View File

@@ -1,4 +1,5 @@
<Project Sdk="Microsoft.NET.Sdk">
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
@@ -7,7 +8,9 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Semver" Version="2.3.0" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,13 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Identity graph and alias resolver|BE-Merge|Models, Storage.Mongo|Deterministic key choice; cycle-safe.|
|Precedence policy engine|BE-Merge|Architecture|PSIRT/OVAL > NVD; CERTs enrich; KEV flag.|
|NEVRA comparer plus tests|BE-Merge (Distro WG)|Source.Distro fixtures|DONE Added Nevra parser/comparer with tilde-aware rpm ordering and unit coverage.|
|Debian EVR comparer plus tests|BE-Merge (Distro WG)|Debian fixtures|DONE DebianEvr comparer mirrors dpkg ordering with tilde/epoch handling and unit coverage.|
|SemVer range resolver plus tests|BE-Merge (OSS WG)|OSV/GHSA fixtures|DONE SemanticVersionRangeResolver covers introduced/fixed/lastAffected semantics with SemVer ordering tests.|
|Canonical hash and merge_event writer|BE-Merge|Models, Storage.Mongo|DONE Hash calculator + MergeEventWriter compute canonical SHA-256 digests and persist merge events.|
|Conflict detection and metrics|BE-Merge|Core|Counters; structured logs; traces.|
|End-to-end determinism test|QA|Merge, key connectors|Same inputs -> same hashes.|
|Override audit logging|BE-Merge|Observability|DOING structured override logging and metrics emitted; await production telemetry review.|
|Configurable precedence table|BE-Merge|Architecture|DOING precedence overrides now accepted via options; document operator workflow.|

View File

@@ -0,0 +1,25 @@
# AGENTS
## Role
Canonical data model for normalized advisories and all downstream serialization. Source of truth for merge/export.
## Scope
- Canonical types: Advisory, AdvisoryReference, CvssMetric, AffectedPackage, AffectedVersionRange, AdvisoryProvenance.
- Invariants: stable ordering, culture-invariant serialization, UTC timestamps, deterministic equality semantics.
- Field semantics: preserve all aliases/references; ranges per ecosystem (NEVRA/EVR/SemVer); provenance on every mapped field.
- Backward/forward compatibility: additive evolution; versioned DTOs where needed; no breaking field renames.
- Detailed field coverage documented in `CANONICAL_RECORDS.md`; update alongside model changes.
## Participants
- Source connectors map external DTOs into these types.
- Merge engine composes/overrides AffectedPackage sets and consolidates references/aliases.
- Exporters serialize canonical documents deterministically.
## Interfaces & contracts
- Null-object statics: Advisory.Empty, AdvisoryReference.Empty, CvssMetric.Empty.
- AffectedPackage.Type describes semantics (e.g., rpm, deb, cpe, semver). Identifier is stable (e.g., NEVRA, PURL, CPE).
- Version ranges list is ordered by introduction then fix; provenance identifies source/kind/value/recordedAt.
- Alias schemes must include CVE, GHSA, OSV, JVN/JVNDB, BDU, VU(CERT/CC), MSRC, CISCO-SA, ORACLE-CPU, APSB/APA, APPLE-HT, CHROMIUM-POST, VMSA, RHSA, USN, DSA, SUSE-SU, ICSA, CWE, CPE, PURL.
## In/Out of scope
In: data shapes, invariants, helpers for canonical serialization and comparison.
Out: fetching/parsing external schemas, storage, HTTP.
## Observability & security expectations
- No secrets; purely in-memory types.
- Provide debug renders for test snapshots (canonical JSON).
- Emit model version identifiers in logs when canonical structures change; keep adapters for older readers until deprecated.

View File

@@ -0,0 +1,145 @@
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Canonical advisory document produced after merge. Collections are pre-sorted for deterministic serialization.
/// </summary>
public sealed record Advisory
{
// Null-object instance used when no advisory data is available.
public static Advisory Empty { get; } = new(
advisoryKey: "unknown",
title: "",
summary: null,
language: null,
published: null,
modified: null,
severity: null,
exploitKnown: false,
aliases: Array.Empty<string>(),
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: Array.Empty<AdvisoryProvenance>());
/// <summary>
/// Normalizing constructor: trims/normalizes scalar fields and dedupes/sorts every collection so
/// that serializing the same logical advisory always yields byte-identical output.
/// </summary>
public Advisory(
string advisoryKey,
string title,
string? summary,
string? language,
DateTimeOffset? published,
DateTimeOffset? modified,
string? severity,
bool exploitKnown,
IEnumerable<string>? aliases,
IEnumerable<AdvisoryReference>? references,
IEnumerable<AffectedPackage>? affectedPackages,
IEnumerable<CvssMetric>? cvssMetrics,
IEnumerable<AdvisoryProvenance>? provenance)
{
AdvisoryKey = Validation.EnsureNotNullOrWhiteSpace(advisoryKey, nameof(advisoryKey));
Title = Validation.EnsureNotNullOrWhiteSpace(title, nameof(title));
Summary = Validation.TrimToNull(summary);
// Language tags are lower-cased for canonical comparison.
Language = Validation.TrimToNull(language)?.ToLowerInvariant();
// Timestamps are always stored in UTC.
Published = published?.ToUniversalTime();
Modified = modified?.ToUniversalTime();
Severity = SeverityNormalization.Normalize(severity);
ExploitKnown = exploitKnown;
// Aliases: keep only values that normalize successfully, then dedupe and sort ordinally.
Aliases = (aliases ?? Array.Empty<string>())
.Select(static alias => Validation.TryNormalizeAlias(alias, out var normalized) ? normalized! : null)
.Where(static alias => alias is not null)
.Distinct(StringComparer.Ordinal)
.OrderBy(static alias => alias, StringComparer.Ordinal)
.Select(static alias => alias!)
.ToImmutableArray();
// References: deterministic ordering by URL, then kind, tag, and recording time.
References = (references ?? Array.Empty<AdvisoryReference>())
.Where(static reference => reference is not null)
.OrderBy(static reference => reference.Url, StringComparer.Ordinal)
.ThenBy(static reference => reference.Kind, StringComparer.Ordinal)
.ThenBy(static reference => reference.SourceTag, StringComparer.Ordinal)
.ThenBy(static reference => reference.Provenance.RecordedAt)
.ToImmutableArray();
// Packages: deterministic ordering by type, identifier, then platform.
AffectedPackages = (affectedPackages ?? Array.Empty<AffectedPackage>())
.Where(static package => package is not null)
.OrderBy(static package => package.Type, StringComparer.Ordinal)
.ThenBy(static package => package.Identifier, StringComparer.Ordinal)
.ThenBy(static package => package.Platform, StringComparer.Ordinal)
.ToImmutableArray();
// CVSS metrics: ordered by version then vector string.
CvssMetrics = (cvssMetrics ?? Array.Empty<CvssMetric>())
.Where(static metric => metric is not null)
.OrderBy(static metric => metric.Version, StringComparer.Ordinal)
.ThenBy(static metric => metric.Vector, StringComparer.Ordinal)
.ToImmutableArray();
// Provenance: ordered by source, kind, then recording time.
Provenance = (provenance ?? Array.Empty<AdvisoryProvenance>())
.Where(static p => p is not null)
.OrderBy(static p => p.Source, StringComparer.Ordinal)
.ThenBy(static p => p.Kind, StringComparer.Ordinal)
.ThenBy(static p => p.RecordedAt)
.ToImmutableArray();
}
/// <summary>
/// JSON deserialization entry point: forwards to the normalizing constructor, mapping default
/// (uninitialized) immutable arrays to null so they are treated as empty collections.
/// </summary>
[JsonConstructor]
public Advisory(
string advisoryKey,
string title,
string? summary,
string? language,
DateTimeOffset? published,
DateTimeOffset? modified,
string? severity,
bool exploitKnown,
ImmutableArray<string> aliases,
ImmutableArray<AdvisoryReference> references,
ImmutableArray<AffectedPackage> affectedPackages,
ImmutableArray<CvssMetric> cvssMetrics,
ImmutableArray<AdvisoryProvenance> provenance)
: this(
advisoryKey,
title,
summary,
language,
published,
modified,
severity,
exploitKnown,
aliases.IsDefault ? null : aliases.AsEnumerable(),
references.IsDefault ? null : references.AsEnumerable(),
affectedPackages.IsDefault ? null : affectedPackages.AsEnumerable(),
cvssMetrics.IsDefault ? null : cvssMetrics.AsEnumerable(),
provenance.IsDefault ? null : provenance.AsEnumerable())
{
}
public string AdvisoryKey { get; }
public string Title { get; }
public string? Summary { get; }
// Lower-cased language tag, or null when not provided.
public string? Language { get; }
// UTC publication/modification timestamps, when known.
public DateTimeOffset? Published { get; }
public DateTimeOffset? Modified { get; }
// Normalized severity label (see SeverityNormalization), or null.
public string? Severity { get; }
public bool ExploitKnown { get; }
public ImmutableArray<string> Aliases { get; }
public ImmutableArray<AdvisoryReference> References { get; }
public ImmutableArray<AffectedPackage> AffectedPackages { get; }
public ImmutableArray<CvssMetric> CvssMetrics { get; }
public ImmutableArray<AdvisoryProvenance> Provenance { get; }
}

View File

@@ -0,0 +1,28 @@
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Describes the origin of a canonical field and how/when it was captured.
/// </summary>
public sealed record AdvisoryProvenance
{
/// <summary>Null-object instance for fields whose origin is unknown.</summary>
public static AdvisoryProvenance Empty { get; } = new("unknown", "unspecified", string.Empty, DateTimeOffset.UnixEpoch);
/// <summary>
/// Creates a provenance entry.
/// </summary>
/// <param name="source">Identifier of the upstream source (required, non-blank).</param>
/// <param name="kind">Category of the captured datum (required, non-blank).</param>
/// <param name="value">Optional raw value; blank input normalizes to null. Declared nullable so a
/// serialized null <see cref="Value"/> round-trips through the JSON constructor cleanly.</param>
/// <param name="recordedAt">Capture timestamp; normalized to UTC.</param>
[JsonConstructor]
public AdvisoryProvenance(string source, string kind, string? value, DateTimeOffset recordedAt)
{
Source = Validation.EnsureNotNullOrWhiteSpace(source, nameof(source));
Kind = Validation.EnsureNotNullOrWhiteSpace(kind, nameof(kind));
// Blank strings collapse to null so Value stays canonical.
Value = Validation.TrimToNull(value);
RecordedAt = recordedAt.ToUniversalTime();
}
public string Source { get; }
public string Kind { get; }
/// <summary>Raw captured value, or null when not applicable.</summary>
public string? Value { get; }
/// <summary>UTC timestamp of when the value was recorded.</summary>
public DateTimeOffset RecordedAt { get; }
}

View File

@@ -0,0 +1,36 @@
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Canonical external reference associated with an advisory.
/// </summary>
public sealed record AdvisoryReference
{
    /// <summary>Null-object instance carrying a placeholder URL.</summary>
    public static AdvisoryReference Empty { get; } = new("https://invalid.local/", kind: null, sourceTag: null, summary: null, provenance: AdvisoryProvenance.Empty);

    /// <summary>
    /// Creates a reference. <paramref name="url"/> must look like an absolute http(s) URI;
    /// optional text fields have blank values normalized to null.
    /// </summary>
    [JsonConstructor]
    public AdvisoryReference(string url, string? kind, string? sourceTag, string? summary, AdvisoryProvenance provenance)
    {
        Url = Validation.LooksLikeHttpUrl(url)
            ? url
            : throw new ArgumentException("Reference URL must be an absolute http(s) URI.", nameof(url));

        // Blank strings collapse to null so optional fields stay canonical.
        Kind = Validation.TrimToNull(kind);
        SourceTag = Validation.TrimToNull(sourceTag);
        Summary = Validation.TrimToNull(summary);

        // Null provenance falls back to the null-object instance.
        Provenance = provenance ?? AdvisoryProvenance.Empty;
    }

    /// <summary>Absolute http(s) URL of the reference.</summary>
    public string Url { get; }
    public string? Kind { get; }
    public string? SourceTag { get; }
    public string? Summary { get; }
    /// <summary>Provenance of this reference; never null.</summary>
    public AdvisoryProvenance Provenance { get; }
}

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Canonical affected package descriptor with deterministic ordering of ranges and provenance.
/// </summary>
public sealed record AffectedPackage
{
// Null-object instance used when no package data is available.
public static AffectedPackage Empty { get; } = new(
AffectedPackageTypes.SemVer,
identifier: "unknown",
platform: null,
versionRanges: Array.Empty<AffectedVersionRange>(),
statuses: Array.Empty<AffectedPackageStatus>(),
provenance: Array.Empty<AdvisoryProvenance>());
/// <summary>
/// Creates a package descriptor; collections are de-duplicated and sorted so that serializing
/// the same logical package is always deterministic.
/// </summary>
/// <param name="type">Semantic type of the coordinates; normalized to lower case.</param>
/// <param name="identifier">Stable identifier (NEVRA, PackageURL, CPE string, vendor slug, ...).</param>
/// <param name="platform">Optional platform qualifier; blank values become null.</param>
/// <param name="versionRanges">Ranges; deduped (provenance ignored) then sorted deterministically.</param>
/// <param name="statuses">Vendor status tags; deduped and ordered by status then provenance.</param>
/// <param name="provenance">Field-level provenance; ordered by source, kind, then timestamp.</param>
[JsonConstructor]
public AffectedPackage(
string type,
string identifier,
string? platform = null,
IEnumerable<AffectedVersionRange>? versionRanges = null,
IEnumerable<AffectedPackageStatus>? statuses = null,
IEnumerable<AdvisoryProvenance>? provenance = null)
{
Type = Validation.EnsureNotNullOrWhiteSpace(type, nameof(type)).ToLowerInvariant();
Identifier = Validation.EnsureNotNullOrWhiteSpace(identifier, nameof(identifier));
Platform = Validation.TrimToNull(platform);
// Dedupe by value fields (provenance ignored), then apply the deterministic sort order.
VersionRanges = (versionRanges ?? Array.Empty<AffectedVersionRange>())
.Distinct(AffectedVersionRangeEqualityComparer.Instance)
.OrderBy(static range => range, AffectedVersionRangeComparer.Instance)
.ToImmutableArray();
Statuses = (statuses ?? Array.Empty<AffectedPackageStatus>())
.Where(static status => status is not null)
.Distinct(AffectedPackageStatusEqualityComparer.Instance)
.OrderBy(static status => status.Status, StringComparer.Ordinal)
.ThenBy(static status => status.Provenance.Source, StringComparer.Ordinal)
.ThenBy(static status => status.Provenance.Kind, StringComparer.Ordinal)
.ThenBy(static status => status.Provenance.RecordedAt)
.ToImmutableArray();
Provenance = (provenance ?? Array.Empty<AdvisoryProvenance>())
.Where(static p => p is not null)
.OrderBy(static p => p.Source, StringComparer.Ordinal)
.ThenBy(static p => p.Kind, StringComparer.Ordinal)
.ThenBy(static p => p.RecordedAt)
.ToImmutableArray();
}
/// <summary>
/// Semantic type of the coordinates (rpm, deb, cpe, semver, vendor, ics-vendor).
/// </summary>
public string Type { get; }
/// <summary>
/// Canonical identifier for the package (NEVRA, PackageURL, CPE string, vendor slug, etc.).
/// </summary>
public string Identifier { get; }
// Optional platform qualifier; null when not applicable.
public string? Platform { get; }
public ImmutableArray<AffectedVersionRange> VersionRanges { get; }
public ImmutableArray<AffectedPackageStatus> Statuses { get; }
public ImmutableArray<AdvisoryProvenance> Provenance { get; }
}
/// <summary>
/// Known values for <see cref="AffectedPackage.Type"/>.
/// Each constant names the semantics of the package identifier (see <see cref="AffectedPackage.Identifier"/>).
/// </summary>
public static class AffectedPackageTypes
{
public const string Rpm = "rpm"; // identified by NEVRA coordinates
public const string Deb = "deb"; // Debian packages (EVR versioning)
public const string Cpe = "cpe"; // identified by a CPE string
public const string SemVer = "semver"; // SemVer-ranged ecosystem packages
public const string Vendor = "vendor"; // vendor product slug
public const string IcsVendor = "ics-vendor"; // ICS vendor product slug
}

View File

@@ -0,0 +1,46 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Represents a vendor-supplied status tag for an affected package when a concrete version range is unavailable or supplementary.
/// </summary>
public sealed record AffectedPackageStatus
{
/// <summary>
/// Creates a status tag. The label is canonicalized via
/// <see cref="AffectedPackageStatusCatalog.Normalize"/>, which rejects blank or unknown labels.
/// </summary>
[JsonConstructor]
public AffectedPackageStatus(string status, AdvisoryProvenance provenance)
{
Status = AffectedPackageStatusCatalog.Normalize(status);
// Null provenance falls back to the null-object instance.
Provenance = provenance ?? AdvisoryProvenance.Empty;
}
/// <summary>Canonical status token (e.g. "known_affected").</summary>
public string Status { get; }
public AdvisoryProvenance Provenance { get; }
}
/// <summary>
/// Value equality over <see cref="AffectedPackageStatus"/>: ordinal status text plus provenance record equality.
/// </summary>
public sealed class AffectedPackageStatusEqualityComparer : IEqualityComparer<AffectedPackageStatus>
{
    public static AffectedPackageStatusEqualityComparer Instance { get; } = new();

    public bool Equals(AffectedPackageStatus? x, AffectedPackageStatus? y)
        => (x, y) switch
        {
            (null, null) => true,
            (null, _) or (_, null) => false,
            _ => string.Equals(x.Status, y.Status, StringComparison.Ordinal)
                && EqualityComparer<AdvisoryProvenance>.Default.Equals(x.Provenance, y.Provenance),
        };

    public int GetHashCode(AffectedPackageStatus obj)
        => HashCode.Combine(obj.Status, obj.Provenance);
}

View File

@@ -0,0 +1,55 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Central registry of allowed affected-package status labels to keep connectors consistent.
/// </summary>
public static class AffectedPackageStatusCatalog
{
    public const string KnownAffected = "known_affected";
    public const string KnownNotAffected = "known_not_affected";
    public const string UnderInvestigation = "under_investigation";
    public const string Fixed = "fixed";
    public const string FirstFixed = "first_fixed";
    public const string Mitigated = "mitigated";
    public const string NotApplicable = "not_applicable";
    public const string Affected = "affected";
    public const string NotAffected = "not_affected";
    public const string Pending = "pending";
    public const string Unknown = "unknown";

    // Case-insensitive lookup so validation of the canonical token stays cheap.
    private static readonly HashSet<string> AllowedStatuses = new(StringComparer.OrdinalIgnoreCase)
    {
        KnownAffected,
        KnownNotAffected,
        UnderInvestigation,
        Fixed,
        FirstFixed,
        Mitigated,
        NotApplicable,
        Affected,
        NotAffected,
        Pending,
        Unknown,
    };

    /// <summary>All status labels accepted by <see cref="Normalize"/>.</summary>
    public static IReadOnlyCollection<string> Allowed => AllowedStatuses;

    /// <summary>
    /// Canonicalizes a status label: trims, lowercases, and maps spaces/hyphens to underscores.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="status"/> is null or whitespace.</exception>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when the canonical token is not in the glossary.</exception>
    public static string Normalize(string status)
    {
        if (string.IsNullOrWhiteSpace(status))
        {
            throw new ArgumentException("Status must be provided.", nameof(status));
        }

        var token = status
            .Trim()
            .ToLowerInvariant()
            .Replace(' ', '_')
            .Replace('-', '_');

        return AllowedStatuses.Contains(token)
            ? token
            : throw new ArgumentOutOfRangeException(nameof(status), status, "Status is not part of the allowed affected-package status glossary.");
    }
}

View File

@@ -0,0 +1,145 @@
using System.Text.Json.Serialization;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Describes a contiguous range of versions impacted by an advisory.
/// </summary>
public sealed record AffectedVersionRange
{
/// <summary>
/// Creates a range. <paramref name="rangeKind"/> is required and normalized to lower case;
/// all version fields are optional, with blank values normalized to null.
/// </summary>
[JsonConstructor]
public AffectedVersionRange(
string rangeKind,
string? introducedVersion,
string? fixedVersion,
string? lastAffectedVersion,
string? rangeExpression,
AdvisoryProvenance provenance)
{
RangeKind = Validation.EnsureNotNullOrWhiteSpace(rangeKind, nameof(rangeKind)).ToLowerInvariant();
IntroducedVersion = Validation.TrimToNull(introducedVersion);
FixedVersion = Validation.TrimToNull(fixedVersion);
LastAffectedVersion = Validation.TrimToNull(lastAffectedVersion);
RangeExpression = Validation.TrimToNull(rangeExpression);
// Null provenance falls back to the null-object instance.
Provenance = provenance ?? AdvisoryProvenance.Empty;
}
/// <summary>
/// Semantic kind of the range (e.g., semver, nevra, evr).
/// </summary>
public string RangeKind { get; }
/// <summary>
/// Inclusive version where impact begins.
/// </summary>
public string? IntroducedVersion { get; }
/// <summary>
/// Exclusive version where impact ends due to a fix.
/// </summary>
public string? FixedVersion { get; }
/// <summary>
/// Inclusive upper bound where the vendor reports exposure (when no fix available).
/// </summary>
public string? LastAffectedVersion { get; }
/// <summary>
/// Normalized textual representation of the range (fallback).
/// </summary>
public string? RangeExpression { get; }
public AdvisoryProvenance Provenance { get; }
/// <summary>
/// Builds a pipe-delimited key over all value fields (provenance excluded); ranges that are equal
/// under <see cref="AffectedVersionRangeEqualityComparer"/> produce the same key.
/// </summary>
public string CreateDeterministicKey()
=> string.Join('|', RangeKind, IntroducedVersion ?? string.Empty, FixedVersion ?? string.Empty, LastAffectedVersion ?? string.Empty, RangeExpression ?? string.Empty);
}
/// <summary>
/// Deterministic comparer for version ranges. Orders by introduced, fixed, last affected, expression, kind.
/// Nulls sort before non-null ranges; all string comparisons are ordinal.
/// </summary>
public sealed class AffectedVersionRangeComparer : IComparer<AffectedVersionRange>
{
    public static AffectedVersionRangeComparer Instance { get; } = new();

    private static readonly StringComparer Ordinal = StringComparer.Ordinal;

    public int Compare(AffectedVersionRange? x, AffectedVersionRange? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }

        if (x is null)
        {
            return -1;
        }

        if (y is null)
        {
            return 1;
        }

        // Compare field pairs in documented priority order; the first difference decides.
        foreach (var (left, right) in new[]
        {
            (x.IntroducedVersion, y.IntroducedVersion),
            (x.FixedVersion, y.FixedVersion),
            (x.LastAffectedVersion, y.LastAffectedVersion),
            (x.RangeExpression, y.RangeExpression),
            (x.RangeKind, y.RangeKind),
        })
        {
            var result = Ordinal.Compare(left, right);
            if (result != 0)
            {
                return result;
            }
        }

        return 0;
    }
}
/// <summary>
/// Equality comparer that ignores provenance differences.
/// Two ranges are equal when all five value fields match ordinally.
/// </summary>
public sealed class AffectedVersionRangeEqualityComparer : IEqualityComparer<AffectedVersionRange>
{
    public static AffectedVersionRangeEqualityComparer Instance { get; } = new();

    public bool Equals(AffectedVersionRange? x, AffectedVersionRange? y)
    {
        if (x is null || y is null)
        {
            // Both null => equal; exactly one null => not equal.
            return ReferenceEquals(x, y);
        }

        // Tuple equality compares string elements with default (ordinal) equality.
        return (x.RangeKind, x.IntroducedVersion, x.FixedVersion, x.LastAffectedVersion, x.RangeExpression)
            == (y.RangeKind, y.IntroducedVersion, y.FixedVersion, y.LastAffectedVersion, y.RangeExpression);
    }

    public int GetHashCode(AffectedVersionRange obj)
        => HashCode.Combine(
            obj.RangeKind,
            obj.IntroducedVersion,
            obj.FixedVersion,
            obj.LastAffectedVersion,
            obj.RangeExpression);
}

View File

@@ -0,0 +1,166 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.RegularExpressions;
namespace StellaOps.Feedser.Models;
/// <summary>
/// Detects and canonicalizes advisory alias identifiers (CVE, GHSA, OSV, vendor schemes, ...).
/// Schemes are evaluated in declaration order and the first matching predicate wins.
/// </summary>
public static class AliasSchemeRegistry
{
// Pairs a scheme name with a match predicate and a normalizer that canonicalizes prefix casing.
private sealed record AliasScheme(
string Name,
Func<string?, bool> Predicate,
Func<string?, string> Normalizer);
// Ordered registry; the regex fields referenced inside the lambdas are declared below, which is
// safe because the lambdas only evaluate them at call time, not during static initialization.
private static readonly AliasScheme[] SchemeDefinitions =
{
BuildScheme(AliasSchemes.Cve, alias => alias is not null && Matches(CvERegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "CVE")),
BuildScheme(AliasSchemes.Ghsa, alias => alias is not null && Matches(GhsaRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "GHSA")),
BuildScheme(AliasSchemes.OsV, alias => alias is not null && Matches(OsVRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "OSV")),
BuildScheme(AliasSchemes.Jvn, alias => alias is not null && Matches(JvnRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "JVN")),
BuildScheme(AliasSchemes.Jvndb, alias => alias is not null && Matches(JvndbRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "JVNDB")),
BuildScheme(AliasSchemes.Bdu, alias => alias is not null && Matches(BduRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "BDU")),
BuildScheme(AliasSchemes.Vu, alias => alias is not null && alias.StartsWith("VU#", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "VU", preserveSeparator: '#')),
BuildScheme(AliasSchemes.Msrc, alias => alias is not null && alias.StartsWith("MSRC-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "MSRC")),
BuildScheme(AliasSchemes.CiscoSa, alias => alias is not null && alias.StartsWith("CISCO-SA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "CISCO-SA")),
BuildScheme(AliasSchemes.OracleCpu, alias => alias is not null && alias.StartsWith("ORACLE-CPU", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "ORACLE-CPU")),
BuildScheme(AliasSchemes.Apsb, alias => alias is not null && alias.StartsWith("APSB-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APSB")),
BuildScheme(AliasSchemes.Apa, alias => alias is not null && alias.StartsWith("APA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APA")),
BuildScheme(AliasSchemes.AppleHt, alias => alias is not null && alias.StartsWith("APPLE-HT", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APPLE-HT")),
BuildScheme(AliasSchemes.ChromiumPost, alias => alias is not null && (alias.StartsWith("CHROMIUM-POST", StringComparison.OrdinalIgnoreCase) || alias.StartsWith("CHROMIUM:", StringComparison.OrdinalIgnoreCase)), NormalizeChromium),
BuildScheme(AliasSchemes.Vmsa, alias => alias is not null && alias.StartsWith("VMSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "VMSA")),
BuildScheme(AliasSchemes.Rhsa, alias => alias is not null && alias.StartsWith("RHSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "RHSA")),
BuildScheme(AliasSchemes.Usn, alias => alias is not null && alias.StartsWith("USN-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "USN")),
BuildScheme(AliasSchemes.Dsa, alias => alias is not null && alias.StartsWith("DSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "DSA")),
BuildScheme(AliasSchemes.SuseSu, alias => alias is not null && alias.StartsWith("SUSE-SU-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "SUSE-SU")),
BuildScheme(AliasSchemes.Icsa, alias => alias is not null && alias.StartsWith("ICSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "ICSA")),
BuildScheme(AliasSchemes.Cwe, alias => alias is not null && Matches(CweRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "CWE")),
// cpe:/pkg: schemes keep their canonical lower-case prefix (uppercase: false).
BuildScheme(AliasSchemes.Cpe, alias => alias is not null && alias.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "cpe", uppercase:false)),
BuildScheme(AliasSchemes.Purl, alias => alias is not null && alias.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "pkg", uppercase:false)),
};
// NOTE(review): the extra lambda wrapper around 'normalizer' is redundant (it forwards unchanged);
// kept as-is to avoid touching behavior.
private static AliasScheme BuildScheme(string name, Func<string?, bool> predicate, Func<string?, string> normalizer)
=> new(
name,
predicate,
alias => normalizer(alias));
// Case-insensitive set of all registered scheme names.
private static readonly ImmutableHashSet<string> SchemeNames = SchemeDefinitions
.Select(static scheme => scheme.Name)
.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);
// Structured-identifier patterns; case-insensitive so predicates accept any casing.
private static readonly Regex CvERegex = new("^CVE-\\d{4}-\\d{4,}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex GhsaRegex = new("^GHSA-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex OsVRegex = new("^OSV-\\d{4}-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex JvnRegex = new("^JVN-\\d{4}-\\d{6}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex JvndbRegex = new("^JVNDB-\\d{4}-\\d{6}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex BduRegex = new("^BDU-\\d{4}-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private static readonly Regex CweRegex = new("^CWE-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
public static IReadOnlyCollection<string> KnownSchemes => SchemeNames;
/// <summary>Returns true when the given scheme name is registered (case-insensitive).</summary>
public static bool IsKnownScheme(string? scheme)
=> !string.IsNullOrWhiteSpace(scheme) && SchemeNames.Contains(scheme);
/// <summary>
/// Resolves the scheme of an alias without normalizing it. Returns false (scheme = "") when
/// the alias is blank or no registered predicate matches.
/// </summary>
public static bool TryGetScheme(string? alias, out string scheme)
{
if (string.IsNullOrWhiteSpace(alias))
{
scheme = string.Empty;
return false;
}
var candidate = alias.Trim();
foreach (var entry in SchemeDefinitions)
{
if (entry.Predicate(candidate))
{
scheme = entry.Name;
return true;
}
}
scheme = string.Empty;
return false;
}
/// <summary>
/// Normalizes an alias via the first matching scheme. On failure returns false with
/// <paramref name="normalized"/> set to the trimmed input (blank input yields "").
/// </summary>
public static bool TryNormalize(string? alias, out string normalized, out string scheme)
{
normalized = string.Empty;
scheme = string.Empty;
if (string.IsNullOrWhiteSpace(alias))
{
return false;
}
var candidate = alias.Trim();
foreach (var entry in SchemeDefinitions)
{
if (entry.Predicate(candidate))
{
scheme = entry.Name;
normalized = entry.Normalizer(candidate);
return true;
}
}
normalized = candidate;
return false;
}
// Canonicalizes only the prefix casing of an alias; the remainder is preserved verbatim.
// Aliases that do not start with the prefix are returned as-is (lower-cased when uppercase is false).
private static string NormalizePrefix(string? alias, string prefix, bool uppercase = true, char? preserveSeparator = null)
{
if (string.IsNullOrWhiteSpace(alias))
{
return string.Empty;
}
var comparison = StringComparison.OrdinalIgnoreCase;
if (!alias.StartsWith(prefix, comparison))
{
return uppercase ? alias : alias.ToLowerInvariant();
}
var remainder = alias[prefix.Length..];
if (preserveSeparator is { } separator && remainder.Length > 0 && remainder[0] != separator)
{
// Edge case: the alias was expected to carry a specific separator (e.g. "VU#") but does not —
// return it with only the prefix casing adjusted.
// NOTE(review): for uppercase callers this produces the same result as the fall-through below;
// confirm whether the intent was to return the alias fully unchanged here.
return uppercase ? prefix.ToUpperInvariant() + remainder : prefix + remainder;
}
var normalizedPrefix = uppercase ? prefix.ToUpperInvariant() : prefix.ToLowerInvariant();
return normalizedPrefix + remainder;
}
// Handles both "CHROMIUM-POST..." and "CHROMIUM:..." spellings, upper-casing the CHROMIUM prefix.
private static string NormalizeChromium(string? alias)
{
if (string.IsNullOrWhiteSpace(alias))
{
return string.Empty;
}
if (alias.StartsWith("CHROMIUM-POST", StringComparison.OrdinalIgnoreCase))
{
return NormalizePrefix(alias, "CHROMIUM-POST");
}
if (alias.StartsWith("CHROMIUM:", StringComparison.OrdinalIgnoreCase))
{
// Keep everything after the prefix (":..."), upper-casing only "CHROMIUM".
var remainder = alias["CHROMIUM".Length..];
return "CHROMIUM" + remainder;
}
return alias;
}
// Null-safe regex match helper; blank candidates never match.
private static bool Matches(Regex? regex, string? candidate)
{
if (regex is null || string.IsNullOrWhiteSpace(candidate))
{
return false;
}
return regex.IsMatch(candidate);
}
}

View File

@@ -0,0 +1,31 @@
namespace StellaOps.Feedser.Models;
/// <summary>
/// Well-known alias scheme identifiers referenced throughout the pipeline.
/// Values must stay in sync with the predicates/normalizers registered in AliasSchemeRegistry.
/// </summary>
public static class AliasSchemes
{
public const string Cve = "CVE";
public const string Ghsa = "GHSA";
public const string OsV = "OSV";
public const string Jvn = "JVN";
public const string Jvndb = "JVNDB";
public const string Bdu = "BDU";
public const string Vu = "VU";
public const string Msrc = "MSRC";
public const string CiscoSa = "CISCO-SA";
public const string OracleCpu = "ORACLE-CPU";
public const string Apsb = "APSB";
public const string Apa = "APA";
public const string AppleHt = "APPLE-HT";
public const string ChromiumPost = "CHROMIUM-POST";
public const string Vmsa = "VMSA";
public const string Rhsa = "RHSA";
public const string Usn = "USN";
public const string Dsa = "DSA";
public const string SuseSu = "SUSE-SU";
public const string Icsa = "ICSA";
public const string Cwe = "CWE";
// Scheme names for cpe:/pkg: aliases; the aliases themselves keep lower-case prefixes.
public const string Cpe = "CPE";
public const string Purl = "PURL";
}

View File

@@ -0,0 +1,41 @@
# Canonical Model Backward-Compatibility Playbook
This playbook captures the policies and workflow required when evolving the canonical
`StellaOps.Feedser.Models` surface.
## Principles
- **Additive by default** — breaking field removals or renames are not allowed without a staged
  migration plan.
- **Version the writer** — any change to serialization that affects downstream consumers must bump
  the exporter version string and update `CANONICAL_RECORDS.md`.
- **Schema first** — update documentation (`CANONICAL_RECORDS.md`) and the corresponding tests before
  shipping new fields.
- **Dual-read period** — when introducing a new field, keep old readers working by:
  1. Making the field optional in the canonical model.
  2. Providing default behavior in exporters/mergers when the field is absent.
  3. Communicating via release notes and toggles when the field will become required.
## Workflow for Changes
1. **Proposal** — raise an issue describing the motivation, affected records, and compatibility
   impact. Link to the relevant task in `TASKS.md`.
2. **Docs + tests first** — update `CANONICAL_RECORDS.md`, add or adjust golden fixtures, and extend
   regression tests (hash comparisons, snapshot assertions) to capture the new shape.
3. **Implementation** — introduce the model change along with migration logic (e.g., mergers filling
   defaults, exporters emitting the new payload).
4. **Exporter bump** — update exporter version manifests (`ExporterVersion.GetVersion`) whenever the
   serialized payload differs.
5. **Announcement** — document the change in release notes, highlighting optional vs. required
   timelines.
6. **Cleanup** — once consumers have migrated, remove transitional logic and update docs/tests to
   reflect the permanent shape.
## Testing Checklist
- `StellaOps.Feedser.Tests.Models` — update unit tests and golden examples.
- Serialization determinism — ensure the hash regression tests cover the new fields.
- Exporter integration (`Json`, `TrivyDb`) — confirm manifests include provenance + tree metadata
  for the new shape.
Following this playbook keeps canonical payloads stable while allowing incremental evolution.

View File

@@ -0,0 +1,128 @@
# Canonical Record Definitions
> Source of truth for the normalized advisory schema emitted by `StellaOps.Feedser.Models`.
> Keep this document in sync with the public record types under `StellaOps.Feedser.Models` and
> update it whenever a new field is introduced or semantics change.
## Advisory
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `advisoryKey` | string | yes | Globally unique identifier selected by the merge layer (often a CVE/GHSA/vendor key). Stored lowercased unless vendor casing is significant. |
| `title` | string | yes | Human readable title. Must be non-empty and trimmed. |
| `summary` | string? | optional | Short description; trimmed to `null` when empty. |
| `language` | string? | optional | ISO language code (lowercase). |
| `published` | DateTimeOffset? | optional | UTC timestamp when vendor originally published. |
| `modified` | DateTimeOffset? | optional | UTC timestamp when vendor last updated. |
| `severity` | string? | optional | Normalized severity label (`critical`, `high`, etc.). |
| `exploitKnown` | bool | yes | Whether KEV/other sources confirm active exploitation. |
| `aliases` | string[] | yes | Sorted, de-duplicated list of normalized aliases (see [Alias Schemes](#alias-schemes)). |
| `references` | AdvisoryReference[] | yes | Deterministically ordered reference set. |
| `affectedPackages` | AffectedPackage[] | yes | Deterministically ordered affected packages. |
| `cvssMetrics` | CvssMetric[] | yes | Deterministically ordered CVSS metrics (v3, v4 first). |
| `provenance` | AdvisoryProvenance[] | yes | Normalized provenance entries sorted by source then kind then recorded timestamp. |
### Invariants
- Collections are immutable (`ImmutableArray<T>`) and always sorted deterministically.
- `AdvisoryKey` and `Title` are mandatory and trimmed.
- All timestamps are stored as UTC.
- Aliases and references leverage helper registries for validation.
## AdvisoryReference
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `url` | string | yes | Absolute HTTP/HTTPS URL. |
| `kind` | string? | optional | Categorized reference role (e.g. `advisory`, `patch`, `changelog`). |
| `sourceTag` | string? | optional | Free-form tag identifying originating source. |
| `summary` | string? | optional | Short description. |
| `provenance` | AdvisoryProvenance | yes | Provenance entry describing how the reference was mapped. |
Deterministic ordering: by `url`, then `kind`, then `sourceTag`, then `provenance.RecordedAt`.
## AffectedPackage
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `type` | string | yes | Semantic type (`semver`, `rpm`, `deb`, `purl`, `cpe`, etc.). Lowercase. |
| `identifier` | string | yes | Canonical identifier (package name, PURL, CPE, NEVRA, etc.). |
| `platform` | string? | optional | Explicit platform / distro (e.g. `ubuntu`, `rhel-8`). |
| `versionRanges` | AffectedVersionRange[] | yes | Deduplicated + sorted by introduced/fixed/last/expr/kind. |
| `statuses` | AffectedPackageStatus[] | yes | Optional status flags (e.g. `fixed`, `affected`). |
| `provenance` | AdvisoryProvenance[] | yes | Provenance entries for package level metadata. |
Deterministic ordering: packages sorted by `type`, then `identifier`, then `platform` (ordinal).
## AffectedVersionRange
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `rangeKind` | string | yes | Classification of range semantics (`semver`, `evr`, `nevra`, `version`, `purl`). Lowercase. |
| `introducedVersion` | string? | optional | Inclusive lower bound when impact begins. |
| `fixedVersion` | string? | optional | Exclusive bounding version containing the fix. |
| `lastAffectedVersion` | string? | optional | Inclusive upper bound when no fix exists. |
| `rangeExpression` | string? | optional | Normalized textual expression for non-simple ranges. |
| `provenance` | AdvisoryProvenance | yes | Provenance entry for the range. |
Comparers/equality ignore provenance differences.
## CvssMetric
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `version` | string | yes | `2.0`, `3.0`, `3.1`, `4.0`, etc. |
| `vector` | string | yes | Official CVSS vector string. |
| `score` | double | yes | CVSS base score (0.0-10.0). |
| `severity` | string | yes | Severity label mapped from score or vendor metadata. |
| `provenance` | AdvisoryProvenance | yes | Provenance entry. |
Sorted by version then vector for determinism.
## AdvisoryProvenance
| Field | Type | Required | Notes |
|-------|------|----------|-------|
| `source` | string | yes | Logical source identifier (`nvd`, `redhat`, `osv`, etc.). |
| `kind` | string | yes | Operation performed (`fetch`, `parse`, `map`, `merge`, `enrich`). |
| `detail` | string | optional | Free-form pipeline detail (parser identifier, rule set). |
| `recordedAt` | DateTimeOffset | yes | UTC timestamp when provenance was captured. |
### Provenance Mask Expectations
Each canonical field is expected to carry at least one provenance entry derived from the
responsible pipeline stage. When aggregating provenance from subcomponents (e.g., affected package
ranges), merge code should ensure:
- Advisory level provenance documents the source document and merge actions.
- References, packages, ranges, and metrics each include their own provenance entry reflecting
the most specific source (vendor feed, computed normalization, etc.).
- Export-specific metadata (digest manifests, offline bundles) include exporter version alongside
the builder metadata.
## Alias Schemes
Supported alias scheme prefixes:
- `CVE-`
- `GHSA-`
- `OSV-`
- `JVN-`, `JVNDB-`
- `BDU-`
- `VU#`
- `MSRC-`
- `CISCO-SA-`
- `ORACLE-CPU`
- `APSB-`, `APA-`
- `APPLE-HT`
- `CHROMIUM:` / `CHROMIUM-`
- `VMSA-`
- `RHSA-`
- `USN-`
- `DSA-`
- `SUSE-SU-`
- `ICSA-`
- `CWE-`
- `cpe:`
- `pkg:` (Package URL / PURL)
The registry exposed via `AliasSchemes` and `AliasSchemeRegistry` can be used to validate aliases and
drive downstream conditionals without re-implementing pattern rules.

Some files were not shown because too many files have changed in this diff Show More